From c12a1dc5d8f04c3134be4131a77d283897500f9e Mon Sep 17 00:00:00 2001
From: Rod Vagg
Date: Wed, 14 Mar 2012 14:53:27 +1100
Subject: [PATCH 1/9] path.exists*() as 2nd level head not 3rd

---
 doc/api/path.markdown | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/doc/api/path.markdown b/doc/api/path.markdown
index 9447066965..e37f4a7e1a 100644
--- a/doc/api/path.markdown
+++ b/doc/api/path.markdown
@@ -142,7 +142,7 @@ an empty string. Examples:
 
     // returns ''
 
-### path.exists(p, [callback])
+## path.exists(p, [callback])
 
 Test whether or not the given path exists by checking with the file system.
 Then call the `callback` argument with either true or false. Example:
@@ -152,6 +152,6 @@ Then call the `callback` argument with either true or false. Example:
     });
 
 
-### path.existsSync(p)
+## path.existsSync(p)
 
 Synchronous version of `path.exists`.

From c834ef409e85eaa43782bb3edd844af8923f3056 Mon Sep 17 00:00:00 2001
From: Artur Adib
Date: Fri, 9 Mar 2012 09:35:39 -0500
Subject: [PATCH 2/9] Doc highlight fix

---
 doc/images/sh_javascript.min.js |   1 -
 doc/images/sh_main.js           | 553 --------------------------------
 doc/template.html               |   4 +-
 3 files changed, 2 insertions(+), 556 deletions(-)
 delete mode 100644 doc/images/sh_javascript.min.js
 delete mode 100644 doc/images/sh_main.js

diff --git a/doc/images/sh_javascript.min.js b/doc/images/sh_javascript.min.js
deleted file mode 100644
index d12482ced2..0000000000
--- a/doc/images/sh_javascript.min.js
+++ /dev/null
@@ -1 +0,0 @@
-[one-line minified sh_javascript highlighter definition; mangled beyond recovery in extraction and elided here]

[...]

+      [MSVSProject.Filter('a', contents=['joe\\a\\bob1.c']),
+       MSVSProject.Filter('b', contents=['joe\\b\\bob2.c'])]
+  """
+  if not prefix: prefix = []
+  result = []
+  excluded_result = []
+  folders = dict()
+  # Gather files into the final result, excluded, or folders.
+  for s in sources:
+    if len(s) == 1:
+      filename = _NormalizedSource('\\'.join(prefix + s))
+      if filename in excluded:
+        excluded_result.append(filename)
+      else:
+        result.append(filename)
+    else:
+      if not folders.get(s[0]):
+        folders[s[0]] = []
+      folders[s[0]].append(s[1:])
+  # Add a folder for excluded files.
+  if excluded_result and list_excluded:
+    excluded_folder = MSVSProject.Filter('_excluded_files',
+                                         contents=excluded_result)
+    result.append(excluded_folder)
+  # Populate all the folders.
+  for f in folders:
+    contents = _ConvertSourcesToFilterHierarchy(folders[f],
+                                                prefix=prefix + [f],
+                                                excluded=excluded,
+                                                list_excluded=list_excluded)
+    contents = MSVSProject.Filter(f, contents=contents)
+    result.append(contents)
+
+  return result
+
+
+def _ToolAppend(tools, tool_name, setting, value, only_if_unset=False):
+  if not value: return
+  # TODO(bradnelson): ugly hack, fix this more generally!!!
+  if 'Directories' in setting or 'Dependencies' in setting:
+    if type(value) == str:
+      value = value.replace('/', '\\')
+    else:
+      value = [i.replace('/', '\\') for i in value]
+  if not tools.get(tool_name):
+    tools[tool_name] = dict()
+  tool = tools[tool_name]
+  if tool.get(setting):
+    if only_if_unset: return
+    if type(tool[setting]) == list:
+      tool[setting] += value
+    else:
+      raise TypeError(
+          'Appending "%s" to a non-list setting "%s" for tool "%s" is '
+          'not allowed, previous value: %s' % (
+              value, setting, tool_name, str(tool[setting])))
+  else:
+    tool[setting] = value
+
+
+def _ConfigPlatform(config_data):
+  return config_data.get('msvs_configuration_platform', 'Win32')
+
+
+def _ConfigBaseName(config_name, platform_name):
+  if config_name.endswith('_' + platform_name):
+    return config_name[0:-len(platform_name)-1]
+  else:
+    return config_name
+
+
+def _ConfigFullName(config_name, config_data):
+  platform_name = _ConfigPlatform(config_data)
+  return '%s|%s' % (_ConfigBaseName(config_name, platform_name),
+                    platform_name)
+
+
+def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path,
+                                quote_cmd):
+
+  if [x for x in cmd if '$(InputDir)' in x]:
+    input_dir_preamble = (
+        'set INPUTDIR=$(InputDir)\n'
+        'set INPUTDIR=%INPUTDIR:$(ProjectDir)=%\n'
+        'set INPUTDIR=%INPUTDIR:~0,-1%\n'
+        )
+  else:
+    input_dir_preamble = ''
+
+  if cygwin_shell:
+    # Find path to cygwin.
+    cygwin_dir = _FixPath(spec.get('msvs_cygwin_dirs', ['.'])[0])
+    # Prepare command.
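+    # (Illustration with assumed values, not from the original change: a
+    # rule action such as ['python', 'gen.py', '$(InputPath)'] is quoted,
+    # backslash-escaped and handed to bash below, ending up roughly as
+    #   bash -c "\"python\" \"gen.py\" \"`cygpath -m \"${INPUTPATH}\"`\"".)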
+ direct_cmd = cmd + direct_cmd = [i.replace('$(IntDir)', + '`cygpath -m "${INTDIR}"`') for i in direct_cmd] + direct_cmd = [i.replace('$(OutDir)', + '`cygpath -m "${OUTDIR}"`') for i in direct_cmd] + direct_cmd = [i.replace('$(InputDir)', + '`cygpath -m "${INPUTDIR}"`') for i in direct_cmd] + if has_input_path: + direct_cmd = [i.replace('$(InputPath)', + '`cygpath -m "${INPUTPATH}"`') + for i in direct_cmd] + direct_cmd = ['"%s"' % i for i in direct_cmd] + direct_cmd = [i.replace('"', '\\"') for i in direct_cmd] + #direct_cmd = gyp.common.EncodePOSIXShellList(direct_cmd) + direct_cmd = ' '.join(direct_cmd) + # TODO(quote): regularize quoting path names throughout the module + cmd = ( + 'call "$(ProjectDir)%(cygwin_dir)s\\setup_env.bat" && ' + 'set CYGWIN=nontsec&& ') + if direct_cmd.find('NUMBER_OF_PROCESSORS') >= 0: + cmd += 'set /a NUMBER_OF_PROCESSORS_PLUS_1=%%NUMBER_OF_PROCESSORS%%+1&& ' + if direct_cmd.find('INTDIR') >= 0: + cmd += 'set INTDIR=$(IntDir)&& ' + if direct_cmd.find('OUTDIR') >= 0: + cmd += 'set OUTDIR=$(OutDir)&& ' + if has_input_path and direct_cmd.find('INPUTPATH') >= 0: + cmd += 'set INPUTPATH=$(InputPath) && ' + cmd += 'bash -c "%(cmd)s"' + cmd = cmd % {'cygwin_dir': cygwin_dir, + 'cmd': direct_cmd} + return input_dir_preamble + cmd + else: + # Convert cat --> type to mimic unix. + if cmd[0] == 'cat': + command = ['type'] + else: + command = [cmd[0].replace('/', '\\')] + # Fix the paths + # If the argument starts with a slash, it's probably a command line switch + arguments = [i.startswith('/') and i or _FixPath(i) for i in cmd[1:]] + arguments = [i.replace('$(InputDir)','%INPUTDIR%') for i in arguments] + if quote_cmd: + # Support a mode for using cmd directly. + # Convert any paths to native form (first element is used directly). + # TODO(quote): regularize quoting path names throughout the module + arguments = ['"%s"' % i for i in arguments] + # Collapse into a single command. + return input_dir_preamble + ' '.join(command + arguments) + + +def _BuildCommandLineForRule(spec, rule, has_input_path): + # Find path to cygwin. + cygwin_dir = _FixPath(spec.get('msvs_cygwin_dirs', ['.'])[0]) + + # Currently this weird argument munging is used to duplicate the way a + # python script would need to be run as part of the chrome tree. + # Eventually we should add some sort of rule_default option to set this + # per project. For now the behavior chrome needs is the default. + mcs = rule.get('msvs_cygwin_shell') + if mcs is None: + mcs = int(spec.get('msvs_cygwin_shell', 1)) + elif isinstance(mcs, str): + mcs = int(mcs) + quote_cmd = int(rule.get('msvs_quote_cmd', 1)) + return _BuildCommandLineForRuleRaw(spec, rule['action'], mcs, has_input_path, + quote_cmd) + + +def _AddActionStep(actions_dict, inputs, outputs, description, command): + """Merge action into an existing list of actions. + + Care must be taken so that actions which have overlapping inputs either don't + get assigned to the same input, or get collapsed into one. + + Arguments: + actions_dict: dictionary keyed on input name, which maps to a list of + dicts describing the actions attached to that input file. + inputs: list of inputs + outputs: list of outputs + description: description of the action + command: command line to execute + """ + # Require there to be at least one input (call sites will ensure this). + assert inputs + + action = { + 'inputs': inputs, + 'outputs': outputs, + 'description': description, + 'command': command, + } + + # Pick where to stick this action. 
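+  # (Background, assumed from how VCCustomBuildTool behaves: Visual Studio
+  # runs at most one custom build step per source file, so each action must
+  # be hung off a single concrete input.)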
+ # While less than optimal in terms of build time, attach them to the first + # input for now. + chosen_input = inputs[0] + + # Add it there. + if chosen_input not in actions_dict: + actions_dict[chosen_input] = [] + actions_dict[chosen_input].append(action) + + +def _AddCustomBuildToolForMSVS(p, spec, primary_input, + inputs, outputs, description, cmd): + """Add a custom build tool to execute something. + + Arguments: + p: the target project + spec: the target project dict + primary_input: input file to attach the build tool to + inputs: list of inputs + outputs: list of outputs + description: description of the action + cmd: command line to execute + """ + inputs = _FixPaths(inputs) + outputs = _FixPaths(outputs) + tool = MSVSProject.Tool( + 'VCCustomBuildTool', + {'Description': description, + 'AdditionalDependencies': ';'.join(inputs), + 'Outputs': ';'.join(outputs), + 'CommandLine': cmd, + }) + # Add to the properties of primary input for each config. + for config_name, c_data in spec['configurations'].iteritems(): + p.AddFileConfig(_FixPath(primary_input), + _ConfigFullName(config_name, c_data), tools=[tool]) + + +def _AddAccumulatedActionsToMSVS(p, spec, actions_dict): + """Add actions accumulated into an actions_dict, merging as needed. + + Arguments: + p: the target project + spec: the target project dict + actions_dict: dictionary keyed on input name, which maps to a list of + dicts describing the actions attached to that input file. + """ + for primary_input in actions_dict: + inputs = set() + outputs = set() + descriptions = [] + commands = [] + for action in actions_dict[primary_input]: + inputs.update(set(action['inputs'])) + outputs.update(set(action['outputs'])) + descriptions.append(action['description']) + commands.append(action['command']) + # Add the custom build step for one input file. + description = ', and also '.join(descriptions) + command = '\r\n'.join(commands) + _AddCustomBuildToolForMSVS(p, spec, + primary_input=primary_input, + inputs=inputs, + outputs=outputs, + description=description, + cmd=command) + + +def _RuleExpandPath(path, input_file): + """Given the input file to which a rule applied, string substitute a path. + + Arguments: + path: a path to string expand + input_file: the file to which the rule applied. + Returns: + The string substituted path. + """ + path = path.replace('$(InputName)', + os.path.splitext(os.path.split(input_file)[1])[0]) + path = path.replace('$(InputDir)', os.path.dirname(input_file)) + path = path.replace('$(InputExt)', + os.path.splitext(os.path.split(input_file)[1])[1]) + path = path.replace('$(InputFileName)', os.path.split(input_file)[1]) + path = path.replace('$(InputPath)', input_file) + return path + + +def _FindRuleTriggerFiles(rule, sources): + """Find the list of files which a particular rule applies to. + + Arguments: + rule: the rule in question + sources: the set of all known source files for this project + Returns: + The list of sources that trigger a particular rule. + """ + rule_ext = rule['extension'] + return [s for s in sources if s.endswith('.' + rule_ext)] + + +def _RuleInputsAndOutputs(rule, trigger_file): + """Find the inputs and outputs generated by a rule. + + Arguments: + rule: the rule in question. + trigger_file: the main trigger for this rule. + Returns: + The pair of (inputs, outputs) involved in this rule. 
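+
+  For example (values assumed): a rule with inputs ['compiler.py'] and
+  outputs ['$(InputName).cc'], triggered by 'foo.idl', yields roughly
+  ({'foo.idl', 'compiler.py'}, {'foo.cc'}).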
+ """ + raw_inputs = _FixPaths(rule.get('inputs', [])) + raw_outputs = _FixPaths(rule.get('outputs', [])) + inputs = set() + outputs = set() + inputs.add(trigger_file) + for i in raw_inputs: + inputs.add(_RuleExpandPath(i, trigger_file)) + for o in raw_outputs: + outputs.add(_RuleExpandPath(o, trigger_file)) + return (inputs, outputs) + + +def _GenerateNativeRulesForMSVS(p, rules, output_dir, spec, options): + """Generate a native rules file. + + Arguments: + p: the target project + rules: the set of rules to include + output_dir: the directory in which the project/gyp resides + spec: the project dict + options: global generator options + """ + rules_filename = '%s%s.rules' % (spec['target_name'], + options.suffix) + rules_file = MSVSToolFile.Writer(os.path.join(output_dir, rules_filename), + spec['target_name']) + # Add each rule. + for r in rules: + rule_name = r['rule_name'] + rule_ext = r['extension'] + inputs = _FixPaths(r.get('inputs', [])) + outputs = _FixPaths(r.get('outputs', [])) + cmd = _BuildCommandLineForRule(spec, r, has_input_path=True) + rules_file.AddCustomBuildRule(name=rule_name, + description=r.get('message', rule_name), + extensions=[rule_ext], + additional_dependencies=inputs, + outputs=outputs, + cmd=cmd) + # Write out rules file. + rules_file.WriteIfChanged() + + # Add rules file to project. + p.AddToolFile(rules_filename) + + +def _Cygwinify(path): + path = path.replace('$(OutDir)', '$(OutDirCygwin)') + path = path.replace('$(IntDir)', '$(IntDirCygwin)') + return path + + +def _GenerateExternalRules(rules, output_dir, spec, + sources, options, actions_to_add): + """Generate an external makefile to do a set of rules. + + Arguments: + rules: the list of rules to include + output_dir: path containing project and gyp files + spec: project specification data + sources: set of sources known + options: global generator options + actions_to_add: The list of actions we will add to. + """ + filename = '%s_rules%s.mk' % (spec['target_name'], options.suffix) + mk_file = gyp.common.WriteOnDiff(os.path.join(output_dir, filename)) + # Find cygwin style versions of some paths. + mk_file.write('OutDirCygwin:=$(shell cygpath -u "$(OutDir)")\n') + mk_file.write('IntDirCygwin:=$(shell cygpath -u "$(IntDir)")\n') + # Gather stuff needed to emit all: target. + all_inputs = set() + all_outputs = set() + all_output_dirs = set() + first_outputs = [] + for rule in rules: + trigger_files = _FindRuleTriggerFiles(rule, sources) + for tf in trigger_files: + inputs, outputs = _RuleInputsAndOutputs(rule, tf) + all_inputs.update(set(inputs)) + all_outputs.update(set(outputs)) + # Only use one target from each rule as the dependency for + # 'all' so we don't try to build each rule multiple times. + first_outputs.append(list(outputs)[0]) + # Get the unique output directories for this rule. + output_dirs = [os.path.split(i)[0] for i in outputs] + for od in output_dirs: + all_output_dirs.add(od) + first_outputs_cyg = [_Cygwinify(i) for i in first_outputs] + # Write out all: target, including mkdir for each output directory. + mk_file.write('all: %s\n' % ' '.join(first_outputs_cyg)) + for od in all_output_dirs: + if od: + mk_file.write('\tmkdir -p `cygpath -u "%s"`\n' % od) + mk_file.write('\n') + # Define how each output is generated. + for rule in rules: + trigger_files = _FindRuleTriggerFiles(rule, sources) + for tf in trigger_files: + # Get all the inputs and outputs for this rule for this trigger file. 
+ inputs, outputs = _RuleInputsAndOutputs(rule, tf) + inputs = [_Cygwinify(i) for i in inputs] + outputs = [_Cygwinify(i) for i in outputs] + # Prepare the command line for this rule. + cmd = [_RuleExpandPath(c, tf) for c in rule['action']] + cmd = ['"%s"' % i for i in cmd] + cmd = ' '.join(cmd) + # Add it to the makefile. + mk_file.write('%s: %s\n' % (' '.join(outputs), ' '.join(inputs))) + mk_file.write('\t%s\n\n' % cmd) + # Close up the file. + mk_file.close() + + # Add makefile to list of sources. + sources.add(filename) + # Add a build action to call makefile. + cmd = ['make', + 'OutDir=$(OutDir)', + 'IntDir=$(IntDir)', + '-j', '${NUMBER_OF_PROCESSORS_PLUS_1}', + '-f', filename] + cmd = _BuildCommandLineForRuleRaw(spec, cmd, True, False, True) + # Insert makefile as 0'th input, so it gets the action attached there, + # as this is easier to understand from in the IDE. + all_inputs = list(all_inputs) + all_inputs.insert(0, filename) + _AddActionStep(actions_to_add, + inputs=_FixPaths(all_inputs), + outputs=_FixPaths(all_outputs), + description='Running %s' % cmd, + command=cmd) + + +def _EscapeEnvironmentVariableExpansion(s): + """Escapes % characters. + + Escapes any % characters so that Windows-style environment variable + expansions will leave them alone. + See http://connect.microsoft.com/VisualStudio/feedback/details/106127/cl-d-name-text-containing-percentage-characters-doesnt-compile + to understand why we have to do this. + + Args: + s: The string to be escaped. + + Returns: + The escaped string. + """ + s = s.replace('%', '%%') + return s + + +quote_replacer_regex = re.compile(r'(\\*)"') + + +def _EscapeCommandLineArgumentForMSVS(s): + """Escapes a Windows command-line argument. + + So that the Win32 CommandLineToArgv function will turn the escaped result back + into the original string. + See http://msdn.microsoft.com/en-us/library/17w5ykft.aspx + ("Parsing C++ Command-Line Arguments") to understand why we have to do + this. + + Args: + s: the string to be escaped. + Returns: + the escaped string. + """ + + def _Replace(match): + # For a literal quote, CommandLineToArgv requires an odd number of + # backslashes preceding it, and it produces half as many literal backslashes + # (rounded down). So we need to produce 2n+1 backslashes. + return 2 * match.group(1) + '\\"' + + # Escape all quotes so that they are interpreted literally. + s = quote_replacer_regex.sub(_Replace, s) + # Now add unescaped quotes so that any whitespace is interpreted literally. + s = '"' + s + '"' + return s + + +delimiters_replacer_regex = re.compile(r'(\\*)([,;]+)') + + +def _EscapeVCProjCommandLineArgListItem(s): + """Escapes command line arguments for MSVS. + + The VCProj format stores string lists in a single string using commas and + semi-colons as separators, which must be quoted if they are to be + interpreted literally. However, command-line arguments may already have + quotes, and the VCProj parser is ignorant of the backslash escaping + convention used by CommandLineToArgv, so the command-line quotes and the + VCProj quotes may not be the same quotes. So to store a general + command-line argument in a VCProj list, we need to parse the existing + quoting according to VCProj's convention and quote any delimiters that are + not already quoted by that convention. The quotes that we add will also be + seen by CommandLineToArgv, so if backslashes precede them then we also have + to escape those backslashes according to the CommandLineToArgv + convention. + + Args: + s: the string to be escaped. 
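+
+  For example, the unquoted item a,b becomes a","b: the comma is wrapped in
+  VCProj-style quotes so the list parser keeps it as part of this item.
+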
+  Returns:
+    the escaped string.
+  """
+
+  def _Replace(match):
+    # For a non-literal quote, CommandLineToArgv requires an even number of
+    # backslashes preceding it, and it produces half as many literal
+    # backslashes. So we need to produce 2n backslashes.
+    return 2 * match.group(1) + '"' + match.group(2) + '"'
+
+  segments = s.split('"')
+  # The unquoted segments are at the even-numbered indices.
+  for i in range(0, len(segments), 2):
+    segments[i] = delimiters_replacer_regex.sub(_Replace, segments[i])
+  # Concatenate back into a single string
+  s = '"'.join(segments)
+  if len(segments) % 2 == 0:
+    # String ends while still quoted according to VCProj's convention. This
+    # means the delimiter and the next list item that follow this one in the
+    # .vcproj file will be misinterpreted as part of this item. There is
+    # nothing we can do about this. Adding an extra quote would correct the
+    # problem in the VCProj but cause the same problem on the final
+    # command-line. Moving the item to the end of the list does work, but
+    # that's only possible if there's only one such item. Let's just warn
+    # the user.
+    print >> sys.stderr, ('Warning: MSVS may misinterpret the odd number of '
+                          'quotes in ' + s)
+  return s
+
+
+def _EscapeCppDefineForMSVS(s):
+  """Escapes a CPP define so that it will reach the compiler unaltered."""
+  s = _EscapeEnvironmentVariableExpansion(s)
+  s = _EscapeCommandLineArgumentForMSVS(s)
+  s = _EscapeVCProjCommandLineArgListItem(s)
+  return s
+
+
+quote_replacer_regex2 = re.compile(r'(\\+)"')
+
+
+def _EscapeCommandLineArgumentForMSBuild(s):
+  """Escapes a Windows command-line argument for use by MSBuild."""
+
+  def _Replace(match):
+    return (len(match.group(1))/2*4)*'\\' + '\\"'
+
+  # Escape all quotes so that they are interpreted literally.
+  s = quote_replacer_regex2.sub(_Replace, s)
+  return s
+
+
+def _EscapeMSBuildSpecialCharacters(s):
+  escape_dictionary = {
+      '%': '%25',
+      '$': '%24',
+      '@': '%40',
+      "'": '%27',
+      ';': '%3B',
+      '?': '%3F',
+      '*': '%2A'
+      }
+  result = ''.join([escape_dictionary.get(c, c) for c in s])
+  return result
+
+
+def _EscapeCppDefineForMSBuild(s):
+  """Escapes a CPP define so that it will reach the compiler unaltered."""
+  s = _EscapeEnvironmentVariableExpansion(s)
+  s = _EscapeCommandLineArgumentForMSBuild(s)
+  s = _EscapeMSBuildSpecialCharacters(s)
+  return s
+
+
+def _GenerateRulesForMSVS(p, output_dir, options, spec,
+                          sources, excluded_sources,
+                          actions_to_add):
+  """Generate all the rules for a particular project.
+
+  Arguments:
+    p: the project
+    output_dir: directory to emit rules to
+    options: global options passed to the generator
+    spec: the specification for this project
+    sources: the set of all known source files in this project
+    excluded_sources: the set of sources excluded from normal processing
+    actions_to_add: deferred list of actions to add in
+  """
+  rules = spec.get('rules', [])
+  rules_native = [r for r in rules if not int(r.get('msvs_external_rule', 0))]
+  rules_external = [r for r in rules if int(r.get('msvs_external_rule', 0))]
+
+  # Handle rules that use a native rules file.
+  if rules_native:
+    _GenerateNativeRulesForMSVS(p, rules_native, output_dir, spec, options)
+
+  # Handle external rules (non-native rules).
+  if rules_external:
+    _GenerateExternalRules(rules_external, output_dir, spec,
+                           sources, options, actions_to_add)
+  _AdjustSourcesForRules(rules, sources, excluded_sources)
+
+
+def _AdjustSourcesForRules(rules, sources, excluded_sources):
+  # Add outputs generated by each rule (if applicable).
+ for rule in rules: + # Done if not processing outputs as sources. + if int(rule.get('process_outputs_as_sources', False)): + # Add in the outputs from this rule. + trigger_files = _FindRuleTriggerFiles(rule, sources) + for trigger_file in trigger_files: + inputs, outputs = _RuleInputsAndOutputs(rule, trigger_file) + inputs = set(_FixPaths(inputs)) + outputs = set(_FixPaths(outputs)) + inputs.remove(_FixPath(trigger_file)) + sources.update(inputs) + excluded_sources.update(inputs) + sources.update(outputs) + + +def _FilterActionsFromExcluded(excluded_sources, actions_to_add): + """Take inputs with actions attached out of the list of exclusions. + + Arguments: + excluded_sources: list of source files not to be built. + actions_to_add: dict of actions keyed on source file they're attached to. + Returns: + excluded_sources with files that have actions attached removed. + """ + must_keep = set(_FixPaths(actions_to_add.keys())) + return [s for s in excluded_sources if s not in must_keep] + + +def _GetDefaultConfiguration(spec): + return spec['configurations'][spec['default_configuration']] + + +def _GetGuidOfProject(proj_path, spec): + """Get the guid for the project. + + Arguments: + proj_path: Path of the vcproj or vcxproj file to generate. + spec: The target dictionary containing the properties of the target. + Returns: + the guid. + Raises: + ValueError: if the specified GUID is invalid. + """ + # Pluck out the default configuration. + default_config = _GetDefaultConfiguration(spec) + # Decide the guid of the project. + guid = default_config.get('msvs_guid') + if guid: + if VALID_MSVS_GUID_CHARS.match(guid) is None: + raise ValueError('Invalid MSVS guid: "%s". Must match regex: "%s".' % + (guid, VALID_MSVS_GUID_CHARS.pattern)) + guid = '{%s}' % guid + guid = guid or MSVSNew.MakeGuid(proj_path) + return guid + + +def _GenerateProject(project, options, version, generator_flags): + """Generates a vcproj file. + + Arguments: + project: the MSVSProject object. + options: global generator options. + version: the MSVSVersion object. + generator_flags: dict of generator-specific flags. + """ + default_config = _GetDefaultConfiguration(project.spec) + + # Skip emitting anything if told to with msvs_existing_vcproj option. + if default_config.get('msvs_existing_vcproj'): + return + + if version.UsesVcxproj(): + _GenerateMSBuildProject(project, options, version, generator_flags) + else: + _GenerateMSVSProject(project, options, version, generator_flags) + + +def _GenerateMSVSProject(project, options, version, generator_flags): + """Generates a .vcproj file. It may create .rules and .user files too. + + Arguments: + project: The project object we will generate the file for. + options: Global options passed to the generator. + version: The VisualStudioVersion object. + generator_flags: dict of generator-specific flags. + """ + spec = project.spec + vcproj_dir = os.path.dirname(project.path) + if vcproj_dir and not os.path.exists(vcproj_dir): + os.makedirs(vcproj_dir) + + platforms = _GetUniquePlatforms(spec) + p = MSVSProject.Writer(project.path, version, spec['target_name'], + project.guid, platforms) + + # Get directory project file is in. 
+  project_dir = os.path.split(project.path)[0]
+  gyp_path = _NormalizedSource(project.build_file)
+  relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir)
+
+  config_type = _GetMSVSConfigurationType(spec, project.build_file)
+  for config_name, config in spec['configurations'].iteritems():
+    _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config)
+
+  # Prepare list of sources and excluded sources.
+  gyp_file = os.path.split(project.build_file)[1]
+  sources, excluded_sources = _PrepareListOfSources(spec, gyp_file)
+
+  # Add rules.
+  actions_to_add = {}
+  _GenerateRulesForMSVS(p, project_dir, options, spec,
+                        sources, excluded_sources,
+                        actions_to_add)
+  list_excluded = generator_flags.get('msvs_list_excluded_files', True)
+  sources, excluded_sources, excluded_idl = (
+      _AdjustSourcesAndConvertToFilterHierarchy(
+          spec, options, project_dir, sources, excluded_sources,
+          list_excluded))
+
+  # Add in files.
+  _VerifySourcesExist(sources, project_dir)
+  p.AddFiles(sources)
+
+  _AddToolFilesToMSVS(p, spec)
+  _HandlePreCompiledHeaders(p, sources, spec)
+  _AddActions(actions_to_add, spec, relative_path_of_gyp_file)
+  _AddCopies(actions_to_add, spec)
+  _WriteMSVSUserFile(project.path, version, spec)
+
+  # NOTE: this stanza must appear after all actions have been decided.
+  # Don't exclude sources with actions attached, or they won't run.
+  excluded_sources = _FilterActionsFromExcluded(
+      excluded_sources, actions_to_add)
+  _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl,
+                              list_excluded)
+  _AddAccumulatedActionsToMSVS(p, spec, actions_to_add)
+
+  # Write it out.
+  p.WriteIfChanged()
+
+
+def _GetUniquePlatforms(spec):
+  """Returns the list of unique platforms for this spec, e.g. ['Win32', ...].
+
+  Arguments:
+    spec: The target dictionary containing the properties of the target.
+  Returns:
+    The list of unique platform names.
+  """
+  # Gather list of unique platforms.
+  platforms = set()
+  for configuration in spec['configurations']:
+    platforms.add(_ConfigPlatform(spec['configurations'][configuration]))
+  platforms = list(platforms)
+  return platforms
+
+
+def _CreateMSVSUserFile(proj_path, version, spec):
+  """Generates a .user file for the user running this Gyp program.
+
+  Arguments:
+    proj_path: The path of the project file being created.  The .user file
+               shares the same path (with an appropriate suffix).
+    version: The VisualStudioVersion object.
+    spec: The target dictionary containing the properties of the target.
+  Returns:
+    The MSVSUserFile object created.
+  """
+  (domain, username) = _GetDomainAndUserName()
+  vcuser_filename = '.'.join([proj_path, domain, username, 'user'])
+  user_file = MSVSUserFile.Writer(vcuser_filename, version,
+                                  spec['target_name'])
+  return user_file
+
+
+def _GetMSVSConfigurationType(spec, build_file):
+  """Returns the configuration type for this project.
+
+  It's a number defined by Microsoft.  May raise an exception.
+
+  Args:
+    spec: The target dictionary containing the properties of the target.
+    build_file: The path of the gyp file.
+  Returns:
+    A string holding the number of the configuration type.
+  """
+  try:
+    config_type = {
+        'executable': '1',       # .exe
+        'shared_library': '2',   # .dll
+        'loadable_module': '2',  # .dll
+        'static_library': '4',   # .lib
+        'none': '10',            # Utility type
+        }[spec['type']]
+  except KeyError:
+    if spec.get('type'):
+      raise Exception('Target type %s is not a valid target type for '
+                      'target %s in %s.' %
+                      (spec['type'], spec['target_name'], build_file))
+    else:
+      raise Exception('Missing type field for target %s in %s.' %
+                      (spec['target_name'], build_file))
+  return config_type
+
+
+def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
+  """Adds a configuration to the MSVS project.
+
+  Many settings in a vcproj file are specific to a configuration.  This
+  function adds the configuration-specific part of the vcproj file.
+
+  Arguments:
+    p: The target project being generated.
+    spec: The target dictionary containing the properties of the target.
+    config_type: The configuration type, a number as defined by Microsoft.
+    config_name: The name of the configuration.
+    config: The dictionary that defines the special processing to be done
+            for this configuration.
+  """
+  # Get the information for this configuration
+  include_dirs, resource_include_dirs = _GetIncludeDirs(config)
+  libraries = _GetLibraries(spec)
+  out_file, vc_tool, _ = _GetOutputFilePathAndTool(spec)
+  defines = _GetDefines(config)
+  defines = [_EscapeCppDefineForMSVS(d) for d in defines]
+  disabled_warnings = _GetDisabledWarnings(config)
+  prebuild = config.get('msvs_prebuild')
+  postbuild = config.get('msvs_postbuild')
+  def_file = _GetModuleDefinition(spec)
+  precompiled_header = config.get('msvs_precompiled_header')
+
+  # Prepare the list of tools as a dictionary.
+  tools = dict()
+  # Add in user specified msvs_settings.
+  msvs_settings = config.get('msvs_settings', {})
+  MSVSSettings.ValidateMSVSSettings(msvs_settings)
+  for tool in msvs_settings:
+    settings = config['msvs_settings'][tool]
+    for setting in settings:
+      _ToolAppend(tools, tool, setting, settings[setting])
+  # Add the information to the appropriate tool
+  _ToolAppend(tools, 'VCCLCompilerTool',
+              'AdditionalIncludeDirectories', include_dirs)
+  _ToolAppend(tools, 'VCResourceCompilerTool',
+              'AdditionalIncludeDirectories', resource_include_dirs)
+  # Add in libraries.
+  _ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies', libraries)
+  if out_file:
+    _ToolAppend(tools, vc_tool, 'OutputFile', out_file, only_if_unset=True)
+  # Add defines.
+  _ToolAppend(tools, 'VCCLCompilerTool', 'PreprocessorDefinitions', defines)
+  _ToolAppend(tools, 'VCResourceCompilerTool', 'PreprocessorDefinitions',
+              defines)
+  # Change program database directory to prevent collisions.
+  _ToolAppend(tools, 'VCCLCompilerTool', 'ProgramDataBaseFileName',
+              '$(IntDir)\\$(ProjectName)\\vc80.pdb', only_if_unset=True)
+  # Add disabled warnings.
+  _ToolAppend(tools, 'VCCLCompilerTool',
+              'DisableSpecificWarnings', disabled_warnings)
+  # Add Pre-build.
+  _ToolAppend(tools, 'VCPreBuildEventTool', 'CommandLine', prebuild)
+  # Add Post-build.
+  _ToolAppend(tools, 'VCPostBuildEventTool', 'CommandLine', postbuild)
+  # Turn on precompiled headers if appropriate.
+  if precompiled_header:
+    precompiled_header = os.path.split(precompiled_header)[1]
+    _ToolAppend(tools, 'VCCLCompilerTool', 'UsePrecompiledHeader', '2')
+    _ToolAppend(tools, 'VCCLCompilerTool',
+                'PrecompiledHeaderThrough', precompiled_header)
+    _ToolAppend(tools, 'VCCLCompilerTool',
+                'ForcedIncludeFiles', precompiled_header)
+  # Loadable modules don't generate import libraries;
+  # tell dependent projects to not expect one.
+  if spec['type'] == 'loadable_module':
+    _ToolAppend(tools, 'VCLinkerTool', 'IgnoreImportLibrary', 'true')
+  # Set the module definition file if any.
+  if def_file:
+    _ToolAppend(tools, 'VCLinkerTool', 'ModuleDefinitionFile', def_file)
+
+  _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name)
+
+
+def _GetIncludeDirs(config):
+  """Returns the list of directories to be used for #include directives.
+
+  Arguments:
+    config: The dictionary that defines the special processing to be done
+            for this configuration.
+  Returns:
+    The list of directory paths.
+  """
+  # TODO(bradnelson): include_dirs should really be flexible enough not to
+  # require this sort of thing.
+  include_dirs = (
+      config.get('include_dirs', []) +
+      config.get('msvs_system_include_dirs', []))
+  resource_include_dirs = config.get('resource_include_dirs', include_dirs)
+  include_dirs = _FixPaths(include_dirs)
+  resource_include_dirs = _FixPaths(resource_include_dirs)
+  return include_dirs, resource_include_dirs
+
+
+def _GetLibraries(spec):
+  """Returns the list of libraries for this configuration.
+
+  Arguments:
+    spec: The target dictionary containing the properties of the target.
+  Returns:
+    The list of libraries.
+  """
+  libraries = spec.get('libraries', [])
+  # Strip out -l, as it is not used on windows (but is needed so we can pass
+  # in libraries that are assumed to be in the default library path).
+  # Also remove duplicate entries, leaving only the last duplicate, while
+  # preserving order.
+  found = set()
+  unique_libraries_list = []
+  for entry in reversed(libraries):
+    library = re.sub('^\-l', '', entry)
+    if library not in found:
+      found.add(library)
+      unique_libraries_list.append(library)
+  unique_libraries_list.reverse()
+  return unique_libraries_list
+
+
+def _GetOutputFilePathAndTool(spec):
+  """Returns the path and tool to use for this target.
+
+  Figures out the path of the file this spec will create and the name of
+  the VC tool that will create it.
+
+  Arguments:
+    spec: The target dictionary containing the properties of the target.
+  Returns:
+    A triple of (file path, name of the vc tool, name of the msbuild tool)
+  """
+  # Select a name for the output file.
+  out_file = ''
+  vc_tool = ''
+  msbuild_tool = ''
+  output_file_map = {
+      'executable': ('VCLinkerTool', 'Link', '$(OutDir)\\', '.exe'),
+      'shared_library': ('VCLinkerTool', 'Link', '$(OutDir)\\', '.dll'),
+      'loadable_module': ('VCLinkerTool', 'Link', '$(OutDir)\\', '.dll'),
+      # TODO(jeanluc) If we want to avoid the MSB8012 warnings in
+      # VisualStudio 2010, we will have to change the value of $(OutDir)
+      # to contain the \lib suffix, rather than doing it as below.
+      'static_library': ('VCLibrarianTool', 'Lib', '$(OutDir)\\lib\\',
+                         '.lib'),
+      }
+  output_file_props = output_file_map.get(spec['type'])
+  if output_file_props and int(spec.get('msvs_auto_output_file', 1)):
+    vc_tool, msbuild_tool, out_dir, suffix = output_file_props
+    out_dir = spec.get('product_dir', out_dir)
+    product_extension = spec.get('product_extension')
+    if product_extension:
+      suffix = '.' + product_extension
+    prefix = spec.get('product_prefix', '')
+    product_name = spec.get('product_name', '$(ProjectName)')
+    out_file = ntpath.join(out_dir, prefix + product_name + suffix)
+  return out_file, vc_tool, msbuild_tool
+
+
+def _GetDefines(config):
+  """Returns the list of preprocessor definitions for this configuration.
+
+  Arguments:
+    config: The dictionary that defines the special processing to be done
+            for this configuration.
+  Returns:
+    The list of preprocessor definitions.
+  """
+  defines = []
+  for d in config.get('defines', []):
+    if type(d) == list:
+      fd = '='.join([str(dpart) for dpart in d])
+    else:
+      fd = str(d)
+    defines.append(fd)
+  return defines
+
+
+def _GetDisabledWarnings(config):
+  return [str(i) for i in config.get('msvs_disabled_warnings', [])]
+
+
+def _GetModuleDefinition(spec):
+  def_file = ''
+  if spec['type'] in ['shared_library', 'loadable_module']:
+    def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
+    if len(def_files) == 1:
+      def_file = _FixPath(def_files[0])
+    elif def_files:
+      raise ValueError(
+          'Multiple module definition files in one target, target %s lists '
+          'multiple .def files: %s' % (
+              spec['target_name'], ' '.join(def_files)))
+  return def_file
+
+
+def _ConvertToolsToExpectedForm(tools):
+  """Convert tools to a form expected by Visual Studio.
+
+  Arguments:
+    tools: A dictionary of settings; the tool name is the key.
+  Returns:
+    A list of Tool objects.
+  """
+  tool_list = []
+  for tool, settings in tools.iteritems():
+    # Collapse settings with lists.
+    settings_fixed = {}
+    for setting, value in settings.iteritems():
+      if type(value) == list:
+        if ((tool == 'VCLinkerTool' and
+             setting == 'AdditionalDependencies') or
+            setting == 'AdditionalOptions'):
+          settings_fixed[setting] = ' '.join(value)
+        else:
+          settings_fixed[setting] = ';'.join(value)
+      else:
+        settings_fixed[setting] = value
+    # Add in this tool.
+    tool_list.append(MSVSProject.Tool(tool, settings_fixed))
+  return tool_list
+
+
+def _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name):
+  """Add to the project file the configuration specified by config.
+
+  Arguments:
+    p: The target project being generated.
+    spec: the target project dict.
+    tools: A dictionary of settings; the tool name is the key.
+    config: The dictionary that defines the special processing to be done
+            for this configuration.
+    config_type: The configuration type, a number as defined by Microsoft.
+    config_name: The name of the configuration.
+  """
+  attributes = _GetMSVSAttributes(spec, config, config_type)
+  # Add in this configuration.
+  tool_list = _ConvertToolsToExpectedForm(tools)
+  p.AddConfig(_ConfigFullName(config_name, config),
+              attrs=attributes, tools=tool_list)
+
+
+def _GetMSVSAttributes(spec, config, config_type):
+  # Prepare configuration attributes.
+  prepared_attrs = {}
+  source_attrs = config.get('msvs_configuration_attributes', {})
+  for a in source_attrs:
+    prepared_attrs[a] = source_attrs[a]
+  # Add props files.
+  vsprops_dirs = config.get('msvs_props', [])
+  vsprops_dirs = _FixPaths(vsprops_dirs)
+  if vsprops_dirs:
+    prepared_attrs['InheritedPropertySheets'] = ';'.join(vsprops_dirs)
+  # Set configuration type.
+  prepared_attrs['ConfigurationType'] = config_type
+  output_dir = prepared_attrs.get('OutputDirectory',
+                                  '$(SolutionDir)$(ConfigurationName)')
+  # TODO(jeanluc) If we want to avoid the MSB8012 warning, we should
+  # add code like the following to place libraries in their own directory.
+ # if config_type == '4': + # output_dir = spec.get('product_dir', output_dir + '\\lib') + prepared_attrs['OutputDirectory'] = output_dir + if 'IntermediateDirectory' not in prepared_attrs: + intermediate = '$(ConfigurationName)\\obj\\$(ProjectName)' + prepared_attrs['IntermediateDirectory'] = intermediate + return prepared_attrs + + +def _AddNormalizedSources(sources_set, sources_array): + sources = [_NormalizedSource(s) for s in sources_array] + sources_set.update(set(sources)) + + +def _PrepareListOfSources(spec, gyp_file): + """Prepare list of sources and excluded sources. + + Besides the sources specified directly in the spec, adds the gyp file so + that a change to it will cause a re-compile. Also adds appropriate sources + for actions and copies. Assumes later stage will un-exclude files which + have custom build steps attached. + + Arguments: + spec: The target dictionary containing the properties of the target. + gyp_file: The name of the gyp file. + Returns: + A pair of (list of sources, list of excluded sources). + The sources will be relative to the gyp file. + """ + sources = set() + _AddNormalizedSources(sources, spec.get('sources', [])) + excluded_sources = set() + # Add in the gyp file. + sources.add(gyp_file) + + # Add in 'action' inputs and outputs. + for a in spec.get('actions', []): + inputs = a.get('inputs', []) + inputs = [_NormalizedSource(i) for i in inputs] + # Add all inputs to sources and excluded sources. + inputs = set(inputs) + sources.update(inputs) + excluded_sources.update(inputs) + if int(a.get('process_outputs_as_sources', False)): + _AddNormalizedSources(sources, a.get('outputs', [])) + # Add in 'copies' inputs and outputs. + for cpy in spec.get('copies', []): + _AddNormalizedSources(sources, cpy.get('files', [])) + return (sources, excluded_sources) + + +def _AdjustSourcesAndConvertToFilterHierarchy( + spec, options, gyp_dir, sources, excluded_sources, list_excluded): + """Adjusts the list of sources and excluded sources. + + Also converts the sets to lists. + + Arguments: + spec: The target dictionary containing the properties of the target. + options: Global generator options. + gyp_dir: The path to the gyp file being processed. + sources: A set of sources to be included for this project. + excluded_sources: A set of sources to be excluded for this project. + Returns: + A trio of (list of sources, list of excluded sources, + path of excluded IDL file) + """ + # Exclude excluded sources coming into the generator. + excluded_sources.update(set(spec.get('sources_excluded', []))) + # Add excluded sources into sources for good measure. + sources.update(excluded_sources) + # Convert to proper windows form. + # NOTE: sources goes from being a set to a list here. + # NOTE: excluded_sources goes from being a set to a list here. + sources = _FixPaths(sources) + # Convert to proper windows form. + excluded_sources = _FixPaths(excluded_sources) + + excluded_idl = _IdlFilesHandledNonNatively(spec, sources) + + precompiled_related = _GetPrecompileRelatedFiles(spec) + # Find the excluded ones, minus the precompiled header related ones. + fully_excluded = [i for i in excluded_sources if i not in precompiled_related] + + # Convert to folders and the right slashes. 
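+  # e.g. 'a\\b\\c.cc'.split('\\') -> ['a', 'b', 'c.cc'], which
+  # _ConvertSourcesToFilterHierarchy below turns into nested
+  # MSVSProject.Filter('a', ...) entries.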
+ sources = [i.split('\\') for i in sources] + sources = _ConvertSourcesToFilterHierarchy(sources, excluded=fully_excluded, + list_excluded=list_excluded) + + return sources, excluded_sources, excluded_idl + + +def _IdlFilesHandledNonNatively(spec, sources): + # If any non-native rules use 'idl' as an extension exclude idl files. + # Gather a list here to use later. + using_idl = False + for rule in spec.get('rules', []): + if rule['extension'] == 'idl' and int(rule.get('msvs_external_rule', 0)): + using_idl = True + break + if using_idl: + excluded_idl = [i for i in sources if i.endswith('.idl')] + else: + excluded_idl = [] + return excluded_idl + + +def _GetPrecompileRelatedFiles(spec): + # Gather a list of precompiled header related sources. + precompiled_related = [] + for _, config in spec['configurations'].iteritems(): + for k in precomp_keys: + f = config.get(k) + if f: + precompiled_related.append(_FixPath(f)) + return precompiled_related + + +def _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl, + list_excluded): + exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl) + for file_name, excluded_configs in exclusions.iteritems(): + if (not list_excluded and + len(excluded_configs) == len(spec['configurations'])): + # If we're not listing excluded files, then they won't appear in the + # project, so don't try to configure them to be excluded. + pass + else: + for config_name, config in excluded_configs: + p.AddFileConfig(file_name, _ConfigFullName(config_name, config), + {'ExcludedFromBuild': 'true'}) + + +def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl): + exclusions = {} + # Exclude excluded sources from being built. + for f in excluded_sources: + excluded_configs = [] + for config_name, config in spec['configurations'].iteritems(): + precomped = [_FixPath(config.get(i, '')) for i in precomp_keys] + # Don't do this for ones that are precompiled header related. + if f not in precomped: + excluded_configs.append((config_name, config)) + exclusions[f] = excluded_configs + # If any non-native rules use 'idl' as an extension exclude idl files. + # Exclude them now. + for f in excluded_idl: + excluded_configs = [] + for config_name, config in spec['configurations'].iteritems(): + excluded_configs.append((config_name, config)) + exclusions[f] = excluded_configs + return exclusions + + +def _AddToolFilesToMSVS(p, spec): + # Add in tool files (rules). + tool_files = set() + for _, config in spec['configurations'].iteritems(): + for f in config.get('msvs_tool_files', []): + tool_files.add(f) + for f in tool_files: + p.AddToolFile(f) + + +def _HandlePreCompiledHeaders(p, sources, spec): + # Pre-compiled header source stubs need a different compiler flag + # (generate precompiled header) and any source file not of the same + # kind (i.e. C vs. C++) as the precompiled header source stub needs + # to have use of precompiled headers disabled. + extensions_excluded_from_precompile = [] + for config_name, config in spec['configurations'].iteritems(): + source = config.get('msvs_precompiled_source') + if source: + source = _FixPath(source) + # UsePrecompiledHeader=1 for if using precompiled headers. 
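+      # (For reference: in the VCCLCompilerTool enum, 0 = not using
+      # precompiled headers, 1 = create them (/Yc), 2 = use them (/Yu).)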
+ tool = MSVSProject.Tool('VCCLCompilerTool', + {'UsePrecompiledHeader': '1'}) + p.AddFileConfig(source, _ConfigFullName(config_name, config), + {}, tools=[tool]) + basename, extension = os.path.splitext(source) + if extension == '.c': + extensions_excluded_from_precompile = ['.cc', '.cpp', '.cxx'] + else: + extensions_excluded_from_precompile = ['.c'] + def DisableForSourceTree(source_tree): + for source in source_tree: + if isinstance(source, MSVSProject.Filter): + DisableForSourceTree(source.contents) + else: + basename, extension = os.path.splitext(source) + if extension in extensions_excluded_from_precompile: + for config_name, config in spec['configurations'].iteritems(): + tool = MSVSProject.Tool('VCCLCompilerTool', + {'UsePrecompiledHeader': '0', + 'ForcedIncludeFiles': '$(NOINHERIT)'}) + p.AddFileConfig(_FixPath(source), + _ConfigFullName(config_name, config), + {}, tools=[tool]) + # Do nothing if there was no precompiled source. + if extensions_excluded_from_precompile: + DisableForSourceTree(sources) + + +def _AddActions(actions_to_add, spec, relative_path_of_gyp_file): + # Add actions. + actions = spec.get('actions', []) + for a in actions: + cmd = _BuildCommandLineForRule(spec, a, has_input_path=False) + # Attach actions to the gyp file if nothing else is there. + inputs = a.get('inputs') or [relative_path_of_gyp_file] + # Add the action. + _AddActionStep(actions_to_add, + inputs=inputs, + outputs=a.get('outputs', []), + description=a.get('message', a['action_name']), + command=cmd) + + +def _WriteMSVSUserFile(project_path, version, spec): + # Add run_as and test targets. + if 'run_as' in spec: + run_as = spec['run_as'] + action = run_as.get('action', []) + environment = run_as.get('environment', []) + working_directory = run_as.get('working_directory', '.') + elif int(spec.get('test', 0)): + action = ['$(TargetPath)', '--gtest_print_time'] + environment = [] + working_directory = '.' + else: + return # Nothing to add + # Write out the user file. + user_file = _CreateMSVSUserFile(project_path, version, spec) + for config_name, c_data in spec['configurations'].iteritems(): + user_file.AddDebugSettings(_ConfigFullName(config_name, c_data), + action, environment, working_directory) + user_file.WriteIfChanged() + + +def _AddCopies(actions_to_add, spec): + copies = _GetCopies(spec) + for inputs, outputs, cmd, description in copies: + _AddActionStep(actions_to_add, inputs=inputs, outputs=outputs, + description=description, command=cmd) + + +def _GetCopies(spec): + copies = [] + # Add copies. + for cpy in spec.get('copies', []): + for src in cpy.get('files', []): + dst = os.path.join(cpy['destination'], os.path.basename(src)) + # _AddCustomBuildToolForMSVS() will call _FixPath() on the inputs and + # outputs, so do the same for our generated command line. 
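+      # (Illustration, hypothetical paths: copying 'lib/run.bat' into
+      # destination 'out/' generates roughly
+      #   mkdir "out\" 2>nul & set ERRORLEVEL=0 & copy /Y "lib\run.bat" "out\run.bat")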
+    if src.endswith('/'):
+      src_bare = src[:-1]
+      base_dir = posixpath.split(src_bare)[0]
+      outer_dir = posixpath.split(src_bare)[1]
+      cmd = 'cd "%s" && xcopy /e /f /y "%s" "%s\\%s\\"' % (
+          _FixPath(base_dir), outer_dir, _FixPath(dst), outer_dir)
+      copies.append(([src], ['dummy_copies', dst], cmd,
+                     'Copying %s to %s' % (src, dst)))
+    else:
+      cmd = 'mkdir "%s" 2>nul & set ERRORLEVEL=0 & copy /Y "%s" "%s"' % (
+          _FixPath(cpy['destination']), _FixPath(src), _FixPath(dst))
+      copies.append(([src], [dst], cmd, 'Copying %s to %s' % (src, dst)))
+  return copies
+
+
+def _GetPathDict(root, path):
+  # |path| will eventually be empty (in the recursive calls) if it was
+  # initially relative; otherwise it will eventually end up as '\', 'D:\',
+  # etc.
+  if not path or path.endswith(os.sep):
+    return root
+  parent, folder = os.path.split(path)
+  parent_dict = _GetPathDict(root, parent)
+  if folder not in parent_dict:
+    parent_dict[folder] = dict()
+  return parent_dict[folder]
+
+
+def _DictsToFolders(base_path, bucket, flat):
+  # Convert to folders recursively.
+  children = []
+  for folder, contents in bucket.iteritems():
+    if type(contents) == dict:
+      folder_children = _DictsToFolders(os.path.join(base_path, folder),
+                                        contents, flat)
+      if flat:
+        children += folder_children
+      else:
+        folder_children = MSVSNew.MSVSFolder(os.path.join(base_path, folder),
+                                             name='(' + folder + ')',
+                                             entries=folder_children)
+        children.append(folder_children)
+    else:
+      children.append(contents)
+  return children
+
+
+def _CollapseSingles(parent, node):
+  # Recursively explore the tree of dicts looking for projects which are
+  # the sole item in a folder which has the same name as the project. Bring
+  # such projects up one level.
+  if (type(node) == dict and
+      len(node) == 1 and
+      node.keys()[0] == parent + '.vcproj'):
+    return node[node.keys()[0]]
+  if type(node) != dict:
+    return node
+  for child in node:
+    node[child] = _CollapseSingles(child, node[child])
+  return node
+
+
+def _GatherSolutionFolders(sln_projects, project_objects, flat):
+  root = {}
+  # Convert into a tree of dicts on path.
+  for p in sln_projects:
+    gyp_file, target = gyp.common.ParseQualifiedTarget(p)[0:2]
+    gyp_dir = os.path.dirname(gyp_file)
+    path_dict = _GetPathDict(root, gyp_dir)
+    path_dict[target + '.vcproj'] = project_objects[p]
+  # Walk down from the top until we hit a folder that has more than one
+  # entry.  In practice, this strips the top-level "src/" dir from the
+  # hierarchy in the solution.
+  while len(root) == 1 and type(root[root.keys()[0]]) == dict:
+    root = root[root.keys()[0]]
+  # Collapse singles.
+  root = _CollapseSingles('', root)
+  # Merge buckets until everything is a root entry.
+ return _DictsToFolders('', root, flat) + + +def _GetPathOfProject(qualified_target, spec, options, msvs_version): + default_config = _GetDefaultConfiguration(spec) + proj_filename = default_config.get('msvs_existing_vcproj') + if not proj_filename: + proj_filename = (spec['target_name'] + options.suffix + + msvs_version.ProjectExtension()) + + build_file = gyp.common.BuildFile(qualified_target) + proj_path = os.path.join(os.path.split(build_file)[0], proj_filename) + fix_prefix = None + if options.generator_output: + project_dir_path = os.path.dirname(os.path.abspath(proj_path)) + proj_path = os.path.join(options.generator_output, proj_path) + fix_prefix = gyp.common.RelativePath(project_dir_path, + os.path.dirname(proj_path)) + return proj_path, fix_prefix + + +def _GetPlatformOverridesOfProject(spec): + # Prepare a dict indicating which project configurations are used for which + # solution configurations for this target. + config_platform_overrides = {} + for config_name, c in spec['configurations'].iteritems(): + config_fullname = _ConfigFullName(config_name, c) + platform = c.get('msvs_target_platform', _ConfigPlatform(c)) + fixed_config_fullname = '%s|%s' % ( + _ConfigBaseName(config_name, _ConfigPlatform(c)), platform) + config_platform_overrides[config_fullname] = fixed_config_fullname + return config_platform_overrides + + +def _CreateProjectObjects(target_list, target_dicts, options, msvs_version): + """Create a MSVSProject object for the targets found in target list. + + Arguments: + target_list: the list of targets to generate project objects for. + target_dicts: the dictionary of specifications. + options: global generator options. + msvs_version: the MSVSVersion object. + Returns: + A set of created projects, keyed by target. + """ + global fixpath_prefix + # Generate each project. + projects = {} + for qualified_target in target_list: + spec = target_dicts[qualified_target] + if spec['toolset'] != 'target': + raise Exception( + 'Multiple toolsets not supported in msvs build (target %s)' % + qualified_target) + proj_path, fixpath_prefix = _GetPathOfProject(qualified_target, spec, + options, msvs_version) + guid = _GetGuidOfProject(proj_path, spec) + overrides = _GetPlatformOverridesOfProject(spec) + build_file = gyp.common.BuildFile(qualified_target) + # Create object for this project. + obj = MSVSNew.MSVSProject( + _FixPath(proj_path), + name=spec['target_name'], + guid=guid, + spec=spec, + build_file=build_file, + config_platform_overrides=overrides, + fixpath_prefix=fixpath_prefix) + projects[qualified_target] = obj + # Set all the dependencies + for project in projects.values(): + deps = project.spec.get('dependencies', []) + deps = [projects[d] for d in deps] + project.set_dependencies(deps) + return projects + + +def CalculateVariables(default_variables, params): + """Generated variables that require params to be known.""" + + generator_flags = params.get('generator_flags', {}) + + # Select project file format version (if unset, default to auto detecting). + msvs_version = MSVSVersion.SelectVisualStudioVersion( + generator_flags.get('msvs_version', 'auto')) + # Stash msvs_version for later (so we don't have to probe the system twice). + params['msvs_version'] = msvs_version + + # Set a variable so conditions can be based on msvs_version. 
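+  # (Example value assumed from MSVSVersion.ShortName(): '2008', letting a
+  # gyp file write conditions like ['MSVS_VERSION=="2008"', { ... }].)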
+ default_variables['MSVS_VERSION'] = msvs_version.ShortName() + + # To determine processor word size on Windows, in addition to checking + # PROCESSOR_ARCHITECTURE (which reflects the word size of the current + # process), it is also necessary to check PROCESSOR_ARCITEW6432 (which + # contains the actual word size of the system when running thru WOW64). + if (os.environ.get('PROCESSOR_ARCHITECTURE', '').find('64') >= 0 or + os.environ.get('PROCESSOR_ARCHITEW6432', '').find('64') >= 0): + default_variables['MSVS_OS_BITS'] = 64 + else: + default_variables['MSVS_OS_BITS'] = 32 + + +def _ShardName(name, number): + """Add a shard number to the end of a target. + + Arguments: + name: name of the target (foo#target) + number: shard number + Returns: + Target name with shard added (foo_1#target) + """ + parts = name.rsplit('#', 1) + parts[0] = '%s_%d' % (parts[0], number) + return '#'.join(parts) + + +def _ShardTargets(target_list, target_dicts): + """Shard some targets apart to work around the linkers limits. + + Arguments: + target_list: List of target pairs: 'base/base.gyp:base'. + target_dicts: Dict of target properties keyed on target pair. + Returns: + Tuple of the new sharded versions of the inputs. + """ + # Gather the targets to shard, and how many pieces. + targets_to_shard = {} + for t in target_dicts: + shards = int(target_dicts[t].get('msvs_shard', 0)) + if shards: + targets_to_shard[t] = shards + # Shard target_list. + new_target_list = [] + for t in target_list: + if t in targets_to_shard: + for i in range(targets_to_shard[t]): + new_target_list.append(_ShardName(t, i)) + else: + new_target_list.append(t) + # Shard target_dict. + new_target_dicts = {} + for t in target_dicts: + if t in targets_to_shard: + for i in range(targets_to_shard[t]): + name = _ShardName(t, i) + new_target_dicts[name] = copy.copy(target_dicts[t]) + new_target_dicts[name]['target_name'] = _ShardName( + new_target_dicts[name]['target_name'], i) + sources = new_target_dicts[name].get('sources', []) + new_sources = [] + for pos in range(i, len(sources), targets_to_shard[t]): + new_sources.append(sources[pos]) + new_target_dicts[name]['sources'] = new_sources + else: + new_target_dicts[t] = target_dicts[t] + # Shard dependencies. + for t in new_target_dicts: + dependencies = copy.copy(new_target_dicts[t].get('dependencies', [])) + new_dependencies = [] + for d in dependencies: + if d in targets_to_shard: + for i in range(targets_to_shard[d]): + new_dependencies.append(_ShardName(d, i)) + else: + new_dependencies.append(d) + new_target_dicts[t]['dependencies'] = new_dependencies + + return (new_target_list, new_target_dicts) + + +def GenerateOutput(target_list, target_dicts, data, params): + """Generate .sln and .vcproj files. + + This is the entry point for this generator. + Arguments: + target_list: List of target pairs: 'base/base.gyp:base'. + target_dicts: Dict of target properties keyed on target pair. + data: Dictionary containing per .gyp data. + """ + global fixpath_prefix + + options = params['options'] + + # Get the project file format version back out of where we stashed it in + # GeneratorCalculatedVariables. + msvs_version = params['msvs_version'] + + generator_flags = params.get('generator_flags', {}) + + # Optionally shard targets marked with 'msvs_shard': SHARD_COUNT. + (target_list, target_dicts) = _ShardTargets(target_list, target_dicts) + + # Prepare the set of configurations. 
+ configs = set()
+ for qualified_target in target_list:
+ spec = target_dicts[qualified_target]
+ for config_name, config in spec['configurations'].iteritems():
+ configs.add(_ConfigFullName(config_name, config))
+ configs = list(configs)
+
+ # Figure out all the projects that will be generated and their guids.
+ project_objects = _CreateProjectObjects(target_list, target_dicts, options,
+ msvs_version)
+
+ # Generate each project.
+ for project in project_objects.values():
+ fixpath_prefix = project.fixpath_prefix
+ _GenerateProject(project, options, msvs_version, generator_flags)
+ fixpath_prefix = None
+
+ for build_file in data:
+ # Validate build_file extension.
+ if build_file[-4:] != '.gyp':
+ continue
+ sln_path = build_file[:-4] + options.suffix + '.sln'
+ if options.generator_output:
+ sln_path = os.path.join(options.generator_output, sln_path)
+ # Get projects in the solution, and their dependents.
+ sln_projects = gyp.common.BuildFileTargets(target_list, build_file)
+ sln_projects += gyp.common.DeepDependencyTargets(target_dicts, sln_projects)
+ # Create folder hierarchy.
+ root_entries = _GatherSolutionFolders(
+ sln_projects, project_objects, flat=msvs_version.FlatSolution())
+ # Create solution.
+ sln = MSVSNew.MSVSSolution(sln_path,
+ entries=root_entries,
+ variants=configs,
+ websiteProperties=False,
+ version=msvs_version)
+ sln.Write()
+
+
+def _GenerateMSBuildFiltersFile(filters_path, source_files,
+ extension_to_rule_name):
+ """Generate the filters file.
+
+ This file is used by Visual Studio to organize the presentation of source
+ files into folders.
+
+ Arguments:
+ filters_path: The path of the file to be created.
+ source_files: The hierarchical structure of all the sources.
+ extension_to_rule_name: A dictionary mapping file extensions to rules.
+ """
+ filter_group = []
+ source_group = []
+ _AppendFiltersForMSBuild('', source_files, extension_to_rule_name,
+ filter_group, source_group)
+ if filter_group:
+ content = ['Project',
+ {'ToolsVersion': '4.0',
+ 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'
+ },
+ ['ItemGroup'] + filter_group,
+ ['ItemGroup'] + source_group
+ ]
+ easy_xml.WriteXmlIfChanged(content, filters_path)
+ elif os.path.exists(filters_path):
+ # We don't need this filter anymore. Delete the old filter file.
+ os.unlink(filters_path)
+
+
+def _AppendFiltersForMSBuild(parent_filter_name, sources,
+ extension_to_rule_name,
+ filter_group, source_group):
+ """Creates the list of filters and sources to be added to the filter file.
+
+ Args:
+ parent_filter_name: The name of the filter under which the sources are
+ found.
+ sources: The hierarchy of filters and sources to process.
+ extension_to_rule_name: A dictionary mapping file extensions to rules.
+ filter_group: The list to which filter entries will be appended.
+ source_group: The list to which source entries will be appended.
+ """
+ for source in sources:
+ if isinstance(source, MSVSProject.Filter):
+ # We have a sub-filter. Create the name of that sub-filter.
+ if not parent_filter_name:
+ filter_name = source.name
+ else:
+ filter_name = '%s\\%s' % (parent_filter_name, source.name)
+ # Add the filter to the group.
+ filter_group.append(
+ ['Filter', {'Include': filter_name},
+ ['UniqueIdentifier', MSVSNew.MakeGuid(source.name)]])
+ # Recurse and add its dependents.
+ _AppendFiltersForMSBuild(filter_name, source.contents,
+ extension_to_rule_name,
+ filter_group, source_group)
+ else:
+ # It's a source. Create a source entry.
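+ # Illustrative shape of one entry (hypothetical source): 'a\\b.cc' under
+ # parent filter 'a' becomes ['ClCompile', {'Include': 'a\\b.cc'},
+ # ['Filter', 'a']], which easy_xml later serializes to XML.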
+ _, element = _MapFileToMsBuildSourceType(source, extension_to_rule_name) + source_entry = [element, {'Include': source}] + # Specify the filter it is part of, if any. + if parent_filter_name: + source_entry.append(['Filter', parent_filter_name]) + source_group.append(source_entry) + + +def _MapFileToMsBuildSourceType(source, extension_to_rule_name): + """Returns the group and element type of the source file. + + Arguments: + source: The source file name. + extension_to_rule_name: A dictionary mapping file extensions to rules. + + Returns: + A pair of (group this file should be part of, the label of element) + """ + _, ext = os.path.splitext(source) + if ext in extension_to_rule_name: + group = 'rule' + element = extension_to_rule_name[ext] + elif ext in ['.cc', '.cpp', '.c', '.cxx']: + group = 'compile' + element = 'ClCompile' + elif ext in ['.h', '.hxx']: + group = 'include' + element = 'ClInclude' + elif ext == '.rc': + group = 'resource' + element = 'ResourceCompile' + elif ext == '.idl': + group = 'midl' + element = 'Midl' + else: + group = 'none' + element = 'None' + return (group, element) + + +def _GenerateRulesForMSBuild(output_dir, options, spec, + sources, excluded_sources, + props_files_of_rules, targets_files_of_rules, + actions_to_add, extension_to_rule_name): + # MSBuild rules are implemented using three files: an XML file, a .targets + # file and a .props file. + # See http://blogs.msdn.com/b/vcblog/archive/2010/04/21/quick-help-on-vs2010-custom-build-rule.aspx + # for more details. + rules = spec.get('rules', []) + rules_native = [r for r in rules if not int(r.get('msvs_external_rule', 0))] + rules_external = [r for r in rules if int(r.get('msvs_external_rule', 0))] + + msbuild_rules = [] + for rule in rules_native: + msbuild_rule = MSBuildRule(rule, spec) + msbuild_rules.append(msbuild_rule) + extension_to_rule_name[msbuild_rule.extension] = msbuild_rule.rule_name + if msbuild_rules: + base = spec['target_name'] + options.suffix + props_name = base + '.props' + targets_name = base + '.targets' + xml_name = base + '.xml' + + props_files_of_rules.add(props_name) + targets_files_of_rules.add(targets_name) + + props_path = os.path.join(output_dir, props_name) + targets_path = os.path.join(output_dir, targets_name) + xml_path = os.path.join(output_dir, xml_name) + + _GenerateMSBuildRulePropsFile(props_path, msbuild_rules) + _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules) + _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules) + + if rules_external: + _GenerateExternalRules(rules_external, output_dir, spec, + sources, options, actions_to_add) + _AdjustSourcesForRules(rules, sources, excluded_sources) + + +class MSBuildRule(object): + """Used to store information used to generate an MSBuild rule. + + Attributes: + rule_name: The rule name, sanitized to use in XML. + target_name: The name of the target. + after_targets: The name of the AfterTargets element. + before_targets: The name of the BeforeTargets element. + depends_on: The name of the DependsOn element. + compute_output: The name of the ComputeOutput element. + dirs_to_make: The name of the DirsToMake element. + tlog: The name of the _tlog element. + extension: The extension this rule applies to. + description: The message displayed when this rule is invoked. + additional_dependencies: A string listing additional dependencies. + outputs: The outputs of this rule. + command: The command used to run the rule. 
+ """ + + def __init__(self, rule, spec): + self.display_name = rule['rule_name'] + # Assure that the rule name is only characters and numbers + self.rule_name = re.sub(r'\W', '_', self.display_name) + # Create the various element names, following the example set by the + # Visual Studio 2008 to 2010 conversion. I don't know if VS2010 + # is sensitive to the exact names. + self.target_name = '_' + self.rule_name + self.after_targets = self.rule_name + 'AfterTargets' + self.before_targets = self.rule_name + 'BeforeTargets' + self.depends_on = self.rule_name + 'DependsOn' + self.compute_output = 'Compute%sOutput' % self.rule_name + self.dirs_to_make = self.rule_name + 'DirsToMake' + self.tlog = self.rule_name + '_tlog' + self.extension = rule['extension'] + if not self.extension.startswith('.'): + self.extension = '.' + self.extension + + self.description = MSVSSettings.ConvertVCMacrosToMSBuild( + rule.get('message', self.rule_name)) + old_additional_dependencies = _FixPaths(rule.get('inputs', [])) + self.additional_dependencies = ( + ';'.join([MSVSSettings.ConvertVCMacrosToMSBuild(i) + for i in old_additional_dependencies])) + old_outputs = _FixPaths(rule.get('outputs', [])) + self.outputs = ';'.join([MSVSSettings.ConvertVCMacrosToMSBuild(i) + for i in old_outputs]) + old_command = _BuildCommandLineForRule(spec, rule, has_input_path=True) + self.command = MSVSSettings.ConvertVCMacrosToMSBuild(old_command) + + +def _GenerateMSBuildRulePropsFile(props_path, msbuild_rules): + """Generate the .props file.""" + content = ['Project', + {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'}] + for rule in msbuild_rules: + content.extend([ + ['PropertyGroup', + {'Condition': "'$(%s)' == '' and '$(%s)' == '' and " + "'$(ConfigurationType)' != 'Makefile'" % (rule.before_targets, + rule.after_targets) + }, + [rule.before_targets, 'Midl'], + [rule.after_targets, 'CustomBuild'], + ], + ['PropertyGroup', + [rule.depends_on, + {'Condition': "'$(ConfigurationType)' != 'Makefile'"}, + '_SelectedFiles;$(%s)' % rule.depends_on + ], + ], + ['ItemDefinitionGroup', + [rule.rule_name, + ['CommandLineTemplate', rule.command], + ['Outputs', rule.outputs], + ['ExecutionDescription', rule.description], + ['AdditionalDependencies', rule.additional_dependencies], + ], + ] + ]) + easy_xml.WriteXmlIfChanged(content, props_path) + + +def _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules): + """Generate the .targets file.""" + content = ['Project', + {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003' + } + ] + item_group = [ + 'ItemGroup', + ['PropertyPageSchema', + {'Include': '$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml'} + ] + ] + for rule in msbuild_rules: + item_group.append( + ['AvailableItemName', + {'Include': rule.rule_name}, + ['Targets', rule.target_name], + ]) + content.append(item_group) + + for rule in msbuild_rules: + content.append( + ['UsingTask', + {'TaskName': rule.rule_name, + 'TaskFactory': 'XamlTaskFactory', + 'AssemblyName': 'Microsoft.Build.Tasks.v4.0' + }, + ['Task', '$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml'], + ]) + for rule in msbuild_rules: + rule_name = rule.rule_name + target_outputs = '%%(%s.Outputs)' % rule_name + target_inputs = ('%%(%s.Identity);%%(%s.AdditionalDependencies);' + '$(MSBuildProjectFile)') % (rule_name, rule_name) + rule_inputs = '%%(%s.Identity)' % rule_name + extension_condition = ("'%(Extension)'=='.obj' or " + "'%(Extension)'=='.res' or " + "'%(Extension)'=='.rsc' or " + "'%(Extension)'=='.lib'") + remove_section = 
[ + 'ItemGroup', + {'Condition': "'@(SelectedFiles)' != ''"}, + [rule_name, + {'Remove': '@(%s)' % rule_name, + 'Condition': "'%(Identity)' != '@(SelectedFiles)'" + } + ] + ] + logging_section = [ + 'ItemGroup', + [rule.tlog, + {'Include': '%%(%s.Outputs)' % rule_name, + 'Condition': ("'%%(%s.Outputs)' != '' and " + "'%%(%s.ExcludedFromBuild)' != 'true'" % + (rule_name, rule_name)) + }, + ['Source', "@(%s, '|')" % rule_name], + ], + ] + message_section = [ + 'Message', + {'Importance': 'High', + 'Text': '%%(%s.ExecutionDescription)' % rule_name + } + ] + write_lines_section = [ + 'WriteLinesToFile', + {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != " + "'true'" % (rule.tlog, rule.tlog), + 'File': '$(IntDir)$(ProjectName).write.1.tlog', + 'Lines': "^%%(%s.Source);@(%s->'%%(Fullpath)')" % (rule.tlog, + rule.tlog) + } + ] + command_and_input_section = [ + rule_name, + {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != " + "'true'" % (rule_name, rule_name), + 'CommandLineTemplate': '%%(%s.CommandLineTemplate)' % rule_name, + 'AdditionalOptions': '%%(%s.AdditionalOptions)' % rule_name, + 'Inputs': rule_inputs + } + ] + content.extend([ + ['Target', + {'Name': rule.target_name, + 'BeforeTargets': '$(%s)' % rule.before_targets, + 'AfterTargets': '$(%s)' % rule.after_targets, + 'Condition': "'@(%s)' != ''" % rule_name, + 'DependsOnTargets': '$(%s);%s' % (rule.depends_on, + rule.compute_output), + 'Outputs': target_outputs, + 'Inputs': target_inputs + }, + remove_section, + logging_section, + message_section, + write_lines_section, + command_and_input_section, + ], + ['PropertyGroup', + ['ComputeLinkInputsTargets', + '$(ComputeLinkInputsTargets);', + '%s;' % rule.compute_output + ], + ['ComputeLibInputsTargets', + '$(ComputeLibInputsTargets);', + '%s;' % rule.compute_output + ], + ], + ['Target', + {'Name': rule.compute_output, + 'Condition': "'@(%s)' != ''" % rule_name + }, + ['ItemGroup', + [rule.dirs_to_make, + {'Condition': "'@(%s)' != '' and " + "'%%(%s.ExcludedFromBuild)' != 'true'" % (rule_name, rule_name), + 'Include': '%%(%s.Outputs)' % rule_name + } + ], + ['Link', + {'Include': '%%(%s.Identity)' % rule.dirs_to_make, + 'Condition': extension_condition + } + ], + ['Lib', + {'Include': '%%(%s.Identity)' % rule.dirs_to_make, + 'Condition': extension_condition + } + ], + ['ImpLib', + {'Include': '%%(%s.Identity)' % rule.dirs_to_make, + 'Condition': extension_condition + } + ], + ], + ['MakeDir', + {'Directories': ("@(%s->'%%(RootDir)%%(Directory)')" % + rule.dirs_to_make) + } + ] + ], + ]) + easy_xml.WriteXmlIfChanged(content, targets_path) + + +def _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules): + # Generate the .xml file + content = [ + 'ProjectSchemaDefinitions', + {'xmlns': ('clr-namespace:Microsoft.Build.Framework.XamlTypes;' + 'assembly=Microsoft.Build.Framework'), + 'xmlns:x': 'http://schemas.microsoft.com/winfx/2006/xaml', + 'xmlns:sys': 'clr-namespace:System;assembly=mscorlib', + 'xmlns:transformCallback': + 'Microsoft.Cpp.Dev10.ConvertPropertyCallback' + } + ] + for rule in msbuild_rules: + content.extend([ + ['Rule', + {'Name': rule.rule_name, + 'PageTemplate': 'tool', + 'DisplayName': rule.display_name, + 'Order': '200' + }, + ['Rule.DataSource', + ['DataSource', + {'Persistence': 'ProjectFile', + 'ItemType': rule.rule_name + } + ] + ], + ['Rule.Categories', + ['Category', + {'Name': 'General'}, + ['Category.DisplayName', + ['sys:String', 'General'], + ], + ], + ['Category', + {'Name': 'Command Line', + 'Subtype': 'CommandLine' + }, + 
['Category.DisplayName', + ['sys:String', 'Command Line'], + ], + ], + ], + ['StringListProperty', + {'Name': 'Inputs', + 'Category': 'Command Line', + 'IsRequired': 'true', + 'Switch': ' ' + }, + ['StringListProperty.DataSource', + ['DataSource', + {'Persistence': 'ProjectFile', + 'ItemType': rule.rule_name, + 'SourceType': 'Item' + } + ] + ], + ], + ['StringProperty', + {'Name': 'CommandLineTemplate', + 'DisplayName': 'Command Line', + 'Visible': 'False', + 'IncludeInCommandLine': 'False' + } + ], + ['DynamicEnumProperty', + {'Name': rule.before_targets, + 'Category': 'General', + 'EnumProvider': 'Targets', + 'IncludeInCommandLine': 'False' + }, + ['DynamicEnumProperty.DisplayName', + ['sys:String', 'Execute Before'], + ], + ['DynamicEnumProperty.Description', + ['sys:String', 'Specifies the targets for the build customization' + ' to run before.' + ], + ], + ['DynamicEnumProperty.ProviderSettings', + ['NameValuePair', + {'Name': 'Exclude', + 'Value': '^%s|^Compute' % rule.before_targets + } + ] + ], + ['DynamicEnumProperty.DataSource', + ['DataSource', + {'Persistence': 'ProjectFile', + 'HasConfigurationCondition': 'true' + } + ] + ], + ], + ['DynamicEnumProperty', + {'Name': rule.after_targets, + 'Category': 'General', + 'EnumProvider': 'Targets', + 'IncludeInCommandLine': 'False' + }, + ['DynamicEnumProperty.DisplayName', + ['sys:String', 'Execute After'], + ], + ['DynamicEnumProperty.Description', + ['sys:String', ('Specifies the targets for the build customization' + ' to run after.') + ], + ], + ['DynamicEnumProperty.ProviderSettings', + ['NameValuePair', + {'Name': 'Exclude', + 'Value': '^%s|^Compute' % rule.after_targets + } + ] + ], + ['DynamicEnumProperty.DataSource', + ['DataSource', + {'Persistence': 'ProjectFile', + 'ItemType': '', + 'HasConfigurationCondition': 'true' + } + ] + ], + ], + ['StringListProperty', + {'Name': 'Outputs', + 'DisplayName': 'Outputs', + 'Visible': 'False', + 'IncludeInCommandLine': 'False' + } + ], + ['StringProperty', + {'Name': 'ExecutionDescription', + 'DisplayName': 'Execution Description', + 'Visible': 'False', + 'IncludeInCommandLine': 'False' + } + ], + ['StringListProperty', + {'Name': 'AdditionalDependencies', + 'DisplayName': 'Additional Dependencies', + 'IncludeInCommandLine': 'False', + 'Visible': 'false' + } + ], + ['StringProperty', + {'Subtype': 'AdditionalOptions', + 'Name': 'AdditionalOptions', + 'Category': 'Command Line' + }, + ['StringProperty.DisplayName', + ['sys:String', 'Additional Options'], + ], + ['StringProperty.Description', + ['sys:String', 'Additional Options'], + ], + ], + ], + ['ItemType', + {'Name': rule.rule_name, + 'DisplayName': rule.display_name + } + ], + ['FileExtension', + {'Name': '*' + rule.extension, + 'ContentType': rule.rule_name + } + ], + ['ContentType', + {'Name': rule.rule_name, + 'DisplayName': '', + 'ItemType': rule.rule_name + } + ] + ]) + easy_xml.WriteXmlIfChanged(content, xml_path) + + +def _GetConfigurationAndPlatform(name, settings): + configuration = name.rsplit('_', 1)[0] + platform = settings.get('msvs_configuration_platform', 'Win32') + return (configuration, platform) + + +def _GetConfigurationCondition(name, settings): + return (r"'$(Configuration)|$(Platform)'=='%s|%s'" % + _GetConfigurationAndPlatform(name, settings)) + + +def _GetMSBuildProjectConfigurations(configurations): + group = ['ItemGroup', {'Label': 'ProjectConfigurations'}] + for (name, settings) in sorted(configurations.iteritems()): + configuration, platform = _GetConfigurationAndPlatform(name, settings) + designation = 
'%s|%s' % (configuration, platform) + group.append( + ['ProjectConfiguration', {'Include': designation}, + ['Configuration', configuration], + ['Platform', platform]]) + return [group] + + +def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name): + prefix = spec.get('product_prefix', '') + product_name = spec.get('product_name', '$(ProjectName)') + target_name = prefix + product_name + namespace = os.path.splitext(gyp_file_name)[0] + return [ + ['PropertyGroup', {'Label': 'Globals'}, + ['ProjectGuid', guid], + ['Keyword', 'Win32Proj'], + ['RootNamespace', namespace], + ['TargetName', target_name], + ] + ] + + +def _GetMSBuildConfigurationDetails(spec, build_file): + properties = {} + for name, settings in spec['configurations'].iteritems(): + msbuild_attributes = _GetMSBuildAttributes(spec, settings, build_file) + condition = _GetConfigurationCondition(name, settings) + character_set = msbuild_attributes.get('CharacterSet') + _AddConditionalProperty(properties, condition, 'ConfigurationType', + msbuild_attributes['ConfigurationType']) + if character_set: + _AddConditionalProperty(properties, condition, 'CharacterSet', + character_set) + return _GetMSBuildPropertyGroup(spec, 'Configuration', properties) + + +def _GetMSBuildPropertySheets(configurations): + user_props = r'$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props' + return [ + ['ImportGroup', + {'Label': 'PropertySheets'}, + ['Import', + {'Project': user_props, + 'Condition': "exists('%s')" % user_props, + 'Label': 'LocalAppDataPlatform' + } + ] + ] + ] + + +def _GetMSBuildAttributes(spec, config, build_file): + # Use the MSVS attributes and convert them. In the future, we may want to + # support Gyp files specifying 'msbuild_configuration_attributes' directly. + config_type = _GetMSVSConfigurationType(spec, build_file) + msvs_attributes = _GetMSVSAttributes(spec, config, config_type) + msbuild_attributes = {} + for a in msvs_attributes: + if a in ['IntermediateDirectory', 'OutputDirectory']: + directory = MSVSSettings.ConvertVCMacrosToMSBuild(msvs_attributes[a]) + if not directory.endswith('\\'): + directory += '\\' + msbuild_attributes[a] = directory + elif a == 'CharacterSet': + msbuild_attributes[a] = { + '0': 'MultiByte', + '1': 'Unicode' + }[msvs_attributes[a]] + elif a == 'ConfigurationType': + msbuild_attributes[a] = { + '1': 'Application', + '2': 'DynamicLibrary', + '4': 'StaticLibrary', + '10': 'Utility' + }[msvs_attributes[a]] + else: + print 'Warning: Do not know how to convert MSVS attribute ' + a + return msbuild_attributes + + +def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file): + # TODO(jeanluc) We could optimize out the following and do it only if + # there are actions. + # TODO(jeanluc) Handle the equivalent of setting 'CYGWIN=nontsec'. + new_paths = [] + cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])[0] + if cygwin_dirs: + cyg_path = '$(MSBuildProjectDirectory)\\%s\\bin\\' % _FixPath(cygwin_dirs) + new_paths.append(cyg_path) + # TODO(jeanluc) Change the convention to have both a cygwin_dir and a + # python_dir. 
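+ # Illustrative: with the default msvs_cygwin_dirs of ['.'], cyg_path is
+ # '$(MSBuildProjectDirectory)\.\bin\' and the replace below only rewrites
+ # paths that actually contain 'cygwin\bin'.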
+ python_path = cyg_path.replace('cygwin\\bin', 'python_26')
+ new_paths.append(python_path)
+ if new_paths:
+ new_paths = '$(ExecutablePath);' + ';'.join(new_paths)
+
+ properties = {}
+ for (name, configuration) in sorted(configurations.iteritems()):
+ condition = _GetConfigurationCondition(name, configuration)
+ attributes = _GetMSBuildAttributes(spec, configuration, build_file)
+ msbuild_settings = configuration['finalized_msbuild_settings']
+ _AddConditionalProperty(properties, condition, 'IntDir',
+ attributes['IntermediateDirectory'])
+ _AddConditionalProperty(properties, condition, 'OutDir',
+ attributes['OutputDirectory'])
+ if new_paths:
+ _AddConditionalProperty(properties, condition, 'ExecutablePath',
+ new_paths)
+ tool_settings = msbuild_settings.get('', {})
+ for name, value in sorted(tool_settings.iteritems()):
+ formatted_value = _GetValueFormattedForMSBuild('', name, value)
+ _AddConditionalProperty(properties, condition, name, formatted_value)
+ return _GetMSBuildPropertyGroup(spec, None, properties)
+
+
+def _AddConditionalProperty(properties, condition, name, value):
+ """Adds a property / conditional value pair to a dictionary.
+
+ Arguments:
+ properties: The dictionary to be modified. The key is the name of the
+ property. The value is itself a dictionary; its key is the value and
+ the value a list of conditions for which this value is true.
+ condition: The condition under which the named property has the value.
+ name: The name of the property.
+ value: The value of the property.
+ """
+ if name not in properties:
+ properties[name] = {}
+ values = properties[name]
+ if value not in values:
+ values[value] = []
+ conditions = values[value]
+ conditions.append(condition)
+
+
+def _GetMSBuildPropertyGroup(spec, label, properties):
+ """Returns a PropertyGroup definition for the specified properties.
+
+ Arguments:
+ spec: The target project dict.
+ label: An optional label for the PropertyGroup.
+ properties: The dictionary to be converted. The key is the name of the
+ property. The value is itself a dictionary; its key is the value and
+ the value a list of conditions for which this value is true.
+ """
+ group = ['PropertyGroup']
+ if label:
+ group.append({'Label': label})
+ num_configurations = len(spec['configurations'])
+ for name, values in sorted(properties.iteritems()):
+ for value, conditions in sorted(values.iteritems()):
+ if len(conditions) == num_configurations:
+ # If the value is the same for all configurations,
+ # just add one unconditional entry.
+ group.append([name, value])
+ else:
+ for condition in conditions:
+ group.append([name, {'Condition': condition}, value])
+ return [group]
+
+
+def _GetMSBuildToolSettingsSections(spec, configurations):
+ groups = []
+ for (name, configuration) in sorted(configurations.iteritems()):
+ msbuild_settings = configuration['finalized_msbuild_settings']
+ group = ['ItemDefinitionGroup',
+ {'Condition': _GetConfigurationCondition(name, configuration)}
+ ]
+ for tool_name, tool_settings in sorted(msbuild_settings.iteritems()):
+ # Skip the tool named '' which is a holder of global settings handled
+ # by _GetMSBuildConfigurationGlobalProperties.
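+ # Illustrative shape of one finished group (names hypothetical):
+ # ['ItemDefinitionGroup',
+ #  {'Condition': "'$(Configuration)|$(Platform)'=='Debug|Win32'"},
+ #  ['ClCompile', ['WarningLevel', 'Level4']]]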
+ if tool_name: + if tool_settings: + tool = [tool_name] + for name, value in sorted(tool_settings.iteritems()): + formatted_value = _GetValueFormattedForMSBuild(tool_name, name, + value) + tool.append([name, formatted_value]) + group.append(tool) + groups.append(group) + return groups + + +def _FinalizeMSBuildSettings(spec, configuration): + if 'msbuild_settings' in configuration: + converted = False + msbuild_settings = configuration['msbuild_settings'] + MSVSSettings.ValidateMSBuildSettings(msbuild_settings) + else: + converted = True + msvs_settings = configuration.get('msvs_settings', {}) + msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(msvs_settings) + include_dirs, resource_include_dirs = _GetIncludeDirs(configuration) + libraries = _GetLibraries(spec) + out_file, _, msbuild_tool = _GetOutputFilePathAndTool(spec) + defines = _GetDefines(configuration) + if converted: + # Visual Studio 2010 has TR1 + defines = [d for d in defines if d != '_HAS_TR1=0'] + # Warn of ignored settings + ignored_settings = ['msvs_prebuild', 'msvs_postbuild', 'msvs_tool_files'] + for ignored_setting in ignored_settings: + value = configuration.get(ignored_setting) + if value: + print ('Warning: The automatic conversion to MSBuild does not handle ' + '%s. Ignoring setting of %s' % (ignored_setting, str(value))) + + defines = [_EscapeCppDefineForMSBuild(d) for d in defines] + disabled_warnings = _GetDisabledWarnings(configuration) + # TODO(jeanluc) Validate & warn that we don't translate + # prebuild = configuration.get('msvs_prebuild') + # postbuild = configuration.get('msvs_postbuild') + def_file = _GetModuleDefinition(spec) + precompiled_header = configuration.get('msvs_precompiled_header') + + # Add the information to the appropriate tool + # TODO(jeanluc) We could optimize and generate these settings only if + # the corresponding files are found, e.g. don't generate ResourceCompile + # if you don't have any resources. + _ToolAppend(msbuild_settings, 'ClCompile', + 'AdditionalIncludeDirectories', include_dirs) + _ToolAppend(msbuild_settings, 'ResourceCompile', + 'AdditionalIncludeDirectories', resource_include_dirs) + # Add in libraries. + _ToolAppend(msbuild_settings, 'Link', 'AdditionalDependencies', libraries) + if out_file: + _ToolAppend(msbuild_settings, msbuild_tool, 'OutputFile', out_file, + only_if_unset=True) + # Add defines. + _ToolAppend(msbuild_settings, 'ClCompile', + 'PreprocessorDefinitions', defines) + _ToolAppend(msbuild_settings, 'ResourceCompile', + 'PreprocessorDefinitions', defines) + # Add disabled warnings. + _ToolAppend(msbuild_settings, 'ClCompile', + 'DisableSpecificWarnings', disabled_warnings) + # Turn on precompiled headers if appropriate. + if precompiled_header: + precompiled_header = os.path.split(precompiled_header)[1] + _ToolAppend(msbuild_settings, 'ClCompile', 'PrecompiledHeader', 'Use') + _ToolAppend(msbuild_settings, 'ClCompile', + 'PrecompiledHeaderFile', precompiled_header) + _ToolAppend(msbuild_settings, 'ClCompile', + 'ForcedIncludeFiles', precompiled_header) + # Loadable modules don't generate import libraries; + # tell dependent projects to not expect one. + if spec['type'] == 'loadable_module': + _ToolAppend(msbuild_settings, '', 'IgnoreImportLibrary', 'true') + # Set the module definition file if any. 
+ if def_file:
+ _ToolAppend(msbuild_settings, 'Link', 'ModuleDefinitionFile', def_file)
+ configuration['finalized_msbuild_settings'] = msbuild_settings
+
+
+def _GetValueFormattedForMSBuild(tool_name, name, value):
+ if type(value) == list:
+ # For some settings, VS2010 does not automatically extend the settings.
+ # TODO(jeanluc) Is this what we want?
+ if name in ['AdditionalDependencies',
+ 'AdditionalIncludeDirectories',
+ 'AdditionalLibraryDirectories',
+ 'AdditionalOptions',
+ 'DelayLoadDLLs',
+ 'DisableSpecificWarnings',
+ 'PreprocessorDefinitions']:
+ value.append('%%(%s)' % name)
+ # For most tools, entries in a list should be separated with ';' but some
+ # settings use a space. Check for those first.
+ exceptions = {
+ 'ClCompile': ['AdditionalOptions'],
+ 'Link': ['AdditionalOptions'],
+ 'Lib': ['AdditionalOptions']}
+ if tool_name in exceptions and name in exceptions[tool_name]:
+ char = ' '
+ else:
+ char = ';'
+ formatted_value = char.join(
+ [MSVSSettings.ConvertVCMacrosToMSBuild(i) for i in value])
+ else:
+ formatted_value = MSVSSettings.ConvertVCMacrosToMSBuild(value)
+ return formatted_value
+
+
+def _VerifySourcesExist(sources, root_dir):
+ """Verifies that all source files exist on disk.
+
+ Checks that all regular source files, i.e. not created at run time,
+ exist on disk. Missing files cause needless recompilation but no otherwise
+ visible errors.
+
+ Arguments:
+ sources: A recursive list of Filter/file names.
+ root_dir: The root directory for the relative path names.
+ """
+ for source in sources:
+ if isinstance(source, MSVSProject.Filter):
+ _VerifySourcesExist(source.contents, root_dir)
+ else:
+ if '$' not in source:
+ full_path = os.path.join(root_dir, source)
+ if not os.path.exists(full_path):
+ print 'Warning: Missing input file ' + full_path + ' pwd=' +\
+ os.getcwd()
+
+
+def _GetMSBuildSources(spec, sources, exclusions, extension_to_rule_name,
+ actions_spec, sources_handled_by_action, list_excluded):
+ groups = ['none', 'midl', 'include', 'compile', 'resource', 'rule']
+ grouped_sources = {}
+ for g in groups:
+ grouped_sources[g] = []
+
+ _AddSources2(spec, sources, exclusions, grouped_sources,
+ extension_to_rule_name, sources_handled_by_action, list_excluded)
+ sources = []
+ for g in groups:
+ if grouped_sources[g]:
+ sources.append(['ItemGroup'] + grouped_sources[g])
+ if actions_spec:
+ sources.append(['ItemGroup'] + actions_spec)
+ return sources
+
+
+def _AddSources2(spec, sources, exclusions, grouped_sources,
+ extension_to_rule_name, sources_handled_by_action,
+ list_excluded):
+ extensions_excluded_from_precompile = []
+ for source in sources:
+ if isinstance(source, MSVSProject.Filter):
+ _AddSources2(spec, source.contents, exclusions, grouped_sources,
+ extension_to_rule_name, sources_handled_by_action,
+ list_excluded)
+ else:
+ if source not in sources_handled_by_action:
+ detail = []
+ excluded_configurations = exclusions.get(source, [])
+ if len(excluded_configurations) == len(spec['configurations']):
+ detail.append(['ExcludedFromBuild', 'true'])
+ else:
+ for config_name, configuration in sorted(excluded_configurations):
+ condition = _GetConfigurationCondition(config_name, configuration)
+ detail.append(['ExcludedFromBuild',
+ {'Condition': condition},
+ 'true'])
+ # Add precompile if needed.
+ for config_name, configuration in spec['configurations'].iteritems():
+ precompiled_source = configuration.get('msvs_precompiled_source', '')
+ if precompiled_source != '':
+ precompiled_source = _FixPath(precompiled_source)
+ if not
extensions_excluded_from_precompile:
+ # If the precompiled header is generated by a C source, we must
+ # not try to use it for C++ sources, and vice versa.
+ basename, extension = os.path.splitext(precompiled_source)
+ if extension == '.c':
+ extensions_excluded_from_precompile = ['.cc', '.cpp', '.cxx']
+ else:
+ extensions_excluded_from_precompile = ['.c']
+
+ if precompiled_source == source:
+ condition = _GetConfigurationCondition(config_name, configuration)
+ detail.append(['PrecompiledHeader',
+ {'Condition': condition},
+ 'Create'
+ ])
+ else:
+ # Turn off precompiled header usage for source files of a
+ # different type than the file that generated the
+ # precompiled header.
+ for extension in extensions_excluded_from_precompile:
+ if source.endswith(extension):
+ detail.append(['PrecompiledHeader', ''])
+ detail.append(['ForcedIncludeFiles', ''])
+
+ group, element = _MapFileToMsBuildSourceType(source,
+ extension_to_rule_name)
+ grouped_sources[group].append([element, {'Include': source}] + detail)
+
+
+def _GetMSBuildProjectReferences(project):
+ references = []
+ if project.dependencies:
+ group = ['ItemGroup']
+ for dependency in project.dependencies:
+ guid = dependency.guid
+ project_dir = os.path.split(project.path)[0]
+ relative_path = gyp.common.RelativePath(dependency.path, project_dir)
+ project_ref = ['ProjectReference',
+ {'Include': relative_path},
+ ['Project', guid],
+ ['ReferenceOutputAssembly', 'false']
+ ]
+ for config in dependency.spec.get('configurations', {}).itervalues():
+ # If it's disabled in any config, turn it off in the reference.
+ if config.get('msvs_2010_disable_uldi_when_referenced', 0):
+ project_ref.append(['UseLibraryDependencyInputs', 'false'])
+ break
+ group.append(project_ref)
+ references.append(group)
+ return references
+
+
+def _GenerateMSBuildProject(project, options, version, generator_flags):
+ spec = project.spec
+ configurations = spec['configurations']
+ project_dir, project_file_name = os.path.split(project.path)
+ msbuildproj_dir = os.path.dirname(project.path)
+ if msbuildproj_dir and not os.path.exists(msbuildproj_dir):
+ os.makedirs(msbuildproj_dir)
+ # Prepare list of sources and excluded sources.
+ gyp_path = _NormalizedSource(project.build_file)
+ relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir)
+
+ gyp_file = os.path.split(project.build_file)[1]
+ sources, excluded_sources = _PrepareListOfSources(spec, gyp_file)
+ # Add rules.
+ actions_to_add = {}
+ props_files_of_rules = set()
+ targets_files_of_rules = set()
+ extension_to_rule_name = {}
+ list_excluded = generator_flags.get('msvs_list_excluded_files', True)
+ _GenerateRulesForMSBuild(project_dir, options, spec,
+ sources, excluded_sources,
+ props_files_of_rules, targets_files_of_rules,
+ actions_to_add, extension_to_rule_name)
+ sources, excluded_sources, excluded_idl = (
+ _AdjustSourcesAndConvertToFilterHierarchy(spec, options,
+ project_dir, sources,
+ excluded_sources,
+ list_excluded))
+ _AddActions(actions_to_add, spec, project.build_file)
+ _AddCopies(actions_to_add, spec)
+
+ # NOTE: this stanza must appear after all actions have been decided.
+ # Don't exclude sources with actions attached, or they won't run.
+ excluded_sources = _FilterActionsFromExcluded( + excluded_sources, actions_to_add) + + exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl) + actions_spec, sources_handled_by_action = _GenerateActionsForMSBuild( + spec, actions_to_add) + + _GenerateMSBuildFiltersFile(project.path + '.filters', sources, + extension_to_rule_name) + _VerifySourcesExist(sources, project_dir) + + for (_, configuration) in configurations.iteritems(): + _FinalizeMSBuildSettings(spec, configuration) + + # Add attributes to root element + + import_default_section = [ + ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.Default.props'}]] + import_cpp_props_section = [ + ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.props'}]] + import_cpp_targets_section = [ + ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.targets'}]] + macro_section = [['PropertyGroup', {'Label': 'UserMacros'}]] + + content = [ + 'Project', + {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003', + 'ToolsVersion': version.ProjectVersion(), + 'DefaultTargets': 'Build' + }] + + content += _GetMSBuildProjectConfigurations(configurations) + content += _GetMSBuildGlobalProperties(spec, project.guid, project_file_name) + content += import_default_section + content += _GetMSBuildConfigurationDetails(spec, project.build_file) + content += import_cpp_props_section + content += _GetMSBuildExtensions(props_files_of_rules) + content += _GetMSBuildPropertySheets(configurations) + content += macro_section + content += _GetMSBuildConfigurationGlobalProperties(spec, configurations, + project.build_file) + content += _GetMSBuildToolSettingsSections(spec, configurations) + content += _GetMSBuildSources( + spec, sources, exclusions, extension_to_rule_name, actions_spec, + sources_handled_by_action, list_excluded) + content += _GetMSBuildProjectReferences(project) + content += import_cpp_targets_section + content += _GetMSBuildExtensionTargets(targets_files_of_rules) + + # TODO(jeanluc) File a bug to get rid of runas. We had in MSVS: + # has_run_as = _WriteMSVSUserFile(project.path, version, spec) + + easy_xml.WriteXmlIfChanged(content, project.path) + + +def _GetMSBuildExtensions(props_files_of_rules): + extensions = ['ImportGroup', {'Label': 'ExtensionSettings'}] + for props_file in props_files_of_rules: + extensions.append(['Import', {'Project': props_file}]) + return [extensions] + + +def _GetMSBuildExtensionTargets(targets_files_of_rules): + targets_node = ['ImportGroup', {'Label': 'ExtensionTargets'}] + for targets_file in sorted(targets_files_of_rules): + targets_node.append(['Import', {'Project': targets_file}]) + return [targets_node] + + +def _GenerateActionsForMSBuild(spec, actions_to_add): + """Add actions accumulated into an actions_to_add, merging as needed. + + Arguments: + spec: the target project dict + actions_to_add: dictionary keyed on input name, which maps to a list of + dicts describing the actions attached to that input file. + + Returns: + A pair of (action specification, the sources handled by this action). + """ + sources_handled_by_action = set() + actions_spec = [] + for primary_input, actions in actions_to_add.iteritems(): + inputs = set() + outputs = set() + descriptions = [] + commands = [] + for action in actions: + inputs.update(set(action['inputs'])) + outputs.update(set(action['outputs'])) + descriptions.append(action['description']) + cmd = action['command'] + # For most actions, add 'call' so that actions that invoke batch files + # return and continue executing. 
msbuild_use_call provides a way to + # disable this but I have not seen any adverse effect from doing that + # for everything. + if action.get('msbuild_use_call', True): + cmd = 'call ' + cmd + commands.append(cmd) + # Add the custom build action for one input file. + description = ', and also '.join(descriptions) + command = ' && '.join(commands) + _AddMSBuildAction(spec, + primary_input, + inputs, + outputs, + command, + description, + sources_handled_by_action, + actions_spec) + return actions_spec, sources_handled_by_action + + +def _AddMSBuildAction(spec, primary_input, inputs, outputs, cmd, description, + sources_handled_by_action, actions_spec): + command = MSVSSettings.ConvertVCMacrosToMSBuild(cmd) + primary_input = _FixPath(primary_input) + inputs_array = _FixPaths(inputs) + outputs_array = _FixPaths(outputs) + additional_inputs = ';'.join([i for i in inputs_array + if i != primary_input]) + outputs = ';'.join(outputs_array) + sources_handled_by_action.add(primary_input) + action_spec = ['CustomBuild', {'Include': primary_input}] + action_spec.extend( + # TODO(jeanluc) 'Document' for all or just if as_sources? + [['FileType', 'Document'], + ['Command', command], + ['Message', description], + ['Outputs', outputs] + ]) + if additional_inputs: + action_spec.append(['AdditionalInputs', additional_inputs]) + actions_spec.append(action_spec) diff --git a/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/generator/msvs_test.py b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/generator/msvs_test.py new file mode 100755 index 0000000000..5a69c1c288 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/generator/msvs_test.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python + +# Copyright (c) 2011 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +""" Unit tests for the msvs.py file. """ + +import gyp.generator.msvs as msvs +import unittest +import StringIO + + +class TestSequenceFunctions(unittest.TestCase): + + def setUp(self): + self.stderr = StringIO.StringIO() + + def test_GetLibraries(self): + self.assertEqual( + msvs._GetLibraries({}), + []) + self.assertEqual( + msvs._GetLibraries({'libraries': []}), + []) + self.assertEqual( + msvs._GetLibraries({'other':'foo', 'libraries': ['a.lib']}), + ['a.lib']) + self.assertEqual( + msvs._GetLibraries({'libraries': ['a.lib', 'b.lib', 'c.lib', '-lb.lib', + '-lb.lib', 'd.lib', 'a.lib']}), + ['c.lib', 'b.lib', 'd.lib', 'a.lib']) + +if __name__ == '__main__': + unittest.main() diff --git a/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/generator/ninja.py b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/generator/ninja.py new file mode 100644 index 0000000000..8d6c6f52d1 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/generator/ninja.py @@ -0,0 +1,1256 @@ +# Copyright (c) 2012 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+
+import copy
+import gyp
+import gyp.common
+import gyp.system_test
+import gyp.xcode_emulation
+import os.path
+import re
+import subprocess
+import sys
+
+import gyp.ninja_syntax as ninja_syntax
+
+generator_default_variables = {
+ 'EXECUTABLE_PREFIX': '',
+ 'EXECUTABLE_SUFFIX': '',
+ 'STATIC_LIB_PREFIX': 'lib',
+ 'STATIC_LIB_SUFFIX': '.a',
+ 'SHARED_LIB_PREFIX': 'lib',
+
+ # Gyp expects the following variables to be expandable by the build
+ # system to the appropriate locations. Ninja prefers paths to be
+ # known at gyp time. To resolve this, introduce special
+ # variables starting with $! (which begin with a $ so gyp knows it
+ # should be treated as a path, but is otherwise an invalid
+ # ninja/shell variable) that are passed to gyp here but expanded
+ # before writing out into the target .ninja files; see
+ # ExpandSpecial.
+ 'INTERMEDIATE_DIR': '$!INTERMEDIATE_DIR',
+ 'SHARED_INTERMEDIATE_DIR': '$!PRODUCT_DIR/gen',
+ 'PRODUCT_DIR': '$!PRODUCT_DIR',
+
+ # Special variables that may be used by gyp 'rule' targets.
+ # We generate definitions for these variables on the fly when processing a
+ # rule.
+ 'RULE_INPUT_ROOT': '${root}',
+ 'RULE_INPUT_DIRNAME': '${dirname}',
+ 'RULE_INPUT_PATH': '${source}',
+ 'RULE_INPUT_EXT': '${ext}',
+ 'RULE_INPUT_NAME': '${name}',
+}
+
+# TODO: enable cross compiling once we figure out:
+# - how to not build extra host objects in the non-cross-compile case.
+# - how to decide what the host compiler is (should not just be $cc).
+# - need ld_host as well.
+generator_supports_multiple_toolsets = False
+
+
+def StripPrefix(arg, prefix):
+ if arg.startswith(prefix):
+ return arg[len(prefix):]
+ return arg
+
+
+def QuoteShellArgument(arg):
+ """Quote a string such that it will be interpreted as a single argument
+ by the shell."""
+ # Rather than attempting to enumerate the bad shell characters, just
+ # whitelist common OK ones and quote anything else.
+ if re.match(r'^[a-zA-Z0-9_=-]+$', arg):
+ return arg # No quoting necessary.
+ return "'" + arg.replace("'", "'" + '"\'"' + "'") + "'"
+
+
+def InvertRelativePath(path):
+ """Given a relative path like foo/bar, return the inverse relative path:
+ the path from the relative path back to the origin dir.
+
+ E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path)))
+ should always produce the empty string."""
+
+ if not path:
+ return path
+ # Only need to handle relative paths into subdirectories for now.
+ assert '..' not in path, path
+ depth = len(path.split(os.path.sep))
+ return os.path.sep.join(['..'] * depth)
+
+
+class Target:
+ """Target represents the paths used within a single gyp target.
+
+ Conceptually, building a single target A is a series of steps:
+
+ 1) actions/rules/copies generate source/resources/etc.
+ 2) compiling generates .o files
+ 3) linking generates a binary (library/executable)
+ 4) bundling merges the above into a mac bundle
+
+ (Any of these steps can be optional.)
+
+ From a build ordering perspective, a dependent target B could just
+ depend on the last output of this series of steps.
+
+ But some dependent commands sometimes need to reach inside the box.
+ For example, when linking B it needs to get the path to the static
+ library generated by A.
+
+ This object stores those paths. To keep things simple, member
+ variables only store concrete paths to single files, while methods
+ compute derived values like "the last output of the target".
+ """
+ def __init__(self, type):
+ # Gyp type ("static_library", etc.) of this target.
+ self.type = type
+ # File representing whether any input dependencies necessary for
+ # dependent actions have completed.
+ self.preaction_stamp = None
+ # File representing whether any input dependencies necessary for
+ # dependent compiles have completed.
+ self.precompile_stamp = None
+ # File representing the completion of actions/rules/copies, if any.
+ self.actions_stamp = None
+ # Path to the output of the link step, if any.
+ self.binary = None
+ # Path to the file representing the completion of building the bundle,
+ # if any.
+ self.bundle = None
+
+ def Linkable(self):
+ """Return true if this is a target that can be linked against."""
+ return self.type in ('static_library', 'shared_library')
+
+ def PreActionInput(self):
+ """Return the path, if any, that should be used as a dependency of
+ any dependent action step."""
+ return self.FinalOutput() or self.preaction_stamp
+
+ def PreCompileInput(self):
+ """Return the path, if any, that should be used as a dependency of
+ any dependent compile step."""
+ return self.actions_stamp or self.precompile_stamp
+
+ def FinalOutput(self):
+ """Return the last output of the target, which depends on all prior
+ steps."""
+ return self.bundle or self.binary or self.actions_stamp
+
+
+# A small discourse on paths as used within the Ninja build:
+# All files we produce (both at gyp and at build time) appear in the
+# build directory (e.g. out/Debug).
+#
+# Paths within a given .gyp file are always relative to the directory
+# containing the .gyp file. Call these "gyp paths". This includes
+# sources as well as the starting directory a given gyp rule/action
+# expects to be run from. We call the path from the source root to
+# the gyp file the "base directory" within the per-.gyp-file
+# NinjaWriter code.
+#
+# All paths as written into the .ninja files are relative to the build
+# directory. Call these paths "ninja paths".
+#
+# We translate between these two notions of paths with two helper
+# functions:
+#
+# - GypPathToNinja translates a gyp path (i.e. relative to the .gyp file)
+# into the equivalent ninja path.
+#
+# - GypPathToUniqueOutput translates a gyp path into a ninja path to write
+# an output file; the result can be namespaced such that it is unique
+# to the input file name as well as the output target name.
+
+class NinjaWriter:
+ def __init__(self, target_outputs, base_dir, build_dir, output_file, flavor,
+ abs_build_dir=None):
+ """
+ base_dir: path from source root to directory containing this gyp file;
+ by gyp semantics, all input paths are relative to this
+ build_dir: path from source root to build output
+ abs_build_dir: absolute path to the build directory
+ """
+
+ self.target_outputs = target_outputs
+ self.base_dir = base_dir
+ self.build_dir = build_dir
+ self.ninja = ninja_syntax.Writer(output_file)
+ self.flavor = flavor
+ self.abs_build_dir = abs_build_dir
+ self.obj_ext = '.obj' if flavor == 'win' else '.o'
+
+ # Relative path from build output dir to base dir.
+ self.build_to_base = os.path.join(InvertRelativePath(build_dir), base_dir)
+ # Relative path from base dir to build dir.
+ self.base_to_build = os.path.join(InvertRelativePath(base_dir), build_dir)
+
+ def ExpandSpecial(self, path, product_dir=None):
+ """Expand specials like $!PRODUCT_DIR in |path|.
+
+ If |product_dir| is None, assumes the cwd is already the product
+ dir. Otherwise, |product_dir| is the relative path to the product
+ dir.
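+
+ For example (illustrative): with product_dir='../out/Debug',
+ '$!PRODUCT_DIR/gen/foo.h' expands to '../out/Debug/gen/foo.h'; with
+ product_dir=None it becomes 'gen/foo.h'.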
+ """ + + PRODUCT_DIR = '$!PRODUCT_DIR' + if PRODUCT_DIR in path: + if product_dir: + path = path.replace(PRODUCT_DIR, product_dir) + else: + path = path.replace(PRODUCT_DIR + '/', '') + path = path.replace(PRODUCT_DIR + '\\', '') + path = path.replace(PRODUCT_DIR, '.') + + INTERMEDIATE_DIR = '$!INTERMEDIATE_DIR' + if INTERMEDIATE_DIR in path: + int_dir = self.GypPathToUniqueOutput('gen') + # GypPathToUniqueOutput generates a path relative to the product dir, + # so insert product_dir in front if it is provided. + path = path.replace(INTERMEDIATE_DIR, + os.path.join(product_dir or '', int_dir)) + + return os.path.normpath(path) + + def ExpandRuleVariables(self, path, root, dirname, source, ext, name): + path = path.replace(generator_default_variables['RULE_INPUT_ROOT'], root) + path = path.replace(generator_default_variables['RULE_INPUT_DIRNAME'], + dirname) + path = path.replace(generator_default_variables['RULE_INPUT_PATH'], source) + path = path.replace(generator_default_variables['RULE_INPUT_EXT'], ext) + path = path.replace(generator_default_variables['RULE_INPUT_NAME'], name) + return path + + def GypPathToNinja(self, path, env=None): + """Translate a gyp path to a ninja path, optionally expanding environment + variable references in |path| with |env|. + + See the above discourse on path conversions.""" + if env: + path = gyp.xcode_emulation.ExpandEnvVars(path, env) + if path.startswith('$!'): + return self.ExpandSpecial(path) + assert '$' not in path, path + return os.path.normpath(os.path.join(self.build_to_base, path)) + + def GypPathToUniqueOutput(self, path, qualified=True): + """Translate a gyp path to a ninja path for writing output. + + If qualified is True, qualify the resulting filename with the name + of the target. This is necessary when e.g. compiling the same + path twice for two separate output targets. + + See the above discourse on path conversions.""" + + path = self.ExpandSpecial(path) + assert not path.startswith('$'), path + + # Translate the path following this scheme: + # Input: foo/bar.gyp, target targ, references baz/out.o + # Output: obj/foo/baz/targ.out.o (if qualified) + # obj/foo/baz/out.o (otherwise) + # (and obj.host instead of obj for cross-compiles) + # + # Why this scheme and not some other one? + # 1) for a given input, you can compute all derived outputs by matching + # its path, even if the input is brought via a gyp file with '..'. + # 2) simple files like libraries and stamps have a simple filename. + + obj = 'obj' + if self.toolset != 'target': + obj += '.' + self.toolset + + path_dir, path_basename = os.path.split(path) + if qualified: + path_basename = self.name + '.' + path_basename + return os.path.normpath(os.path.join(obj, self.base_dir, path_dir, + path_basename)) + + def WriteCollapsedDependencies(self, name, targets): + """Given a list of targets, return a path for a single file + representing the result of building all the targets or None. + + Uses a stamp file if necessary.""" + + assert targets == filter(None, targets), targets + if len(targets) == 0: + return None + if len(targets) > 1: + stamp = self.GypPathToUniqueOutput(name + '.stamp') + targets = self.ninja.build(stamp, 'stamp', targets) + self.ninja.newline() + return targets[0] + + def WriteSpec(self, spec, config_name): + """The main entry point for NinjaWriter: write the build rules for a spec. + + Returns a Target object, which represents the output paths for this spec. + Returns None if there are no outputs (e.g. 
a settings-only 'none' type + target).""" + + self.config_name = config_name + self.name = spec['target_name'] + self.toolset = spec['toolset'] + config = spec['configurations'][config_name] + self.target = Target(spec['type']) + + self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec) + if self.flavor == 'mac': + self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec) + else: + self.xcode_settings = None + + # Compute predepends for all rules. + # actions_depends is the dependencies this target depends on before running + # any of its action/rule/copy steps. + # compile_depends is the dependencies this target depends on before running + # any of its compile steps. + actions_depends = [] + compile_depends = [] + # TODO(evan): it is rather confusing which things are lists and which + # are strings. Fix these. + if 'dependencies' in spec: + for dep in spec['dependencies']: + if dep in self.target_outputs: + target = self.target_outputs[dep] + actions_depends.append(target.PreActionInput()) + compile_depends.append(target.PreCompileInput()) + actions_depends = filter(None, actions_depends) + compile_depends = filter(None, compile_depends) + actions_depends = self.WriteCollapsedDependencies('actions_depends', + actions_depends) + compile_depends = self.WriteCollapsedDependencies('compile_depends', + compile_depends) + self.target.preaction_stamp = actions_depends + self.target.precompile_stamp = compile_depends + + # Write out actions, rules, and copies. These must happen before we + # compile any sources, so compute a list of predependencies for sources + # while we do it. + extra_sources = [] + mac_bundle_depends = [] + self.target.actions_stamp = self.WriteActionsRulesCopies( + spec, extra_sources, actions_depends, mac_bundle_depends) + + # If we have actions/rules/copies, we depend directly on those, but + # otherwise we depend on dependent target's actions/rules/copies etc. + # We never need to explicitly depend on previous target's link steps, + # because no compile ever depends on them. + compile_depends_stamp = (self.target.actions_stamp or compile_depends) + + # Write out the compilation steps, if any. + link_deps = [] + sources = spec.get('sources', []) + extra_sources + if sources: + link_deps = self.WriteSources( + config_name, config, sources, compile_depends_stamp, + gyp.xcode_emulation.MacPrefixHeader( + self.xcode_settings, self.GypPathToNinja, + lambda path, lang: self.GypPathToUniqueOutput(path + '-' + lang))) + # Some actions/rules output 'sources' that are already object files. + link_deps += [self.GypPathToNinja(f) + for f in sources if f.endswith(self.obj_ext)] + + # Write out a link step, if needed. + output = None + if link_deps or self.target.actions_stamp or actions_depends: + output = self.WriteTarget(spec, config_name, config, link_deps, + self.target.actions_stamp or actions_depends) + if self.is_mac_bundle: + mac_bundle_depends.append(output) + + # Bundle all of the above together, if needed. + if self.is_mac_bundle: + output = self.WriteMacBundle(spec, mac_bundle_depends) + + if not output: + return None + + if self.name != output and self.toolset == 'target': + # Write a short name to build this target. This benefits both the + # "build chrome" case as well as the gyp tests, which expect to be + # able to run actions and build libraries by their short name. 
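+ # Illustrative (hypothetical paths): a target 'base' whose final output
+ # is 'obj/base/libbase.a' gets a phony edge so 'ninja base' also works.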
+ self.ninja.build(self.name, 'phony', output) + + assert self.target.FinalOutput(), output + return self.target + + def WriteActionsRulesCopies(self, spec, extra_sources, prebuild, + mac_bundle_depends): + """Write out the Actions, Rules, and Copies steps. Return a path + representing the outputs of these steps.""" + outputs = [] + extra_mac_bundle_resources = [] + + if 'actions' in spec: + outputs += self.WriteActions(spec['actions'], extra_sources, prebuild, + extra_mac_bundle_resources) + if 'rules' in spec: + outputs += self.WriteRules(spec['rules'], extra_sources, prebuild, + extra_mac_bundle_resources) + if 'copies' in spec: + outputs += self.WriteCopies(spec['copies'], prebuild) + + stamp = self.WriteCollapsedDependencies('actions_rules_copies', outputs) + + if self.is_mac_bundle: + mac_bundle_resources = spec.get('mac_bundle_resources', []) + \ + extra_mac_bundle_resources + self.WriteMacBundleResources(mac_bundle_resources, mac_bundle_depends) + self.WriteMacInfoPlist(mac_bundle_depends) + + return stamp + + def GenerateDescription(self, verb, message, fallback): + """Generate and return a description of a build step. + + |verb| is the short summary, e.g. ACTION or RULE. + |message| is a hand-written description, or None if not available. + |fallback| is the gyp-level name of the step, usable as a fallback. + """ + if self.toolset != 'target': + verb += '(%s)' % self.toolset + if message: + return '%s %s' % (verb, self.ExpandSpecial(message)) + else: + return '%s %s: %s' % (verb, self.name, fallback) + + def WriteActions(self, actions, extra_sources, prebuild, + extra_mac_bundle_resources): + # Actions cd into the base directory. + env = self.GetXcodeEnv() + all_outputs = [] + for action in actions: + # First write out a rule for the action. + name = re.sub(r'[ {}$]', '_', action['action_name']) + description = self.GenerateDescription('ACTION', + action.get('message', None), + name) + rule_name = self.WriteNewNinjaRule(name, action['action'], description, + env=env) + + inputs = [self.GypPathToNinja(i, env) for i in action['inputs']] + if int(action.get('process_outputs_as_sources', False)): + extra_sources += action['outputs'] + if int(action.get('process_outputs_as_mac_bundle_resources', False)): + extra_mac_bundle_resources += action['outputs'] + outputs = [self.GypPathToNinja(o, env) for o in action['outputs']] + + # Then write out an edge using the rule. + self.ninja.build(outputs, rule_name, inputs, + order_only=prebuild) + all_outputs += outputs + + self.ninja.newline() + + return all_outputs + + def WriteRules(self, rules, extra_sources, prebuild, + extra_mac_bundle_resources): + all_outputs = [] + for rule in rules: + # First write out a rule for the rule action. + name = rule['rule_name'] + args = rule['action'] + description = self.GenerateDescription( + 'RULE', + rule.get('message', None), + ('%s ' + generator_default_variables['RULE_INPUT_PATH']) % name) + rule_name = self.WriteNewNinjaRule(name, args, description) + + # TODO: if the command references the outputs directly, we should + # simplify it to just use $out. + + # Rules can potentially make use of some special variables which + # must vary per source file. + # Compute the list of variables we'll need to provide. + special_locals = ('source', 'root', 'dirname', 'ext', 'name') + needed_variables = set(['source']) + for argument in args: + for var in special_locals: + if ('${%s}' % var) in argument: + needed_variables.add(var) + + # For each source file, write an edge that generates all the outputs. 
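+ # Illustrative expansion (hypothetical rule): for source 'foo/bar.proto',
+ # root='bar', dirname='foo', ext='.proto', name='bar.proto', so an output
+ # template like '${root}.pb.cc' becomes 'bar.pb.cc'.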
+ for source in rule.get('rule_sources', []): + dirname, basename = os.path.split(source) + root, ext = os.path.splitext(basename) + + # Gather the list of outputs, expanding $vars if possible. + outputs = [] + for output in rule['outputs']: + outputs.append(self.ExpandRuleVariables(output, root, dirname, + source, ext, basename)) + + if int(rule.get('process_outputs_as_sources', False)): + extra_sources += outputs + if int(rule.get('process_outputs_as_mac_bundle_resources', False)): + extra_mac_bundle_resources += outputs + + extra_bindings = [] + for var in needed_variables: + if var == 'root': + extra_bindings.append(('root', root)) + elif var == 'dirname': + extra_bindings.append(('dirname', dirname)) + elif var == 'source': + # '$source' is a parameter to the rule action, which means + # it shouldn't be converted to a Ninja path. But we don't + # want $!PRODUCT_DIR in there either. + source_expanded = self.ExpandSpecial(source, self.base_to_build) + extra_bindings.append(('source', source_expanded)) + elif var == 'ext': + extra_bindings.append(('ext', ext)) + elif var == 'name': + extra_bindings.append(('name', basename)) + else: + assert var == None, repr(var) + + inputs = map(self.GypPathToNinja, rule.get('inputs', [])) + outputs = map(self.GypPathToNinja, outputs) + self.ninja.build(outputs, rule_name, self.GypPathToNinja(source), + implicit=inputs, + order_only=prebuild, + variables=extra_bindings) + + all_outputs.extend(outputs) + + return all_outputs + + def WriteCopies(self, copies, prebuild): + outputs = [] + env = self.GetXcodeEnv() + for copy in copies: + for path in copy['files']: + # Normalize the path so trailing slashes don't confuse us. + path = os.path.normpath(path) + basename = os.path.split(path)[1] + src = self.GypPathToNinja(path, env) + dst = self.GypPathToNinja(os.path.join(copy['destination'], basename), + env) + outputs += self.ninja.build(dst, 'copy', src, order_only=prebuild) + + return outputs + + def WriteMacBundleResources(self, resources, bundle_depends): + """Writes ninja edges for 'mac_bundle_resources'.""" + for output, res in gyp.xcode_emulation.GetMacBundleResources( + self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']), + self.xcode_settings, map(self.GypPathToNinja, resources)): + self.ninja.build(output, 'mac_tool', res, + variables=[('mactool_cmd', 'copy-bundle-resource')]) + bundle_depends.append(output) + + def WriteMacInfoPlist(self, bundle_depends): + """Write build rules for bundle Info.plist files.""" + info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist( + self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']), + self.xcode_settings, self.GypPathToNinja) + if not info_plist: + return + if defines: + # Create an intermediate file to store preprocessed results. 
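
Two details in this Info.plist path are easy to miss: the plist is first run
through the C preprocessor (the 'infoplist' rule defined near the end of this
patch invokes $cc -E -P -Wno-trigraphs -x c), and each define is both
shell-quoted and ninja-escaped before landing on that command line. A rough
sketch of the quoting step, with simplified stand-ins for QuoteShellArgument
and ninja_syntax.escape (both helper names below are local to the example):

    def quote_sh(arg):                    # stand-in for QuoteShellArgument
      return "'" + arg.replace("'", "'\\''") + "'"

    def ninja_escape(s):                  # stand-in for ninja_syntax.escape
      return s.replace('$', '$$')

    defines = ['DEBUG=1', 'BUNDLE_ID=com.example.app']
    flags = ' '.join([quote_sh(ninja_escape('-D' + d)) for d in defines])
    # flags == "'-DDEBUG=1' '-DBUNDLE_ID=com.example.app'"
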
+ intermediate_plist = self.GypPathToUniqueOutput( + os.path.basename(info_plist)) + defines = ' '.join( + [QuoteShellArgument(ninja_syntax.escape('-D' + d)) for d in defines]) + info_plist = self.ninja.build(intermediate_plist, 'infoplist', info_plist, + variables=[('defines',defines)]) + + env = self.GetXcodeEnv(additional_settings=extra_env) + env = self.ComputeExportEnvString(env) + + self.ninja.build(out, 'mac_tool', info_plist, + variables=[('mactool_cmd', 'copy-info-plist'), + ('env', env)]) + bundle_depends.append(out) + + def WriteSources(self, config_name, config, sources, predepends, + precompiled_header): + """Write build rules to compile all of |sources|.""" + if self.toolset == 'host': + self.ninja.variable('cc', '$cc_host') + self.ninja.variable('cxx', '$cxx_host') + + if self.flavor == 'mac': + cflags = self.xcode_settings.GetCflags(config_name) + cflags_c = self.xcode_settings.GetCflagsC(config_name) + cflags_cc = self.xcode_settings.GetCflagsCC(config_name) + cflags_objc = ['$cflags_c'] + \ + self.xcode_settings.GetCflagsObjC(config_name) + cflags_objcc = ['$cflags_cc'] + \ + self.xcode_settings.GetCflagsObjCC(config_name) + else: + cflags = config.get('cflags', []) + cflags_c = config.get('cflags_c', []) + cflags_cc = config.get('cflags_cc', []) + + self.WriteVariableList('defines', + [QuoteShellArgument(ninja_syntax.escape('-D' + d)) + for d in config.get('defines', [])]) + self.WriteVariableList('includes', + ['-I' + self.GypPathToNinja(i) + for i in config.get('include_dirs', [])]) + + pch_commands = precompiled_header.GetGchBuildCommands() + if self.flavor == 'mac': + self.WriteVariableList('cflags_pch_c', + [precompiled_header.GetInclude('c')]) + self.WriteVariableList('cflags_pch_cc', + [precompiled_header.GetInclude('cc')]) + self.WriteVariableList('cflags_pch_objc', + [precompiled_header.GetInclude('m')]) + self.WriteVariableList('cflags_pch_objcc', + [precompiled_header.GetInclude('mm')]) + + self.WriteVariableList('cflags', map(self.ExpandSpecial, cflags)) + self.WriteVariableList('cflags_c', map(self.ExpandSpecial, cflags_c)) + self.WriteVariableList('cflags_cc', map(self.ExpandSpecial, cflags_cc)) + if self.flavor == 'mac': + self.WriteVariableList('cflags_objc', map(self.ExpandSpecial, + cflags_objc)) + self.WriteVariableList('cflags_objcc', map(self.ExpandSpecial, + cflags_objcc)) + self.ninja.newline() + outputs = [] + for source in sources: + filename, ext = os.path.splitext(source) + ext = ext[1:] + if ext in ('cc', 'cpp', 'cxx'): + command = 'cxx' + elif ext in ('c', 's', 'S'): + command = 'cc' + elif self.flavor == 'mac' and ext == 'm': + command = 'objc' + elif self.flavor == 'mac' and ext == 'mm': + command = 'objcxx' + else: + # TODO: should we assert here on unexpected extensions? 
+ continue + input = self.GypPathToNinja(source) + output = self.GypPathToUniqueOutput(filename + self.obj_ext) + implicit = precompiled_header.GetObjDependencies([input], [output]) + self.ninja.build(output, command, input, + implicit=[gch for _, _, gch in implicit], + order_only=predepends) + outputs.append(output) + + self.WritePchTargets(pch_commands) + + self.ninja.newline() + return outputs + + def WritePchTargets(self, pch_commands): + """Writes ninja rules to compile prefix headers.""" + if not pch_commands: + return + + for gch, lang_flag, lang, input in pch_commands: + var_name = { + 'c': 'cflags_pch_c', + 'cc': 'cflags_pch_cc', + 'm': 'cflags_pch_objc', + 'mm': 'cflags_pch_objcc', + }[lang] + + cmd = { 'c': 'cc', 'cc': 'cxx', 'm': 'objc', 'mm': 'objcxx', }.get(lang) + self.ninja.build(gch, cmd, input, variables=[(var_name, lang_flag)]) + + + def WriteLink(self, spec, config_name, config, link_deps): + """Write out a link step. Returns the path to the output.""" + + command = { + 'executable': 'link', + 'loadable_module': 'solink_module', + 'shared_library': 'solink', + }[spec['type']] + + implicit_deps = set() + + if 'dependencies' in spec: + # Two kinds of dependencies: + # - Linkable dependencies (like a .a or a .so): add them to the link line. + # - Non-linkable dependencies (like a rule that generates a file + # and writes a stamp file): add them to implicit_deps + extra_link_deps = set() + for dep in spec['dependencies']: + target = self.target_outputs.get(dep) + if not target: + continue + linkable = target.Linkable() + if linkable: + extra_link_deps.add(target.binary) + + final_output = target.FinalOutput() + if not linkable or final_output != target.binary: + implicit_deps.add(final_output) + + link_deps.extend(list(extra_link_deps)) + + extra_bindings = [] + if self.is_mac_bundle: + output = self.ComputeMacBundleBinaryOutput() + else: + output = self.ComputeOutput(spec) + extra_bindings.append(('postbuilds', + self.GetPostbuildCommand(spec, output, output))) + + if self.flavor == 'mac': + ldflags = self.xcode_settings.GetLdflags(config_name, + self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']), + self.GypPathToNinja) + else: + ldflags = config.get('ldflags', []) + self.WriteVariableList('ldflags', + gyp.common.uniquer(map(self.ExpandSpecial, + ldflags))) + + libraries = gyp.common.uniquer(map(self.ExpandSpecial, + spec.get('libraries', []))) + if self.flavor == 'mac': + libraries = self.xcode_settings.AdjustLibraries(libraries) + self.WriteVariableList('libs', libraries) + + if command in ('solink', 'solink_module'): + extra_bindings.append(('soname', os.path.split(output)[1])) + + self.ninja.build(output, command, link_deps, + implicit=list(implicit_deps), + variables=extra_bindings) + return output + + def WriteTarget(self, spec, config_name, config, link_deps, compile_deps): + if spec['type'] == 'none': + # TODO(evan): don't call this function for 'none' target types, as + # it doesn't do anything, and we fake out a 'binary' with a stamp file. 
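
Before the WriteTarget branches continue below, one more note on WriteLink
above: the dependency split there is the crux of incremental linking; linkable
outputs (.a/.so) go on the link line itself, while everything else only becomes
an implicit input. A standalone sketch of the same classification, with a
stand-in for the generator's Target bookkeeping:

    class FakeTarget(object):       # stand-in, not gyp's actual Target class
      def __init__(self, binary, final, linkable):
        self.binary, self._final, self._linkable = binary, final, linkable
      def Linkable(self):
        return self._linkable
      def FinalOutput(self):
        return self._final

    deps = [FakeTarget('obj/libbase.a', 'obj/libbase.a', True),
            FakeTarget('obj/gen.stamp', 'obj/gen.stamp', False)]
    link_deps, implicit_deps = [], set()
    for target in deps:
      if target.Linkable():
        link_deps.append(target.binary)
      final_output = target.FinalOutput()
      if not target.Linkable() or final_output != target.binary:
        implicit_deps.add(final_output)
    assert link_deps == ['obj/libbase.a']
    assert implicit_deps == set(['obj/gen.stamp'])
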
+ self.target.binary = compile_deps + elif spec['type'] == 'static_library': + self.target.binary = self.ComputeOutput(spec) + self.ninja.build(self.target.binary, 'alink', link_deps, + order_only=compile_deps, + variables=[('postbuilds', self.GetPostbuildCommand( + spec, self.target.binary, self.target.binary))]) + else: + self.target.binary = self.WriteLink(spec, config_name, config, link_deps) + return self.target.binary + + def WriteMacBundle(self, spec, mac_bundle_depends): + assert self.is_mac_bundle + package_framework = spec['type'] in ('shared_library', 'loadable_module') + output = self.ComputeMacBundleOutput() + postbuild = self.GetPostbuildCommand(spec, output, self.target.binary, + is_command_start=not package_framework) + variables = [] + if postbuild: + variables.append(('postbuilds', postbuild)) + if package_framework: + variables.append(('version', self.xcode_settings.GetFrameworkVersion())) + self.ninja.build(output, 'package_framework', mac_bundle_depends, + variables=variables) + else: + self.ninja.build(output, 'stamp', mac_bundle_depends, + variables=variables) + self.target.bundle = output + return output + + def GetXcodeEnv(self, additional_settings=None): + """Returns the variables Xcode would set for build steps.""" + assert self.abs_build_dir + abs_build_dir = self.abs_build_dir + return gyp.xcode_emulation.GetXcodeEnv( + self.xcode_settings, abs_build_dir, + os.path.join(abs_build_dir, self.build_to_base), self.config_name, + additional_settings) + + def GetXcodePostbuildEnv(self): + """Returns the variables Xcode would set for postbuild steps.""" + postbuild_settings = {} + # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack. + # TODO(thakis): It would be nice to have some general mechanism instead. + strip_save_file = self.xcode_settings.GetPerTargetSetting( + 'CHROMIUM_STRIP_SAVE_FILE') + if strip_save_file: + postbuild_settings['CHROMIUM_STRIP_SAVE_FILE'] = self.GypPathToNinja( + strip_save_file) + return self.GetXcodeEnv(additional_settings=postbuild_settings) + + def GetPostbuildCommand(self, spec, output, output_binary, + is_command_start=False): + """Returns a shell command that runs all the postbuilds, and removes + |output| if any of them fails. 
If |is_command_start| is False, then the
+       returned string will start with ' && '."""
+    if not self.xcode_settings or spec['type'] == 'none' or not output:
+      return ''
+    output = QuoteShellArgument(output)
+    target_postbuilds = self.xcode_settings.GetTargetPostbuilds(
+        self.config_name, output, QuoteShellArgument(output_binary), quiet=True)
+    postbuilds = gyp.xcode_emulation.GetSpecPostbuildCommands(
+        spec, self.GypPathToNinja, quiet=True)
+    postbuilds = target_postbuilds + postbuilds
+    if not postbuilds:
+      return ''
+    env = self.ComputeExportEnvString(self.GetXcodePostbuildEnv())
+    commands = env + ' F=0; ' + \
+        ' '.join([ninja_syntax.escape(command) + ' || F=$$?;'
+                  for command in postbuilds])
+    command_string = commands + ' ((exit $$F) || rm -rf %s) ' % output + \
+        '&& exit $$F)'
+    if is_command_start:
+      return '(' + command_string + ' && '
+    else:
+      return '$ && (' + command_string
+
+  def ComputeExportEnvString(self, env):
+    """Given an environment, returns a string looking like
+        'export FOO=foo; export BAR="${FOO} bar";'
+    that exports |env| to the shell."""
+    export_str = []
+    for k in gyp.xcode_emulation.TopologicallySortedEnvVarKeys(env):
+      export_str.append('export %s=%s;' %
+          (k, ninja_syntax.escape(gyp.common.EncodePOSIXShellArgument(env[k]))))
+    return ' '.join(export_str)
+
+  def ComputeMacBundleOutput(self):
+    """Return the 'output' (full output path) to a bundle output directory."""
+    assert self.is_mac_bundle
+    path = self.ExpandSpecial(generator_default_variables['PRODUCT_DIR'])
+    return os.path.join(path, self.xcode_settings.GetWrapperName())
+
+  def ComputeMacBundleBinaryOutput(self):
+    """Return the 'output' (full output path) to the binary in a bundle."""
+    assert self.is_mac_bundle
+    path = self.ExpandSpecial(generator_default_variables['PRODUCT_DIR'])
+    return os.path.join(path, self.xcode_settings.GetExecutablePath())
+
+  def ComputeOutputFileName(self, spec, type=None):
+    """Compute the filename of the final output for the current target."""
+    if not type:
+      type = spec['type']
+
+    default_variables = copy.copy(generator_default_variables)
+    CalculateVariables(default_variables, {'flavor': self.flavor})
+
+    # Compute filename prefix: the product prefix, or a default for
+    # the product type.
+    DEFAULT_PREFIX = {
+      'loadable_module': default_variables['SHARED_LIB_PREFIX'],
+      'shared_library': default_variables['SHARED_LIB_PREFIX'],
+      'static_library': default_variables['STATIC_LIB_PREFIX'],
+      'executable': default_variables['EXECUTABLE_PREFIX'],
+      }
+    prefix = spec.get('product_prefix', DEFAULT_PREFIX.get(type, ''))
+
+    # Compute filename extension: the product extension, or a default
+    # for the product type.
+    DEFAULT_EXTENSION = {
+      'loadable_module': default_variables['SHARED_LIB_SUFFIX'],
+      'shared_library': default_variables['SHARED_LIB_SUFFIX'],
+      'static_library': default_variables['STATIC_LIB_SUFFIX'],
+      'executable': default_variables['EXECUTABLE_SUFFIX'],
+      }
+    extension = spec.get('product_extension')
+    if extension:
+      extension = '.' + extension
+    else:
+      extension = DEFAULT_EXTENSION.get(type, '')
+
+    if 'product_name' in spec:
+      # If we were given an explicit name, use that.
+      target = spec['product_name']
+    else:
+      # Otherwise, derive a name from the target name.
+      target = spec['target_name']
+      if prefix == 'lib':
+        # Snip out an extra 'lib' from libs if appropriate.
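
The naming rules this method encodes, including the 'lib' snip applied on the
next line, reduce to a few lines. A simplified Linux-only sketch (the
prefix/suffix tables are hard-coded here, where the real code pulls them from
the generator's default variables):

    DEFAULT_PREFIX = {'shared_library': 'lib', 'static_library': 'lib'}
    DEFAULT_EXTENSION = {'shared_library': '.so', 'static_library': '.a',
                         'executable': ''}

    def output_file_name(target_name, type):
      prefix = DEFAULT_PREFIX.get(type, '')
      if prefix == 'lib' and target_name.startswith('lib'):
        target_name = target_name[len('lib'):]   # avoid 'liblibfoo.so'
      return prefix + target_name + DEFAULT_EXTENSION.get(type, '')

    assert output_file_name('libbase', 'shared_library') == 'libbase.so'
    assert output_file_name('gtest', 'static_library') == 'libgtest.a'
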
+        target = StripPrefix(target, 'lib')
+
+    if type in ('static_library', 'loadable_module', 'shared_library',
+                'executable'):
+      return '%s%s%s' % (prefix, target, extension)
+    elif type == 'none':
+      return '%s.stamp' % target
+    else:
+      raise Exception('Unhandled output type: %s' % type)
+
+  def ComputeOutput(self, spec, type=None):
+    """Compute the path for the final output of the spec."""
+    assert not self.is_mac_bundle or type
+
+    if not type:
+      type = spec['type']
+
+    if self.flavor == 'mac' and type in (
+        'static_library', 'executable', 'shared_library', 'loadable_module'):
+      filename = self.xcode_settings.GetExecutablePath()
+    else:
+      filename = self.ComputeOutputFileName(spec, type)
+
+    if 'product_dir' in spec:
+      path = os.path.join(spec['product_dir'], filename)
+      return self.ExpandSpecial(path)
+
+    # Some products go into the output root, libraries go into shared library
+    # dir, and everything else goes into the normal place.
+    type_in_output_root = ['executable', 'loadable_module']
+    if self.flavor == 'mac' and self.toolset == 'target':
+      type_in_output_root += ['shared_library', 'static_library']
+
+    if type in type_in_output_root:
+      return filename
+    elif type == 'shared_library':
+      libdir = 'lib'
+      if self.toolset != 'target':
+        libdir = os.path.join('lib', '%s' % self.toolset)
+      return os.path.join(libdir, filename)
+    else:
+      return self.GypPathToUniqueOutput(filename, qualified=False)
+
+  def WriteVariableList(self, var, values):
+    if values is None:
+      values = []
+    self.ninja.variable(var, ' '.join(values))
+
+  def WriteNewNinjaRule(self, name, args, description, env={}):
+    """Write out a new ninja "rule" statement for a given command.
+
+    Returns the name of the new rule."""
+
+    # TODO: we shouldn't need to qualify names; we do it because
+    # currently the ninja rule namespace is global, but it really
+    # should be scoped to the subninja.
+    rule_name = self.name
+    if self.toolset == 'target':
+      rule_name += '.' + self.toolset
+    rule_name += '.' + name
+    rule_name = rule_name.replace(' ', '_')
+
+    args = args[:]
+
+    # gyp dictates that commands are run from the base directory.
+    # cd into the directory before running, and adjust paths in
+    # the arguments to point to the proper locations.
+    if self.flavor == 'win':
+      cd = 'cmd /s /c "cd %s && ' % self.build_to_base
+    else:
+      cd = 'cd %s; ' % self.build_to_base
+    args = [self.ExpandSpecial(arg, self.base_to_build) for arg in args]
+    env = self.ComputeExportEnvString(env)
+    if self.flavor == 'win':
+      # TODO(scottmg): Really don't want to encourage cygwin, but I'm not sure
+      # how much sh is depended upon. For now, double quote args to make most
+      # things work.
+      command = args[0] + ' "' + '" "'.join(args[1:]) + '""'
+    else:
+      command = gyp.common.EncodePOSIXShellList(args)
+    if env:
+      # If an environment is passed in, variables in the command should be
+      # read from it, instead of from ninja's internal variables.
+      command = ninja_syntax.escape(command)
+
+    command = cd + env + command
+    # GYP rules/actions express being no-ops by not touching their outputs.
+    # Avoid executing downstream dependencies in this case by specifying
+    # restat=1 to ninja.
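
restat deserves a word: gyp actions signal "nothing to do" by leaving their
outputs untouched, and restat=1 tells ninja to re-stat the outputs after the
command runs and prune downstream work when the mtimes did not move. A minimal
sketch of what the rule() call below emits, again through ninja_syntax with
invented names (field ordering in the output may vary by ninja_syntax
version):

    import StringIO
    import ninja_syntax

    buf = StringIO.StringIO()
    n = ninja_syntax.Writer(buf)
    n.rule('my_target.gen_version',
           command='cd ../..; python build/gen_version.py',
           description='ACTION gen_version',
           restat=True)
    print buf.getvalue()
    # rule my_target.gen_version
    #   command = cd ../..; python build/gen_version.py
    #   description = ACTION gen_version
    #   restat = 1
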
+ self.ninja.rule(rule_name, command, description, restat=True) + self.ninja.newline() + + return rule_name + + +def CalculateVariables(default_variables, params): + """Calculate additional variables for use in the build (called by gyp).""" + cc_target = os.environ.get('CC.target', os.environ.get('CC', 'cc')) + flavor = gyp.common.GetFlavor(params) + if flavor == 'mac': + default_variables.setdefault('OS', 'mac') + default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib') + default_variables.setdefault('SHARED_LIB_DIR', + generator_default_variables['PRODUCT_DIR']) + default_variables.setdefault('LIB_DIR', + generator_default_variables['PRODUCT_DIR']) + + # Copy additional generator configuration data from Xcode, which is shared + # by the Mac Ninja generator. + import gyp.generator.xcode as xcode_generator + global generator_additional_non_configuration_keys + generator_additional_non_configuration_keys = getattr(xcode_generator, + 'generator_additional_non_configuration_keys', []) + global generator_additional_path_sections + generator_additional_path_sections = getattr(xcode_generator, + 'generator_additional_path_sections', []) + global generator_extra_sources_for_rules + generator_extra_sources_for_rules = getattr(xcode_generator, + 'generator_extra_sources_for_rules', []) + elif flavor == 'win': + default_variables['OS'] = 'win' + default_variables['EXECUTABLE_SUFFIX'] = '.exe' + default_variables['STATIC_LIB_PREFIX'] = '' + default_variables['STATIC_LIB_SUFFIX'] = '.lib' + default_variables['SHARED_LIB_PREFIX'] = '' + default_variables['SHARED_LIB_SUFFIX'] = '.dll' + else: + operating_system = flavor + if flavor == 'android': + operating_system = 'linux' # Keep this legacy behavior for now. + default_variables.setdefault('OS', operating_system) + default_variables.setdefault('SHARED_LIB_SUFFIX', '.so') + default_variables.setdefault('SHARED_LIB_DIR', + os.path.join('$!PRODUCT_DIR', 'lib')) + default_variables.setdefault('LIB_DIR', '') + + +def OpenOutput(path): + """Open |path| for writing, creating directories if necessary.""" + try: + os.makedirs(os.path.dirname(path)) + except OSError: + pass + return open(path, 'w') + + +def GenerateOutputForConfig(target_list, target_dicts, data, params, + config_name): + options = params['options'] + flavor = gyp.common.GetFlavor(params) + generator_flags = params.get('generator_flags', {}) + + # build_dir: relative path from source root to our output files. + # e.g. "out/Debug" + build_dir = os.path.join(generator_flags.get('output_dir', 'out'), + config_name) + + master_ninja = ninja_syntax.Writer( + OpenOutput(os.path.join(options.toplevel_dir, build_dir, 'build.ninja')), + width=120) + + # Put build-time support tools in out/{config_name}. + gyp.common.CopyTool(flavor, os.path.join(options.toplevel_dir, build_dir)) + + # Grab make settings for CC/CXX. 
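
The loop that applies make_global_settings just below rebases tool paths from
the source root into the build directory, since the generated build.ninja runs
from out/{config}. A tiny worked example (the compiler path is hypothetical):

    import os

    build_to_root = '../../'      # what InvertRelativePath('out/Debug') yields
    make_global_settings = [('CC', 'third_party/llvm-build/bin/clang')]
    cc = 'gcc'                    # default before overrides
    for key, value in make_global_settings:
      if key == 'CC':
        cc = os.path.join(build_to_root, value)
    # cc == '../../third_party/llvm-build/bin/clang'
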
+ if flavor == 'win': + cc = cxx = 'cl' + else: + cc, cxx = 'gcc', 'g++' + build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0]) + make_global_settings = data[build_file].get('make_global_settings', []) + build_to_root = InvertRelativePath(build_dir) + for key, value in make_global_settings: + if key == 'CC': cc = os.path.join(build_to_root, value) + if key == 'CXX': cxx = os.path.join(build_to_root, value) + + flock = 'flock' + if flavor == 'mac': + flock = './gyp-mac-tool flock' + master_ninja.variable('cc', os.environ.get('CC', cc)) + master_ninja.variable('cxx', os.environ.get('CXX', cxx)) + if flavor == 'win': + master_ninja.variable('ld', 'link') + else: + master_ninja.variable('ld', flock + ' linker.lock $cxx') + master_ninja.variable('cc_host', '$cc') + master_ninja.variable('cxx_host', '$cxx') + if flavor == 'mac': + master_ninja.variable('mac_tool', os.path.join('.', 'gyp-mac-tool')) + master_ninja.newline() + + if flavor != 'win': + master_ninja.rule( + 'cc', + description='CC $out', + command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_c ' + '$cflags_pch_c -c $in -o $out'), + depfile='$out.d') + master_ninja.rule( + 'cxx', + description='CXX $out', + command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_cc ' + '$cflags_pch_cc -c $in -o $out'), + depfile='$out.d') + else: + # TODO(scottmg): Requires deplist branch of ninja for now (for + # /showIncludes handling). + master_ninja.rule( + 'cc', + description='CC $out', + command=('cmd /c $cc /nologo /showIncludes ' + '$defines $includes $cflags $cflags_c ' + '$cflags_pch_c /c $in /Fo$out ' + '| ninja-deplist-helper -f cl -o $out.dl'), + deplist='$out.dl') + master_ninja.rule( + 'cxx', + description='CXX $out', + command=('cmd /c $cxx /nologo /showIncludes ' + '$defines $includes $cflags $cflags_cc ' + '$cflags_pch_cc /c $in /Fo$out ' + '| ninja-deplist-helper -f cl -o $out.dl'), + deplist='$out.dl') + + if flavor != 'mac' and flavor != 'win': + master_ninja.rule( + 'alink', + description='AR $out', + command='rm -f $out && ar rcsT $out $in') + master_ninja.rule( + 'solink', + description='SOLINK $out', + command=('$ld -shared $ldflags -o $out -Wl,-soname=$soname ' + '-Wl,--whole-archive $in -Wl,--no-whole-archive $libs')) + master_ninja.rule( + 'solink_module', + description='SOLINK(module) $out', + command=('$ld -shared $ldflags -o $out -Wl,-soname=$soname ' + '-Wl,--start-group $in -Wl,--end-group $libs')) + master_ninja.rule( + 'link', + description='LINK $out', + command=('$ld $ldflags -o $out -Wl,-rpath=\$$ORIGIN/lib ' + '-Wl,--start-group $in -Wl,--end-group $libs')) + elif flavor == 'win': + master_ninja.rule( + 'alink', + description='AR $out', + command='lib /nologo /OUT:$out $in') + master_ninja.rule( + 'solink', + description='SOLINK $out', + command=('$ld /nologo /DLL $ldflags /OUT:$out $in $libs')) + master_ninja.rule( + 'solink_module', + description='SOLINK(module) $out', + command=('$ld /nologo /DLL $ldflags /OUT:$out $in $libs')) + master_ninja.rule( + 'link', + description='LINK $out', + command=('$ld /nologo $ldflags /OUT:$out $in $libs')) + else: + master_ninja.rule( + 'objc', + description='OBJC $out', + command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_objc ' + '$cflags_pch_objc -c $in -o $out'), + depfile='$out.d') + master_ninja.rule( + 'objcxx', + description='OBJCXX $out', + command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_objcc ' + '$cflags_pch_objcc -c $in -o $out'), + depfile='$out.d') + master_ninja.rule( + 'alink', + 
description='LIBTOOL-STATIC $out, POSTBUILDS', + command='rm -f $out && ' + './gyp-mac-tool filter-libtool libtool -static -o $out $in' + '$postbuilds') + # TODO(thakis): The solink_module rule is likely wrong. Xcode seems to pass + # -bundle -single_module here (for osmesa.so). + master_ninja.rule( + 'solink', + description='SOLINK $out, POSTBUILDS', + command=('$ld -shared $ldflags -o $out ' + '$in $libs$postbuilds')) + master_ninja.rule( + 'solink_module', + description='SOLINK(module) $out, POSTBUILDS', + command=('$ld -shared $ldflags -o $out ' + '$in $libs$postbuilds')) + master_ninja.rule( + 'link', + description='LINK $out, POSTBUILDS', + command=('$ld $ldflags -o $out ' + '$in $libs$postbuilds')) + master_ninja.rule( + 'infoplist', + description='INFOPLIST $out', + command=('$cc -E -P -Wno-trigraphs -x c $defines $in -o $out && ' + 'plutil -convert xml1 $out $out')) + master_ninja.rule( + 'mac_tool', + description='MACTOOL $mactool_cmd $in', + command='$env $mac_tool $mactool_cmd $in $out') + master_ninja.rule( + 'package_framework', + description='PACKAGE FRAMEWORK $out, POSTBUILDS', + command='$mac_tool package-framework $out $version$postbuilds ' + '&& touch $out') + master_ninja.rule( + 'stamp', + description='STAMP $out', + command='${postbuilds}touch $out') + if flavor == 'win': + # TODO(scottmg): Copy fallback? + master_ninja.rule( + 'copy', + description='COPY $in $out', + command='cmd /c mklink /h $out $in >nul || mklink /h /j $out $in >nul') + else: + master_ninja.rule( + 'copy', + description='COPY $in $out', + command='ln -f $in $out 2>/dev/null || (rm -rf $out && cp -af $in $out)') + master_ninja.newline() + + all_targets = set() + for build_file in params['build_files']: + for target in gyp.common.AllTargets(target_list, target_dicts, build_file): + all_targets.add(target) + all_outputs = set() + + # target_outputs is a map from qualified target name to a Target object. + target_outputs = {} + for qualified_target in target_list: + # qualified_target is like: third_party/icu/icu.gyp:icui18n#target + build_file, name, toolset = \ + gyp.common.ParseQualifiedTarget(qualified_target) + + this_make_global_settings = data[build_file].get('make_global_settings', []) + assert make_global_settings == this_make_global_settings, ( + "make_global_settings needs to be the same for all targets.") + + spec = target_dicts[qualified_target] + if flavor == 'mac': + gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec) + + build_file = gyp.common.RelativePath(build_file, options.toplevel_dir) + + base_path = os.path.dirname(build_file) + obj = 'obj' + if toolset != 'target': + obj += '.' 
+ toolset + output_file = os.path.join(obj, base_path, name + '.ninja') + + abs_build_dir=os.path.abspath(os.path.join(options.toplevel_dir, build_dir)) + writer = NinjaWriter(target_outputs, base_path, build_dir, + OpenOutput(os.path.join(options.toplevel_dir, + build_dir, + output_file)), + flavor, abs_build_dir=abs_build_dir) + master_ninja.subninja(output_file) + + target = writer.WriteSpec(spec, config_name) + if target: + target_outputs[qualified_target] = target + if qualified_target in all_targets: + all_outputs.add(target.FinalOutput()) + + if all_outputs: + master_ninja.build('all', 'phony', list(all_outputs)) + + +def GenerateOutput(target_list, target_dicts, data, params): + if params['options'].generator_output: + raise NotImplementedError, "--generator_output not implemented for ninja" + + user_config = params.get('generator_flags', {}).get('config', None) + if user_config: + GenerateOutputForConfig(target_list, target_dicts, data, params, + user_config) + else: + config_names = target_dicts[target_list[0]]['configurations'].keys() + for config_name in config_names: + GenerateOutputForConfig(target_list, target_dicts, data, params, + config_name) diff --git a/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/generator/ninja_test.py b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/generator/ninja_test.py new file mode 100644 index 0000000000..af2e6d32fc --- /dev/null +++ b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/generator/ninja_test.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python + +# Copyright (c) 2012 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +""" Unit tests for the ninja.py file. """ + +import gyp.generator.ninja as ninja +import unittest +import StringIO +import TestCommon + + +class TestPrefixesAndSuffixes(unittest.TestCase): + def test_BinaryNamesWindows(self): + writer = ninja.NinjaWriter('wee', '.', '.', 'ninja.build', 'win') + spec = { 'target_name': 'wee' } + self.assertTrue(writer.ComputeOutputFileName(spec, 'executable'). + endswith('.exe')) + self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library'). + endswith('.dll')) + self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library'). + endswith('.lib')) + + def test_BinaryNamesLinux(self): + writer = ninja.NinjaWriter('wee', '.', '.', 'ninja.build', 'linux') + spec = { + 'target_name': 'wee' + } + self.assertTrue('.' not in writer.ComputeOutputFileName(spec, 'executable')) + self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library'). + startswith('lib')) + self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library'). + startswith('lib')) + self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library'). + endswith('.so')) + self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library'). + endswith('.a')) + +if __name__ == '__main__': + unittest.main() diff --git a/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/generator/scons.py b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/generator/scons.py new file mode 100644 index 0000000000..90418875af --- /dev/null +++ b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/generator/scons.py @@ -0,0 +1,1044 @@ +# Copyright (c) 2011 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +import gyp +import gyp.common +import gyp.SCons as SCons +import os.path +import pprint +import re + + +# TODO: remove when we delete the last WriteList() call in this module +WriteList = SCons.WriteList + + +generator_default_variables = { + 'EXECUTABLE_PREFIX': '', + 'EXECUTABLE_SUFFIX': '', + 'STATIC_LIB_PREFIX': '${LIBPREFIX}', + 'SHARED_LIB_PREFIX': '${SHLIBPREFIX}', + 'STATIC_LIB_SUFFIX': '${LIBSUFFIX}', + 'SHARED_LIB_SUFFIX': '${SHLIBSUFFIX}', + 'INTERMEDIATE_DIR': '${INTERMEDIATE_DIR}', + 'SHARED_INTERMEDIATE_DIR': '${SHARED_INTERMEDIATE_DIR}', + 'OS': 'linux', + 'PRODUCT_DIR': '$TOP_BUILDDIR', + 'SHARED_LIB_DIR': '$LIB_DIR', + 'LIB_DIR': '$LIB_DIR', + 'RULE_INPUT_ROOT': '${SOURCE.filebase}', + 'RULE_INPUT_DIRNAME': '${SOURCE.dir}', + 'RULE_INPUT_EXT': '${SOURCE.suffix}', + 'RULE_INPUT_NAME': '${SOURCE.file}', + 'RULE_INPUT_PATH': '${SOURCE.abspath}', + 'CONFIGURATION_NAME': '${CONFIG_NAME}', +} + +# Tell GYP how to process the input for us. +generator_handles_variants = True +generator_wants_absolute_build_file_paths = True + + +def FixPath(path, prefix): + if not os.path.isabs(path) and not path[0] == '$': + path = prefix + path + return path + + +header = """\ +# This file is generated; do not edit. +""" + + +_alias_template = """ +if GetOption('verbose'): + _action = Action([%(action)s]) +else: + _action = Action([%(action)s], %(message)s) +_outputs = env.Alias( + ['_%(target_name)s_action'], + %(inputs)s, + _action +) +env.AlwaysBuild(_outputs) +""" + +_run_as_template = """ +if GetOption('verbose'): + _action = Action([%(action)s]) +else: + _action = Action([%(action)s], %(message)s) +""" + +_run_as_template_suffix = """ +_run_as_target = env.Alias('run_%(target_name)s', target_files, _action) +env.Requires(_run_as_target, [ + Alias('%(target_name)s'), +]) +env.AlwaysBuild(_run_as_target) +""" + +_command_template = """ +if GetOption('verbose'): + _action = Action([%(action)s]) +else: + _action = Action([%(action)s], %(message)s) +_outputs = env.Command( + %(outputs)s, + %(inputs)s, + _action +) +""" + +# This is copied from the default SCons action, updated to handle symlinks. +_copy_action_template = """ +import shutil +import SCons.Action + +def _copy_files_or_dirs_or_symlinks(dest, src): + SCons.Node.FS.invalidate_node_memos(dest) + if SCons.Util.is_List(src) and os.path.isdir(dest): + for file in src: + shutil.copy2(file, dest) + return 0 + elif os.path.islink(src): + linkto = os.readlink(src) + os.symlink(linkto, dest) + return 0 + elif os.path.isfile(src): + return shutil.copy2(src, dest) + else: + return shutil.copytree(src, dest, 1) + +def _copy_files_or_dirs_or_symlinks_str(dest, src): + return 'Copying %s to %s ...' 
% (src, dest)
+
+GYPCopy = SCons.Action.ActionFactory(_copy_files_or_dirs_or_symlinks,
+                                     _copy_files_or_dirs_or_symlinks_str,
+                                     convert=str)
+"""
+
+_rule_template = """
+%(name)s_additional_inputs = %(inputs)s
+%(name)s_outputs = %(outputs)s
+def %(name)s_emitter(target, source, env):
+  return (%(name)s_outputs, source + %(name)s_additional_inputs)
+if GetOption('verbose'):
+  %(name)s_action = Action([%(action)s])
+else:
+  %(name)s_action = Action([%(action)s], %(message)s)
+env['BUILDERS']['%(name)s'] = Builder(action=%(name)s_action,
+                                      emitter=%(name)s_emitter)
+
+_outputs = []
+_processed_input_files = []
+for infile in input_files:
+  if (type(infile) == type('')
+      and not os.path.isabs(infile)
+      and not infile[0] == '$'):
+    infile = %(src_dir)r + infile
+  if str(infile).endswith('.%(extension)s'):
+    _generated = env.%(name)s(infile)
+    env.Precious(_generated)
+    _outputs.append(_generated)
+    %(process_outputs_as_sources_line)s
+  else:
+    _processed_input_files.append(infile)
+prerequisites.extend(_outputs)
+input_files = _processed_input_files
+"""
+
+_spawn_hack = """
+import re
+import SCons.Platform.posix
+needs_shell = re.compile('["\\'><!^&]')
+
+def gyp_spawn(sh, escape, cmd, args, env):
+  def strip_scons_quotes(arg):
+    if arg[0] == '"' and arg[-1] == '"':
+      return arg[1:-1]
+    return arg
+  stripped_args = [strip_scons_quotes(a) for a in args]
+  if needs_shell.search(' '.join(stripped_args)):
+    return SCons.Platform.posix.exec_spawnvpe([sh, '-c', ' '.join(args)], env)
+  else:
+    return SCons.Platform.posix.exec_spawnvpe(stripped_args, env)
+"""
+
+
+#############################################################################
+# TEMPLATE BEGIN
+
+_wrapper_template = """\
+
+import os
+import sys
+
+import SCons.Environment
+import SCons.Util
+
+def GetProcessorCount():
+  '''
+  Detects the number of CPUs on the system. Adapted from:
+  http://codeliberates.blogspot.com/2008/05/detecting-cpuscores-in-python.html
+  '''
+  # Linux, Unix and Mac OS X:
+  if hasattr(os, 'sysconf'):
+    if os.sysconf_names.has_key('SC_NPROCESSORS_ONLN'):
+      # Linux and Unix or Mac OS X with Python >= 2.5:
+      return os.sysconf('SC_NPROCESSORS_ONLN')
+    else:  # Mac OS X with Python < 2.5:
+      return int(os.popen2("sysctl -n hw.ncpu")[1].read())
+  # Windows:
+  if os.environ.has_key('NUMBER_OF_PROCESSORS'):
+    return max(int(os.environ.get('NUMBER_OF_PROCESSORS', '1')), 1)
+  return 1  # Default
+
+# Support PROGRESS= to show progress in different ways.
+p = ARGUMENTS.get('PROGRESS')
+if p == 'spinner':
+  Progress(['/\\r', '|\\r', '\\\\\\r', '-\\r'],
+           interval=5,
+           file=open('/dev/tty', 'w'))
+elif p == 'name':
+  Progress('$TARGET\\r', overwrite=True, file=open('/dev/tty', 'w'))
+
+# Set the default -j value based on the number of processors.
+SetOption('num_jobs', GetProcessorCount() + 1)
+
+# Have SCons use its cached dependency information.
+SetOption('implicit_cache', 1)
+
+# Only re-calculate MD5 checksums if a timestamp has changed.
+Decider('MD5-timestamp')
+
+# Since we set the -j value by default, suppress SCons warnings about being
+# unable to support parallel build on versions of Python with no threading.
+default_warnings = ['no-no-parallel-support']
+SetOption('warn', default_warnings + GetOption('warn'))
+
+AddOption('--mode', nargs=1, dest='conf_list', default=[],
+          action='append', help='Configuration to build.')
+
+AddOption('--verbose', dest='verbose', default=False,
+          action='store_true', help='Verbose command-line output.')
+
+
+#
+sconscript_file_map = %(sconscript_files)s
+
+class LoadTarget:
+  '''
+  Class for deciding if a given target sconscript is to be included
+  based on a list of included target names, optionally prefixed with '-'
+  to exclude a target name.
+  '''
+  def __init__(self, load):
+    '''
+    Initialize a class with a list of names for possible loading.
+
+    Arguments:
+      load:  list of elements in the LOAD= specification
+    '''
+    self.included = set([c for c in load if not c.startswith('-')])
+    self.excluded = set([c[1:] for c in load if c.startswith('-')])
+
+    if not self.included:
+      self.included = set(['all'])
+
+  def __call__(self, target):
+    '''
+    Returns True if the specified target's sconscript file should be
+    loaded, based on the initialized included and excluded lists.
+ ''' + return (target in self.included or + ('all' in self.included and not target in self.excluded)) + +if 'LOAD' in ARGUMENTS: + load = ARGUMENTS['LOAD'].split(',') +else: + load = [] +load_target = LoadTarget(load) + +sconscript_files = [] +for target, sconscript in sconscript_file_map.iteritems(): + if load_target(target): + sconscript_files.append(sconscript) + + +target_alias_list= [] + +conf_list = GetOption('conf_list') +if conf_list: + # In case the same --mode= value was specified multiple times. + conf_list = list(set(conf_list)) +else: + conf_list = [%(default_configuration)r] + +sconsbuild_dir = Dir(%(sconsbuild_dir)s) + + +def FilterOut(self, **kw): + kw = SCons.Environment.copy_non_reserved_keywords(kw) + for key, val in kw.items(): + envval = self.get(key, None) + if envval is None: + # No existing variable in the environment, so nothing to delete. + continue + + for vremove in val: + # Use while not if, so we can handle duplicates. + while vremove in envval: + envval.remove(vremove) + + self[key] = envval + + # TODO(sgk): SCons.Environment.Append() has much more logic to deal + # with various types of values. We should handle all those cases in here + # too. (If variable is a dict, etc.) + + +non_compilable_suffixes = { + 'LINUX' : set([ + '.bdic', + '.css', + '.dat', + '.fragment', + '.gperf', + '.h', + '.hh', + '.hpp', + '.html', + '.hxx', + '.idl', + '.in', + '.in0', + '.in1', + '.js', + '.mk', + '.rc', + '.sigs', + '', + ]), + 'WINDOWS' : set([ + '.h', + '.hh', + '.hpp', + '.dat', + '.idl', + '.in', + '.in0', + '.in1', + ]), +} + +def compilable(env, file): + base, ext = os.path.splitext(str(file)) + if ext in non_compilable_suffixes[env['TARGET_PLATFORM']]: + return False + return True + +def compilable_files(env, sources): + return [x for x in sources if compilable(env, x)] + +def GypProgram(env, target, source, *args, **kw): + source = compilable_files(env, source) + result = env.Program(target, source, *args, **kw) + if env.get('INCREMENTAL'): + env.Precious(result) + return result + +def GypTestProgram(env, target, source, *args, **kw): + source = compilable_files(env, source) + result = env.Program(target, source, *args, **kw) + if env.get('INCREMENTAL'): + env.Precious(*result) + return result + +def GypLibrary(env, target, source, *args, **kw): + source = compilable_files(env, source) + result = env.Library(target, source, *args, **kw) + return result + +def GypLoadableModule(env, target, source, *args, **kw): + source = compilable_files(env, source) + result = env.LoadableModule(target, source, *args, **kw) + return result + +def GypStaticLibrary(env, target, source, *args, **kw): + source = compilable_files(env, source) + result = env.StaticLibrary(target, source, *args, **kw) + return result + +def GypSharedLibrary(env, target, source, *args, **kw): + source = compilable_files(env, source) + result = env.SharedLibrary(target, source, *args, **kw) + if env.get('INCREMENTAL'): + env.Precious(result) + return result + +def add_gyp_methods(env): + env.AddMethod(GypProgram) + env.AddMethod(GypTestProgram) + env.AddMethod(GypLibrary) + env.AddMethod(GypLoadableModule) + env.AddMethod(GypStaticLibrary) + env.AddMethod(GypSharedLibrary) + + env.AddMethod(FilterOut) + + env.AddMethod(compilable) + + +base_env = Environment( + tools = %(scons_tools)s, + INTERMEDIATE_DIR='$OBJ_DIR/${COMPONENT_NAME}/_${TARGET_NAME}_intermediate', + LIB_DIR='$TOP_BUILDDIR/lib', + OBJ_DIR='$TOP_BUILDDIR/obj', + SCONSBUILD_DIR=sconsbuild_dir.abspath, + 
SHARED_INTERMEDIATE_DIR='$OBJ_DIR/_global_intermediate',
+    SRC_DIR=Dir(%(src_dir)r),
+    TARGET_PLATFORM='LINUX',
+    TOP_BUILDDIR='$SCONSBUILD_DIR/$CONFIG_NAME',
+    LIBPATH=['$LIB_DIR'],
+)
+
+if not GetOption('verbose'):
+  base_env.SetDefault(
+    ARCOMSTR='Creating library $TARGET',
+    ASCOMSTR='Assembling $TARGET',
+    CCCOMSTR='Compiling $TARGET',
+    CONCATSOURCECOMSTR='ConcatSource $TARGET',
+    CXXCOMSTR='Compiling $TARGET',
+    LDMODULECOMSTR='Building loadable module $TARGET',
+    LINKCOMSTR='Linking $TARGET',
+    MANIFESTCOMSTR='Updating manifest for $TARGET',
+    MIDLCOMSTR='Compiling IDL $TARGET',
+    PCHCOMSTR='Precompiling $TARGET',
+    RANLIBCOMSTR='Indexing $TARGET',
+    RCCOMSTR='Compiling resource $TARGET',
+    SHCCCOMSTR='Compiling $TARGET',
+    SHCXXCOMSTR='Compiling $TARGET',
+    SHLINKCOMSTR='Linking $TARGET',
+    SHMANIFESTCOMSTR='Updating manifest for $TARGET',
+  )
+
+add_gyp_methods(base_env)
+
+for conf in conf_list:
+  env = base_env.Clone(CONFIG_NAME=conf)
+  SConsignFile(env.File('$TOP_BUILDDIR/.sconsign').abspath)
+  for sconscript in sconscript_files:
+    target_alias = env.SConscript(sconscript, exports=['env'])
+    if target_alias:
+      target_alias_list.extend(target_alias)
+
+Default(Alias('all', target_alias_list))
+
+help_fmt = '''
+Usage: hammer [SCONS_OPTIONS] [VARIABLES] [TARGET] ...
+
+Local command-line build options:
+  --mode=CONFIG             Configuration to build:
+                              --mode=Debug [default]
+                              --mode=Release
+  --verbose                 Print actual executed command lines.
+
+Supported command-line build variables:
+  LOAD=[module,...]         Comma-separated list of components to load in the
+                              dependency graph ('-' prefix excludes)
+  PROGRESS=type             Display a progress indicator:
+                              name:  print each evaluated target name
+                              spinner:  print a spinner every 5 targets
+
+The following TARGET names can also be used as LOAD= module names:
+
+%%s
+'''
+
+if GetOption('help'):
+  def columnar_text(items, width=78, indent=2, sep=2):
+    result = []
+    colwidth = max(map(len, items)) + sep
+    cols = (width - indent) / colwidth
+    if cols < 1:
+      cols = 1
+    rows = (len(items) + cols - 1) / cols
+    indent = '%%*s' %% (indent, '')
+    sep = indent
+    for row in xrange(0, rows):
+      result.append(sep)
+      for i in xrange(row, len(items), rows):
+        result.append('%%-*s' %% (colwidth, items[i]))
+      sep = '\\n' + indent
+    result.append('\\n')
+    return ''.join(result)
+
+  load_list = set(sconscript_file_map.keys())
+  target_aliases = set(map(str, target_alias_list))
+
+  common = load_list & target_aliases
+  load_only = load_list - common
+  target_only = target_aliases - common
+  help_text = [help_fmt %% columnar_text(sorted(list(common)))]
+  if target_only:
+    fmt = "The following are additional TARGET names:\\n\\n%%s\\n"
+    help_text.append(fmt %% columnar_text(sorted(list(target_only))))
+  if load_only:
+    fmt = "The following are additional LOAD= module names:\\n\\n%%s\\n"
+    help_text.append(fmt %% columnar_text(sorted(list(load_only))))
+  Help(''.join(help_text))
+"""
+
+# TEMPLATE END
+#############################################################################
+
+
+def GenerateSConscriptWrapper(build_file, build_file_data, name,
+                              output_filename, sconscript_files,
+                              default_configuration):
+  """
+  Generates the "wrapper" SConscript file (analogous to the Visual Studio
+  solution) that calls all the individual target SConscript files.
+  """
+  output_dir = os.path.dirname(output_filename)
+  src_dir = build_file_data['_DEPTH']
+  src_dir_rel = gyp.common.RelativePath(src_dir, output_dir)
+  if not src_dir_rel:
+    src_dir_rel = '.'
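
The LOAD= filtering implemented by the LoadTarget class inside the wrapper
template above is compact enough to trace by hand. A standalone rendering of
the same logic (outside the template, so no %% escaping; the target names are
invented):

    load = 'net,-net_unittests'.split(',')  # as in LOAD=net,-net_unittests
    included = set([c for c in load if not c.startswith('-')])
    excluded = set([c[1:] for c in load if c.startswith('-')])
    if not included:
      included = set(['all'])               # empty include list means 'all'

    def load_target(target):
      return (target in included or
              ('all' in included and target not in excluded))

    assert load_target('net')
    assert not load_target('net_unittests')
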
+  scons_settings = build_file_data.get('scons_settings', {})
+  sconsbuild_dir = scons_settings.get('sconsbuild_dir', '#')
+  scons_tools = scons_settings.get('tools', ['default'])
+
+  sconscript_file_lines = ['dict(']
+  for target in sorted(sconscript_files.keys()):
+    sconscript = sconscript_files[target]
+    sconscript_file_lines.append('    %s = %r,' % (target, sconscript))
+  sconscript_file_lines.append(')')
+
+  fp = open(output_filename, 'w')
+  fp.write(header)
+  fp.write(_wrapper_template % {
+      'default_configuration' : default_configuration,
+      'name' : name,
+      'scons_tools' : repr(scons_tools),
+      'sconsbuild_dir' : repr(sconsbuild_dir),
+      'sconscript_files' : '\n'.join(sconscript_file_lines),
+      'src_dir' : src_dir_rel,
+  })
+  fp.close()
+
+  # Generate the SConstruct file that invokes the wrapper SConscript.
+  dir, fname = os.path.split(output_filename)
+  SConstruct = os.path.join(dir, 'SConstruct')
+  fp = open(SConstruct, 'w')
+  fp.write(header)
+  fp.write('SConscript(%s)\n' % repr(fname))
+  fp.close()
+
+
+def TargetFilename(target, build_file=None, output_suffix=''):
+  """Returns the .scons file name for the specified target.
+  """
+  if build_file is None:
+    build_file, target = gyp.common.ParseQualifiedTarget(target)[:2]
+  output_file = os.path.join(os.path.dirname(build_file),
+                             target + output_suffix + '.scons')
+  return output_file
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+  """
+  Generates all the output files for the specified targets.
+  """
+  options = params['options']
+
+  if options.generator_output:
+    def output_path(filename):
+      return filename.replace(params['cwd'], options.generator_output)
+  else:
+    def output_path(filename):
+      return filename
+
+  default_configuration = None
+
+  for qualified_target in target_list:
+    spec = target_dicts[qualified_target]
+    if spec['toolset'] != 'target':
+      raise Exception(
+          'Multiple toolsets not supported in scons build (target %s)' %
+          qualified_target)
+    scons_target = SCons.Target(spec)
+    if scons_target.is_ignored:
+      continue
+
+    # TODO:  assumes the default_configuration of the first
+    # non-Default target is the correct default for all targets.
+    # Need a better model for handling variation between targets.
+    if (not default_configuration and
+        spec['default_configuration'] != 'Default'):
+      default_configuration = spec['default_configuration']
+
+    build_file, target = gyp.common.ParseQualifiedTarget(qualified_target)[:2]
+    output_file = TargetFilename(target, build_file, options.suffix)
+    if options.generator_output:
+      output_file = output_path(output_file)
+
+    if not spec.has_key('libraries'):
+      spec['libraries'] = []
+
+    # Add dependent static library targets to the 'libraries' value.
+    deps = spec.get('dependencies', [])
+    spec['scons_dependencies'] = []
+    for d in deps:
+      td = target_dicts[d]
+      target_name = td['target_name']
+      spec['scons_dependencies'].append("Alias('%s')" % target_name)
+      if td['type'] in ('static_library', 'shared_library'):
+        libname = td.get('product_name', target_name)
+        spec['libraries'].append('lib' + libname)
+      if td['type'] == 'loadable_module':
+        prereqs = spec.get('scons_prerequisites', [])
+        # TODO:  parameterize with <(SHARED_LIBRARY_*) variables?
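
The dependency walk just above is where link information crosses target
boundaries: each static or shared library dependency contributes both an
Alias() prerequisite and a 'lib'-prefixed entry on the dependent's link line.
A trimmed standalone sketch (the qualified target name is invented):

    target_dicts = {
        'a.gyp:base#target': {'target_name': 'base',
                              'type': 'static_library'},
    }
    spec = {'dependencies': ['a.gyp:base#target'], 'libraries': [],
            'scons_dependencies': []}
    for d in spec['dependencies']:
      td = target_dicts[d]
      spec['scons_dependencies'].append("Alias('%s')" % td['target_name'])
      if td['type'] in ('static_library', 'shared_library'):
        libname = td.get('product_name', td['target_name'])
        spec['libraries'].append('lib' + libname)
    assert spec['libraries'] == ['libbase']
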
+ td_target = SCons.Target(td) + td_target.target_prefix = '${SHLIBPREFIX}' + td_target.target_suffix = '${SHLIBSUFFIX}' + + GenerateSConscript(output_file, spec, build_file, data[build_file]) + + if not default_configuration: + default_configuration = 'Default' + + for build_file in sorted(data.keys()): + path, ext = os.path.splitext(build_file) + if ext != '.gyp': + continue + output_dir, basename = os.path.split(path) + output_filename = path + '_main' + options.suffix + '.scons' + + all_targets = gyp.common.AllTargets(target_list, target_dicts, build_file) + sconscript_files = {} + for t in all_targets: + scons_target = SCons.Target(target_dicts[t]) + if scons_target.is_ignored: + continue + bf, target = gyp.common.ParseQualifiedTarget(t)[:2] + target_filename = TargetFilename(target, bf, options.suffix) + tpath = gyp.common.RelativePath(target_filename, output_dir) + sconscript_files[target] = tpath + + output_filename = output_path(output_filename) + if sconscript_files: + GenerateSConscriptWrapper(build_file, data[build_file], basename, + output_filename, sconscript_files, + default_configuration) diff --git a/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/generator/xcode.py b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/generator/xcode.py new file mode 100644 index 0000000000..8b17517b76 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/generator/xcode.py @@ -0,0 +1,1205 @@ +# Copyright (c) 2011 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import filecmp +import gyp.common +import gyp.xcodeproj_file +import errno +import os +import posixpath +import re +import shutil +import subprocess +import tempfile + + +# Project files generated by this module will use _intermediate_var as a +# custom Xcode setting whose value is a DerivedSources-like directory that's +# project-specific and configuration-specific. The normal choice, +# DERIVED_FILE_DIR, is target-specific, which is thought to be too restrictive +# as it is likely that multiple targets within a single project file will want +# to access the same set of generated files. The other option, +# PROJECT_DERIVED_FILE_DIR, is unsuitable because while it is project-specific, +# it is not configuration-specific. INTERMEDIATE_DIR is defined as +# $(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION). +_intermediate_var = 'INTERMEDIATE_DIR' + +# SHARED_INTERMEDIATE_DIR is the same, except that it is shared among all +# targets that share the same BUILT_PRODUCTS_DIR. +_shared_intermediate_var = 'SHARED_INTERMEDIATE_DIR' + +_library_search_paths_var = 'LIBRARY_SEARCH_PATHS' + +generator_default_variables = { + 'EXECUTABLE_PREFIX': '', + 'EXECUTABLE_SUFFIX': '', + 'STATIC_LIB_PREFIX': 'lib', + 'SHARED_LIB_PREFIX': 'lib', + 'STATIC_LIB_SUFFIX': '.a', + 'SHARED_LIB_SUFFIX': '.dylib', + # INTERMEDIATE_DIR is a place for targets to build up intermediate products. + # It is specific to each build environment. It is only guaranteed to exist + # and be constant within the context of a project, corresponding to a single + # input file. Some build environments may allow their intermediate directory + # to be shared on a wider scale, but this is not guaranteed. 
+ 'INTERMEDIATE_DIR': '$(%s)' % _intermediate_var, + 'OS': 'mac', + 'PRODUCT_DIR': '$(BUILT_PRODUCTS_DIR)', + 'LIB_DIR': '$(BUILT_PRODUCTS_DIR)', + 'RULE_INPUT_ROOT': '$(INPUT_FILE_BASE)', + 'RULE_INPUT_EXT': '$(INPUT_FILE_SUFFIX)', + 'RULE_INPUT_NAME': '$(INPUT_FILE_NAME)', + 'RULE_INPUT_PATH': '$(INPUT_FILE_PATH)', + 'RULE_INPUT_DIRNAME': '$(INPUT_FILE_DIRNAME)', + 'SHARED_INTERMEDIATE_DIR': '$(%s)' % _shared_intermediate_var, + 'CONFIGURATION_NAME': '$(CONFIGURATION)', +} + +# The Xcode-specific sections that hold paths. +generator_additional_path_sections = [ + 'mac_bundle_resources', + 'mac_framework_headers', + 'mac_framework_private_headers', + # 'mac_framework_dirs', input already handles _dirs endings. +] + +# The Xcode-specific keys that exist on targets and aren't moved down to +# configurations. +generator_additional_non_configuration_keys = [ + 'mac_bundle', + 'mac_bundle_resources', + 'mac_framework_headers', + 'mac_framework_private_headers', + 'xcode_create_dependents_test_runner', +] + +# We want to let any rules apply to files that are resources also. +generator_extra_sources_for_rules = [ + 'mac_bundle_resources', + 'mac_framework_headers', + 'mac_framework_private_headers', +] + +# Xcode's standard set of library directories, which don't need to be duplicated +# in LIBRARY_SEARCH_PATHS. This list is not exhaustive, but that's okay. +xcode_standard_library_dirs = frozenset([ + '$(SDKROOT)/usr/lib', + '$(SDKROOT)/usr/local/lib', +]) + +def CreateXCConfigurationList(configuration_names): + xccl = gyp.xcodeproj_file.XCConfigurationList({'buildConfigurations': []}) + if len(configuration_names) == 0: + configuration_names = ['Default'] + for configuration_name in configuration_names: + xcbc = gyp.xcodeproj_file.XCBuildConfiguration({ + 'name': configuration_name}) + xccl.AppendProperty('buildConfigurations', xcbc) + xccl.SetProperty('defaultConfigurationName', configuration_names[0]) + return xccl + + +class XcodeProject(object): + def __init__(self, gyp_path, path, build_file_dict): + self.gyp_path = gyp_path + self.path = path + self.project = gyp.xcodeproj_file.PBXProject(path=path) + projectDirPath = gyp.common.RelativePath( + os.path.dirname(os.path.abspath(self.gyp_path)), + os.path.dirname(path) or '.') + self.project.SetProperty('projectDirPath', projectDirPath) + self.project_file = \ + gyp.xcodeproj_file.XCProjectFile({'rootObject': self.project}) + self.build_file_dict = build_file_dict + + # TODO(mark): add destructor that cleans up self.path if created_dir is + # True and things didn't complete successfully. Or do something even + # better with "try"? + self.created_dir = False + try: + os.makedirs(self.path) + self.created_dir = True + except OSError, e: + if e.errno != errno.EEXIST: + raise + + def Finalize1(self, xcode_targets, serialize_all_tests): + # Collect a list of all of the build configuration names used by the + # various targets in the file. It is very heavily advised to keep each + # target in an entire project (even across multiple project files) using + # the same set of configuration names. + configurations = [] + for xct in self.project.GetProperty('targets'): + xccl = xct.GetProperty('buildConfigurationList') + xcbcs = xccl.GetProperty('buildConfigurations') + for xcbc in xcbcs: + name = xcbc.GetProperty('name') + if name not in configurations: + configurations.append(name) + + # Replace the XCConfigurationList attached to the PBXProject object with + # a new one specifying all of the configuration names used by the various + # targets. 
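
CreateXCConfigurationList above is the pattern used throughout this generator
whenever a fresh configuration list is needed; the try block that follows
wraps exactly this call. A usage sketch, assuming gyp's pylib is on sys.path
(the configuration names are examples):

    import gyp.xcodeproj_file

    xccl = gyp.xcodeproj_file.XCConfigurationList({'buildConfigurations': []})
    for name in ['Debug', 'Release']:
      xcbc = gyp.xcodeproj_file.XCBuildConfiguration({'name': name})
      xccl.AppendProperty('buildConfigurations', xcbc)
    # The first configuration becomes the default, as in the helper above.
    xccl.SetProperty('defaultConfigurationName', 'Debug')
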
+    try:
+      xccl = CreateXCConfigurationList(configurations)
+      self.project.SetProperty('buildConfigurationList', xccl)
+    except:
+      import sys
+      sys.stderr.write("Problem with gyp file %s\n" % self.gyp_path)
+      raise
+
+    # The need for this setting is explained above where _intermediate_var is
+    # defined.  The comments below about wanting to avoid project-wide build
+    # settings apply here too, but this needs to be set on a project-wide basis
+    # so that files relative to the _intermediate_var setting can be displayed
+    # properly in the Xcode UI.
+    #
+    # Note that for configuration-relative files such as anything relative to
+    # _intermediate_var, for the purposes of UI tree view display, Xcode will
+    # only resolve the configuration name once, when the project file is
+    # opened.  If the active build configuration is changed, the project file
+    # must be closed and reopened if it is desired for the tree view to update.
+    # This is filed as Apple radar 6588391.
+    xccl.SetBuildSetting(_intermediate_var,
+                         '$(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION)')
+    xccl.SetBuildSetting(_shared_intermediate_var,
+                         '$(SYMROOT)/DerivedSources/$(CONFIGURATION)')
+
+    # Set user-specified project-wide build settings and config files.  This
+    # is intended to be used very sparingly.  Really, almost everything should
+    # go into target-specific build settings sections.  The project-wide
+    # settings are only intended to be used in cases where Xcode attempts to
+    # resolve variable references in a project context as opposed to a target
+    # context, such as when resolving sourceTree references while building up
+    # the tree view for UI display.
+    # Any values set globally are applied to all configurations, then any
+    # per-configuration values are applied.
+    for xck, xcv in self.build_file_dict.get('xcode_settings', {}).iteritems():
+      xccl.SetBuildSetting(xck, xcv)
+    if 'xcode_config_file' in self.build_file_dict:
+      config_ref = self.project.AddOrGetFileInRootGroup(
+          self.build_file_dict['xcode_config_file'])
+      xccl.SetBaseConfiguration(config_ref)
+    build_file_configurations = self.build_file_dict.get('configurations', {})
+    if build_file_configurations:
+      for config_name in configurations:
+        build_file_configuration_named = \
+            build_file_configurations.get(config_name, {})
+        if build_file_configuration_named:
+          xcc = xccl.ConfigurationNamed(config_name)
+          for xck, xcv in build_file_configuration_named.get('xcode_settings',
+                                                             {}).iteritems():
+            xcc.SetBuildSetting(xck, xcv)
+          if 'xcode_config_file' in build_file_configuration_named:
+            config_ref = self.project.AddOrGetFileInRootGroup(
+                build_file_configurations[config_name]['xcode_config_file'])
+            xcc.SetBaseConfiguration(config_ref)
+
+    # Sort the targets based on how they appeared in the input.
+    # TODO(mark): Like a lot of other things here, this assumes internal
+    # knowledge of PBXProject - in this case, of its "targets" property.
+
+    # ordinary_targets are ordinary targets that are already in the project
+    # file.  run_test_targets are the targets that run unittests and should be
+    # used for the Run All Tests target.  support_targets are the action/rule
+    # targets used by GYP file targets, just kept for the assert check.
+    ordinary_targets = []
+    run_test_targets = []
+    support_targets = []
+
+    # targets is the full list of targets in the project.
+    targets = []
+
+    # does the project define its own "all"?
+    has_custom_all = False
+
+    # targets_for_all is the list of ordinary_targets that should be listed
+    # in this project's "All" target.
It includes each non_runtest_target
+    # that does not have suppress_wildcard set.
+    targets_for_all = []
+
+    for target in self.build_file_dict['targets']:
+      target_name = target['target_name']
+      toolset = target['toolset']
+      qualified_target = gyp.common.QualifiedTarget(self.gyp_path, target_name,
+                                                    toolset)
+      xcode_target = xcode_targets[qualified_target]
+      # Make sure that the target being added to the sorted list is already in
+      # the unsorted list.
+      assert xcode_target in self.project._properties['targets']
+      targets.append(xcode_target)
+      ordinary_targets.append(xcode_target)
+      if xcode_target.support_target:
+        support_targets.append(xcode_target.support_target)
+        targets.append(xcode_target.support_target)
+
+      if not int(target.get('suppress_wildcard', False)):
+        targets_for_all.append(xcode_target)
+
+      if target_name.lower() == 'all':
+        has_custom_all = True
+
+      # If this target has a 'run_as' attribute, add its target to the
+      # targets, and add it to the test targets.
+      if target.get('run_as'):
+        # Make a target to run something.  It should have one
+        # dependency, the parent xcode target.
+        xccl = CreateXCConfigurationList(configurations)
+        run_target = gyp.xcodeproj_file.PBXAggregateTarget({
+              'name':                   'Run ' + target_name,
+              'productName':            xcode_target.GetProperty('productName'),
+              'buildConfigurationList': xccl,
+            },
+            parent=self.project)
+        run_target.AddDependency(xcode_target)
+
+        command = target['run_as']
+        script = ''
+        if command.get('working_directory'):
+          script = script + 'cd "%s"\n' % \
+                   gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
+                       command.get('working_directory'))
+
+        if command.get('environment'):
+          script = script + "\n".join(
+            ['export %s="%s"' %
+             (key, gyp.xcodeproj_file.ConvertVariablesToShellSyntax(val))
+             for (key, val) in command.get('environment').iteritems()]) + "\n"
+
+        # Some tests end up using sockets, files on disk, etc. and can get
+        # confused if more than one test runs at a time.  The generator
+        # flag 'xcode_serialize_all_test_runs' controls the forcing of all
+        # tests to run serially.  It defaults to True.  To get serial runs,
+        # this little bit of Python does the same as the Linux flock utility
+        # to make sure only one runs at a time.
+        command_prefix = ''
+        if serialize_all_tests:
+          command_prefix = \
+"""python -c "import fcntl, subprocess, sys
+file = open('$TMPDIR/GYP_serialize_test_runs', 'a')
+fcntl.flock(file.fileno(), fcntl.LOCK_EX)
+sys.exit(subprocess.call(sys.argv[1:]))" """
+
+        # If we were unable to exec for some reason, we want to exit
+        # with an error, and fixup variable references to be shell
+        # syntax instead of xcode syntax.
+        script = script + 'exec ' + command_prefix + '%s\nexit 1\n' % \
+                 gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
+                     gyp.common.EncodePOSIXShellList(command.get('action')))
+
+        ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
+              'shellScript':      script,
+              'showEnvVarsInLog': 0,
+            })
+        run_target.AppendProperty('buildPhases', ssbp)
+
+        # Add the run target to the project file.
+        targets.append(run_target)
+        run_test_targets.append(run_target)
+        xcode_target.test_runner = run_target
+
+
+    # Make sure that the list of targets being replaced is the same length as
+    # the one replacing it, but allow for the added test runner targets.
+    assert len(self.project._properties['targets']) == \
+           len(ordinary_targets) + len(support_targets)
+
+    self.project._properties['targets'] = targets
+
+    # Get rid of unnecessary levels of depth in groups like the Source group.
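
Before the group cleanup continues below, the embedded python -c wrapper above
deserves a standalone rendering: every run-target funnels through one advisory
lock, which is the whole serialization story. The same thing as a plain
script, with a fixed lock path standing in for $TMPDIR:

    import fcntl
    import subprocess
    import sys

    # Take an exclusive lock; blocks until any other test run finishes.
    lock_file = open('/tmp/GYP_serialize_test_runs', 'a')
    fcntl.flock(lock_file.fileno(), fcntl.LOCK_EX)
    # Run the real test command and propagate its exit status.
    sys.exit(subprocess.call(sys.argv[1:]))
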
+    self.project.RootGroupsTakeOverOnlyChildren(True)
+
+    # Sort the groups nicely.  Do this after sorting the targets, because the
+    # Products group is sorted based on the order of the targets.
+    self.project.SortGroups()
+
+    # Create an "All" target if there's more than one target in this project
+    # file and the project didn't define its own "All" target.  Put a generated
+    # "All" target first so that people opening up the project for the first
+    # time will build everything by default.
+    if len(targets_for_all) > 1 and not has_custom_all:
+      xccl = CreateXCConfigurationList(configurations)
+      all_target = gyp.xcodeproj_file.PBXAggregateTarget(
+          {
+            'buildConfigurationList': xccl,
+            'name': 'All',
+          },
+          parent=self.project)
+
+      for target in targets_for_all:
+        all_target.AddDependency(target)
+
+      # TODO(mark): This is evil because it relies on internal knowledge of
+      # PBXProject._properties.  It's important to get the "All" target first,
+      # though.
+      self.project._properties['targets'].insert(0, all_target)
+
+    # The same, but for run_test_targets.
+    if len(run_test_targets) > 1:
+      xccl = CreateXCConfigurationList(configurations)
+      run_all_tests_target = gyp.xcodeproj_file.PBXAggregateTarget(
+          {
+            'buildConfigurationList': xccl,
+            'name': 'Run All Tests',
+          },
+          parent=self.project)
+      for run_test_target in run_test_targets:
+        run_all_tests_target.AddDependency(run_test_target)
+
+      # Insert after the "All" target, which must exist if there is more than
+      # one run_test_target.
+      self.project._properties['targets'].insert(1, run_all_tests_target)
+
+  def Finalize2(self, xcode_targets, xcode_target_to_target_dict):
+    # Finalize2 needs to happen in a separate step because the process of
+    # updating references to other projects depends on the ordering of targets
+    # within remote project files.  Finalize1 is responsible for sorting duty,
+    # and once all project files are sorted, Finalize2 can come in and update
+    # these references.
+
+    # To support making a "test runner" target that will run all the tests
+    # that are direct dependents of any given target, we look for
+    # xcode_create_dependents_test_runner being set on an Aggregate target,
+    # and generate a second target that will run the test runners found under
+    # the marked target.
+    for bf_tgt in self.build_file_dict['targets']:
+      if int(bf_tgt.get('xcode_create_dependents_test_runner', 0)):
+        tgt_name = bf_tgt['target_name']
+        toolset = bf_tgt['toolset']
+        qualified_target = gyp.common.QualifiedTarget(self.gyp_path,
+                                                      tgt_name, toolset)
+        xcode_target = xcode_targets[qualified_target]
+        if isinstance(xcode_target, gyp.xcodeproj_file.PBXAggregateTarget):
+          # Collect all the run test targets.
+          all_run_tests = []
+          pbxtds = xcode_target.GetProperty('dependencies')
+          for pbxtd in pbxtds:
+            pbxcip = pbxtd.GetProperty('targetProxy')
+            dependency_xct = pbxcip.GetProperty('remoteGlobalIDString')
+            if hasattr(dependency_xct, 'test_runner'):
+              all_run_tests.append(dependency_xct.test_runner)
+
+          # Directly depend on all the runners as they depend on the target
+          # that builds them.
+          if len(all_run_tests) > 0:
+            run_all_target = gyp.xcodeproj_file.PBXAggregateTarget({
+                  'name': 'Run %s Tests' % tgt_name,
+                  'productName': tgt_name,
+                },
+                parent=self.project)
+            for run_test_target in all_run_tests:
+              run_all_target.AddDependency(run_test_target)
+
+            # Insert the test runner after the related target.
+ idx = self.project._properties['targets'].index(xcode_target) + self.project._properties['targets'].insert(idx + 1, run_all_target) + + # Update all references to other projects, to make sure that the lists of + # remote products are complete. Otherwise, Xcode will fill them in when + # it opens the project file, which will result in unnecessary diffs. + # TODO(mark): This is evil because it relies on internal knowledge of + # PBXProject._other_pbxprojects. + for other_pbxproject in self.project._other_pbxprojects.keys(): + self.project.AddOrGetProjectReference(other_pbxproject) + + self.project.SortRemoteProductReferences() + + # Give everything an ID. + self.project_file.ComputeIDs() + + # Make sure that no two objects in the project file have the same ID. If + # multiple objects wind up with the same ID, upon loading the file, Xcode + # will only recognize one object (the last one in the file?) and the + # results are unpredictable. + self.project_file.EnsureNoIDCollisions() + + def Write(self): + # Write the project file to a temporary location first. Xcode watches for + # changes to the project file and presents a UI sheet offering to reload + # the project when it does change. However, in some cases, especially when + # multiple projects are open or when Xcode is busy, things don't work so + # seamlessly. Sometimes, Xcode is able to detect that a project file has + # changed but can't unload it because something else is referencing it. + # To mitigate this problem, and to avoid even having Xcode present the UI + # sheet when an open project is rewritten for inconsequential changes, the + # project file is written to a temporary file in the xcodeproj directory + # first. The new temporary file is then compared to the existing project + # file, if any. If they differ, the new file replaces the old; otherwise, + # the new project file is simply deleted. Xcode properly detects a file + # being renamed over an open project file as a change and so it remains + # able to present the "project file changed" sheet under this system. + # Writing to a temporary file first also avoids the possible problem of + # Xcode rereading an incomplete project file. + (output_fd, new_pbxproj_path) = \ + tempfile.mkstemp(suffix='.tmp', prefix='project.pbxproj.gyp.', + dir=self.path) + + try: + output_file = os.fdopen(output_fd, 'wb') + + self.project_file.Print(output_file) + output_file.close() + + pbxproj_path = os.path.join(self.path, 'project.pbxproj') + + same = False + try: + same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False) + except OSError, e: + if e.errno != errno.ENOENT: + raise + + if same: + # The new file is identical to the old one, just get rid of the new + # one. + os.unlink(new_pbxproj_path) + else: + # The new file is different from the old one, or there is no old one. + # Rename the new file to the permanent name. + # + # tempfile.mkstemp uses an overly restrictive mode, resulting in a + # file that can only be read by the owner, regardless of the umask. + # There's no reason to not respect the umask here, which means that + # an extra hoop is required to fetch it and reset the new file's mode. + # + # No way to get the umask without setting a new one? Set a safe one + # and then set it back to the old value. + umask = os.umask(077) + os.umask(umask) + + os.chmod(new_pbxproj_path, 0666 & ~umask) + os.rename(new_pbxproj_path, pbxproj_path) + + except Exception: + # Don't leave turds behind. 
In fact, if this code was responsible for + # creating the xcodeproj directory, get rid of that too. + os.unlink(new_pbxproj_path) + if self.created_dir: + shutil.rmtree(self.path, True) + raise + + +cached_xcode_version = None +def InstalledXcodeVersion(): + """Fetches the installed version of Xcode, returns empty string if it is + unable to figure it out.""" + + global cached_xcode_version + if not cached_xcode_version is None: + return cached_xcode_version + + # Default to an empty string + cached_xcode_version = '' + + # Collect the xcodebuild's version information. + try: + import subprocess + cmd = ['/usr/bin/xcodebuild', '-version'] + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE) + xcodebuild_version_info = proc.communicate()[0] + # Any error, return empty string + if proc.returncode: + xcodebuild_version_info = '' + except OSError: + # We failed to launch the tool + xcodebuild_version_info = '' + + # Pull out the Xcode version itself. + match_line = re.search('^Xcode (.*)$', xcodebuild_version_info, re.MULTILINE) + if match_line: + cached_xcode_version = match_line.group(1) + # Done! + return cached_xcode_version + + +def AddSourceToTarget(source, pbxp, xct): + # TODO(mark): Perhaps source_extensions and library_extensions can be made a + # little bit fancier. + source_extensions = ['c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's'] + + # .o is conceptually more of a "source" than a "library," but Xcode thinks + # of "sources" as things to compile and "libraries" (or "frameworks") as + # things to link with. Adding an object file to an Xcode target's frameworks + # phase works properly. + library_extensions = ['a', 'dylib', 'framework', 'o'] + + basename = posixpath.basename(source) + (root, ext) = posixpath.splitext(basename) + if ext != '': + ext = ext[1:].lower() + + if ext in source_extensions: + xct.SourcesPhase().AddFile(source) + elif ext in library_extensions: + xct.FrameworksPhase().AddFile(source) + else: + # Files that aren't added to a sources or frameworks build phase can still + # go into the project file, just not as part of a build phase. + pbxp.AddOrGetFileInRootGroup(source) + + +def AddResourceToTarget(resource, pbxp, xct): + # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call + # where it's used. + xct.ResourcesPhase().AddFile(resource) + + +def AddHeaderToTarget(header, pbxp, xct, is_public): + # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call + # where it's used. + settings = '{ATTRIBUTES = (%s, ); }' % ('Private', 'Public')[is_public] + xct.HeadersPhase().AddFile(header, settings) + + +_xcode_variable_re = re.compile('(\$\((.*?)\))') +def ExpandXcodeVariables(string, expansions): + """Expands Xcode-style $(VARIABLES) in string per the expansions dict. + + In some rare cases, it is appropriate to expand Xcode variables when a + project file is generated. For any substring $(VAR) in string, if VAR is a + key in the expansions dict, $(VAR) will be replaced with expansions[VAR]. + Any $(VAR) substring in string for which VAR is not a key in the expansions + dict will remain in the returned string. 
+ """ + + matches = _xcode_variable_re.findall(string) + if matches == None: + return string + + matches.reverse() + for match in matches: + (to_replace, variable) = match + if not variable in expansions: + continue + + replacement = expansions[variable] + string = re.sub(re.escape(to_replace), replacement, string) + + return string + + +def EscapeXCodeArgument(s): + """We must escape the arguments that we give to XCode so that it knows not to + split on spaces and to respect backslash and quote literals.""" + s = s.replace('\\', '\\\\') + s = s.replace('"', '\\"') + return '"' + s + '"' + + +def GenerateOutput(target_list, target_dicts, data, params): + options = params['options'] + generator_flags = params.get('generator_flags', {}) + parallel_builds = generator_flags.get('xcode_parallel_builds', True) + serialize_all_tests = \ + generator_flags.get('xcode_serialize_all_test_runs', True) + project_version = generator_flags.get('xcode_project_version', None) + skip_excluded_files = \ + not generator_flags.get('xcode_list_excluded_files', True) + xcode_projects = {} + for build_file, build_file_dict in data.iteritems(): + (build_file_root, build_file_ext) = os.path.splitext(build_file) + if build_file_ext != '.gyp': + continue + xcodeproj_path = build_file_root + options.suffix + '.xcodeproj' + if options.generator_output: + xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path) + xcp = XcodeProject(build_file, xcodeproj_path, build_file_dict) + xcode_projects[build_file] = xcp + pbxp = xcp.project + + if parallel_builds: + pbxp.SetProperty('attributes', + {'BuildIndependentTargetsInParallel': 'YES'}) + if project_version: + xcp.project_file.SetXcodeVersion(project_version) + + main_group = pbxp.GetProperty('mainGroup') + build_group = gyp.xcodeproj_file.PBXGroup({'name': 'Build'}) + main_group.AppendChild(build_group) + for included_file in build_file_dict['included_files']: + build_group.AddOrGetFileByPath(included_file, False) + + xcode_targets = {} + xcode_target_to_target_dict = {} + for qualified_target in target_list: + [build_file, target_name, toolset] = \ + gyp.common.ParseQualifiedTarget(qualified_target) + + spec = target_dicts[qualified_target] + if spec['toolset'] != 'target': + raise Exception( + 'Multiple toolsets not supported in xcode build (target %s)' % + qualified_target) + configuration_names = [spec['default_configuration']] + for configuration_name in sorted(spec['configurations'].keys()): + if configuration_name not in configuration_names: + configuration_names.append(configuration_name) + xcp = xcode_projects[build_file] + pbxp = xcp.project + + # Set up the configurations for the target according to the list of names + # supplied. + xccl = CreateXCConfigurationList(configuration_names) + + # Create an XCTarget subclass object for the target. The type with + # "+bundle" appended will be used if the target has "mac_bundle" set. + # loadable_modules not in a mac_bundle are mapped to + # com.googlecode.gyp.xcode.bundle, a pseudo-type that xcode.py interprets + # to create a single-file mh_bundle. 
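+    # (Illustrative: a 'shared_library' target with 'mac_bundle': 1 selects
+    # the 'shared_library+bundle' key below and becomes a framework, while
+    # the same target without 'mac_bundle' maps to a plain dylib.)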
+    _types = {
+      'executable': 'com.apple.product-type.tool',
+      'loadable_module': 'com.googlecode.gyp.xcode.bundle',
+      'shared_library': 'com.apple.product-type.library.dynamic',
+      'static_library': 'com.apple.product-type.library.static',
+      'executable+bundle': 'com.apple.product-type.application',
+      'loadable_module+bundle': 'com.apple.product-type.bundle',
+      'shared_library+bundle': 'com.apple.product-type.framework',
+    }
+
+    target_properties = {
+      'buildConfigurationList': xccl,
+      'name': target_name,
+    }
+
+    type = spec['type']
+    is_bundle = int(spec.get('mac_bundle', 0))
+    if type != 'none':
+      type_bundle_key = type
+      if is_bundle:
+        type_bundle_key += '+bundle'
+      xctarget_type = gyp.xcodeproj_file.PBXNativeTarget
+      try:
+        target_properties['productType'] = _types[type_bundle_key]
+      except KeyError, e:
+        gyp.common.ExceptionAppend(e, "-- unknown product type while "
+                                   "writing target %s" % target_name)
+        raise
+    else:
+      xctarget_type = gyp.xcodeproj_file.PBXAggregateTarget
+      assert not is_bundle, (
+          'mac_bundle targets cannot have type none (target "%s")' %
+          target_name)
+
+    target_product_name = spec.get('product_name')
+    if target_product_name is not None:
+      target_properties['productName'] = target_product_name
+
+    xct = xctarget_type(target_properties, parent=pbxp,
+                        force_outdir=spec.get('product_dir'),
+                        force_prefix=spec.get('product_prefix'),
+                        force_extension=spec.get('product_extension'))
+    pbxp.AppendProperty('targets', xct)
+    xcode_targets[qualified_target] = xct
+    xcode_target_to_target_dict[xct] = spec
+
+    spec_actions = spec.get('actions', [])
+    spec_rules = spec.get('rules', [])
+
+    # Xcode has some "issues" with checking dependencies for the "Compile
+    # sources" step with any source files/headers generated by actions/rules.
+    # To work around this, if a target is building anything directly (not
+    # type "none"), then a second target is used to run the GYP actions/rules
+    # and is made a dependency of this target.  This way the work is done
+    # before the dependency checks for what should be recompiled.
+    support_xct = None
+    if type != 'none' and (spec_actions or spec_rules):
+      support_xccl = CreateXCConfigurationList(configuration_names)
+      support_target_properties = {
+        'buildConfigurationList': support_xccl,
+        'name': target_name + ' Support',
+      }
+      if target_product_name:
+        support_target_properties['productName'] = \
+            target_product_name + ' Support'
+      support_xct = \
+          gyp.xcodeproj_file.PBXAggregateTarget(support_target_properties,
+                                                parent=pbxp)
+      pbxp.AppendProperty('targets', support_xct)
+      xct.AddDependency(support_xct)
+      # Hang the support target off the main target so it can be tested/found
+      # by the generator during Finalize.
+      xct.support_target = support_xct
+
+    prebuild_index = 0
+
+    # Add custom shell script phases for "actions" sections.
+    for action in spec_actions:
+      # There's no need to write anything into the script to ensure that the
+      # output directories already exist, because Xcode will look at the
+      # declared outputs and automatically ensure that they exist for us.
+
+      # Do we have a message to print when this action runs?
+      message = action.get('message')
+      if message:
+        message = 'echo note: ' + gyp.common.EncodePOSIXShellArgument(message)
+      else:
+        message = ''
+
+      # Turn the list into a string that can be passed to a shell.
+      action_string = gyp.common.EncodePOSIXShellList(action['action'])
+
+      # Convert Xcode-type variable references to sh-compatible environment
+      # variable references.
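+      # (Illustrative: "$(BUILT_PRODUCTS_DIR)/gen.sh" becomes
+      # "${BUILT_PRODUCTS_DIR}/gen.sh", which sh can expand from the
+      # environment Xcode sets up for the script phase.)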
+ message_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(message) + action_string_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax( + action_string) + + script = '' + # Include the optional message + if message_sh: + script += message_sh + '\n' + # Be sure the script runs in exec, and that if exec fails, the script + # exits signalling an error. + script += 'exec ' + action_string_sh + '\nexit 1\n' + ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ + 'inputPaths': action['inputs'], + 'name': 'Action "' + action['action_name'] + '"', + 'outputPaths': action['outputs'], + 'shellScript': script, + 'showEnvVarsInLog': 0, + }) + + if support_xct: + support_xct.AppendProperty('buildPhases', ssbp) + else: + # TODO(mark): this assumes too much knowledge of the internals of + # xcodeproj_file; some of these smarts should move into xcodeproj_file + # itself. + xct._properties['buildPhases'].insert(prebuild_index, ssbp) + prebuild_index = prebuild_index + 1 + + # TODO(mark): Should verify that at most one of these is specified. + if int(action.get('process_outputs_as_sources', False)): + for output in action['outputs']: + AddSourceToTarget(output, pbxp, xct) + + if int(action.get('process_outputs_as_mac_bundle_resources', False)): + for output in action['outputs']: + AddResourceToTarget(output, pbxp, xct) + + # tgt_mac_bundle_resources holds the list of bundle resources so + # the rule processing can check against it. + if is_bundle: + tgt_mac_bundle_resources = spec.get('mac_bundle_resources', []) + else: + tgt_mac_bundle_resources = [] + + # Add custom shell script phases driving "make" for "rules" sections. + # + # Xcode's built-in rule support is almost powerful enough to use directly, + # but there are a few significant deficiencies that render them unusable. + # There are workarounds for some of its inadequacies, but in aggregate, + # the workarounds added complexity to the generator, and some workarounds + # actually require input files to be crafted more carefully than I'd like. + # Consequently, until Xcode rules are made more capable, "rules" input + # sections will be handled in Xcode output by shell script build phases + # performed prior to the compilation phase. + # + # The following problems with Xcode rules were found. The numbers are + # Apple radar IDs. I hope that these shortcomings are addressed, I really + # liked having the rules handled directly in Xcode during the period that + # I was prototyping this. + # + # 6588600 Xcode compiles custom script rule outputs too soon, compilation + # fails. This occurs when rule outputs from distinct inputs are + # interdependent. The only workaround is to put rules and their + # inputs in a separate target from the one that compiles the rule + # outputs. This requires input file cooperation and it means that + # process_outputs_as_sources is unusable. + # 6584932 Need to declare that custom rule outputs should be excluded from + # compilation. A possible workaround is to lie to Xcode about a + # rule's output, giving it a dummy file it doesn't know how to + # compile. The rule action script would need to touch the dummy. + # 6584839 I need a way to declare additional inputs to a custom rule. + # A possible workaround is a shell script phase prior to + # compilation that touches a rule's primary input files if any + # would-be additional inputs are newer than the output. Modifying + # the source tree - even just modification times - feels dirty. 
+    # 6564240  Xcode "custom script" build rules always dump all environment
+    #          variables.  This is a low-priority problem and is not a
+    #          show-stopper.
+    rules_by_ext = {}
+    for rule in spec_rules:
+      rules_by_ext[rule['extension']] = rule
+
+      # First, some definitions:
+      #
+      # A "rule source" is a file that was listed in a target's "sources"
+      # list and will have a rule applied to it on the basis of matching the
+      # rule's "extensions" attribute.  Rule sources are direct inputs to
+      # rules.
+      #
+      # Rule definitions may specify additional inputs in their "inputs"
+      # attribute.  These additional inputs are used for dependency tracking
+      # purposes.
+      #
+      # A "concrete output" is a rule output with input-dependent variables
+      # resolved.  For example, given a rule with:
+      #   'extension': 'ext', 'outputs': ['$(INPUT_FILE_BASE).cc'],
+      # if the target's "sources" list contained "one.ext" and "two.ext",
+      # the "concrete output" for rule input "two.ext" would be "two.cc".  If
+      # a rule specifies multiple outputs, each input file that the rule is
+      # applied to will have the same number of concrete outputs.
+      #
+      # If any concrete outputs are outdated or missing relative to their
+      # corresponding rule_source or to any specified additional input, the
+      # rule action must be performed to generate the concrete outputs.
+
+      # concrete_outputs_by_rule_source will have an item at the same index
+      # as the rule['rule_sources'] that it corresponds to.  Each item is a
+      # list of all of the concrete outputs for the rule_source.
+      concrete_outputs_by_rule_source = []
+
+      # concrete_outputs_all is a flat list of all concrete outputs that this
+      # rule is able to produce, given the known set of input files
+      # (rule_sources) that apply to it.
+      concrete_outputs_all = []
+
+      # messages & actions are keyed by the same indices as rule['rule_sources']
+      # and concrete_outputs_by_rule_source.  They contain the message and
+      # action to perform after resolving input-dependent variables.  The
+      # message is optional, in which case None is stored for each rule source.
+      messages = []
+      actions = []
+
+      for rule_source in rule.get('rule_sources', []):
+        rule_source_dirname, rule_source_basename = \
+            posixpath.split(rule_source)
+        (rule_source_root, rule_source_ext) = \
+            posixpath.splitext(rule_source_basename)
+
+        # These are the same variable names that Xcode uses for its own native
+        # rule support.  Because Xcode's rule engine is not being used, they
+        # need to be expanded as they are written to the makefile.
+        rule_input_dict = {
+          'INPUT_FILE_BASE': rule_source_root,
+          'INPUT_FILE_SUFFIX': rule_source_ext,
+          'INPUT_FILE_NAME': rule_source_basename,
+          'INPUT_FILE_PATH': rule_source,
+          'INPUT_FILE_DIRNAME': rule_source_dirname,
+        }
+
+        concrete_outputs_for_this_rule_source = []
+        for output in rule.get('outputs', []):
+          # Fortunately, Xcode and make both use $(VAR) format for their
+          # variables, so the expansion is the only transformation necessary.
+          # Any remaining $(VAR)-type variables in the string can be given
+          # directly to make, which will pick up the correct settings from
+          # what Xcode puts into the environment.
+          concrete_output = ExpandXcodeVariables(output, rule_input_dict)
+          concrete_outputs_for_this_rule_source.append(concrete_output)
+
+          # Add all concrete outputs to the project.
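+          # (Continuing the example above: both "one.cc" and "two.cc" get
+          # entries in the project file, even though only the rule's shell
+          # script phase knows how to generate them.)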
+          pbxp.AddOrGetFileInRootGroup(concrete_output)
+
+        concrete_outputs_by_rule_source.append( \
+            concrete_outputs_for_this_rule_source)
+        concrete_outputs_all.extend(concrete_outputs_for_this_rule_source)
+
+        # TODO(mark): Should verify that at most one of these is specified.
+        if int(rule.get('process_outputs_as_sources', False)):
+          for output in concrete_outputs_for_this_rule_source:
+            AddSourceToTarget(output, pbxp, xct)
+
+        # If the file came from the mac_bundle_resources list or if the rule
+        # is marked to process outputs as bundle resource, do so.
+        was_mac_bundle_resource = rule_source in tgt_mac_bundle_resources
+        if was_mac_bundle_resource or \
+            int(rule.get('process_outputs_as_mac_bundle_resources', False)):
+          for output in concrete_outputs_for_this_rule_source:
+            AddResourceToTarget(output, pbxp, xct)
+
+        # Do we have a message to print when this rule runs?
+        message = rule.get('message')
+        if message:
+          message = gyp.common.EncodePOSIXShellArgument(message)
+          message = ExpandXcodeVariables(message, rule_input_dict)
+        messages.append(message)
+
+        # Turn the list into a string that can be passed to a shell.
+        action_string = gyp.common.EncodePOSIXShellList(rule['action'])
+
+        action = ExpandXcodeVariables(action_string, rule_input_dict)
+        actions.append(action)
+
+      if len(concrete_outputs_all) > 0:
+        # TODO(mark): There's a possibility for collision here.  Consider
+        # target "t" rule "A_r" and target "t_A" rule "r".
+        makefile_name = '%s_%s.make' % (target_name, rule['rule_name'])
+        makefile_path = os.path.join(xcode_projects[build_file].path,
+                                     makefile_name)
+        # TODO(mark): try/close?  Write to a temporary file and swap it only
+        # if it's got changes?
+        makefile = open(makefile_path, 'wb')
+
+        # make will build the first target in the makefile by default.  By
+        # convention, it's called "all".  List all (or at least one)
+        # concrete output for each rule source as a prerequisite of the "all"
+        # target.
+        makefile.write('all: \\\n')
+        for concrete_output_index in \
+            xrange(0, len(concrete_outputs_by_rule_source)):
+          # Only list the first (index [0]) concrete output of each input
+          # in the "all" target.  Otherwise, a parallel make (-j > 1) would
+          # attempt to process each input multiple times simultaneously.
+          # Otherwise, "all" could just contain the entire list of
+          # concrete_outputs_all.
+          concrete_output = \
+              concrete_outputs_by_rule_source[concrete_output_index][0]
+          if concrete_output_index == len(concrete_outputs_by_rule_source) - 1:
+            eol = ''
+          else:
+            eol = ' \\'
+          makefile.write('  %s%s\n' % (concrete_output, eol))
+
+        for (rule_source, concrete_outputs, message, action) in \
+            zip(rule['rule_sources'], concrete_outputs_by_rule_source,
+                messages, actions):
+          makefile.write('\n')
+
+          # Add a rule that declares it can build each concrete output of a
+          # rule source.  Collect the names of the directories that are
+          # required.
+          concrete_output_dirs = []
+          for concrete_output_index in xrange(0, len(concrete_outputs)):
+            concrete_output = concrete_outputs[concrete_output_index]
+            if concrete_output_index == 0:
+              bol = ''
+            else:
+              bol = '    '
+            makefile.write('%s%s \\\n' % (bol, concrete_output))
+
+            concrete_output_dir = posixpath.dirname(concrete_output)
+            if (concrete_output_dir and
+                concrete_output_dir not in concrete_output_dirs):
+              concrete_output_dirs.append(concrete_output_dir)
+
+          makefile.write('    : \\\n')
+
+          # The prerequisites for this rule are the rule source itself and
+          # the set of additional rule inputs, if any.
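+          # (Illustrative: for a rule on "foo.ext" producing "foo.cc" with
+          # one extra input "tool.py", the fragment written out here looks
+          # roughly like
+          #   foo.cc \
+          #       : \
+          #     foo.ext \
+          #     tool.py
+          #   \t@mkdir -p "gen/dir"
+          #   \t<the rule's action command>
+          # with real tabs in place of \t.)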
+ prerequisites = [rule_source] + prerequisites.extend(rule.get('inputs', [])) + for prerequisite_index in xrange(0, len(prerequisites)): + prerequisite = prerequisites[prerequisite_index] + if prerequisite_index == len(prerequisites) - 1: + eol = '' + else: + eol = ' \\' + makefile.write(' %s%s\n' % (prerequisite, eol)) + + # Make sure that output directories exist before executing the rule + # action. + if len(concrete_output_dirs) > 0: + makefile.write('\t@mkdir -p "%s"\n' % + '" "'.join(concrete_output_dirs)) + + # The rule message and action have already had the necessary variable + # substitutions performed. + if message: + # Mark it with note: so Xcode picks it up in build output. + makefile.write('\t@echo note: %s\n' % message) + makefile.write('\t%s\n' % action) + + makefile.close() + + # It might be nice to ensure that needed output directories exist + # here rather than in each target in the Makefile, but that wouldn't + # work if there ever was a concrete output that had an input-dependent + # variable anywhere other than in the leaf position. + + # Don't declare any inputPaths or outputPaths. If they're present, + # Xcode will provide a slight optimization by only running the script + # phase if any output is missing or outdated relative to any input. + # Unfortunately, it will also assume that all outputs are touched by + # the script, and if the outputs serve as files in a compilation + # phase, they will be unconditionally rebuilt. Since make might not + # rebuild everything that could be declared here as an output, this + # extra compilation activity is unnecessary. With inputPaths and + # outputPaths not supplied, make will always be called, but it knows + # enough to not do anything when everything is up-to-date. + + # To help speed things up, pass -j COUNT to make so it does some work + # in parallel. Don't use ncpus because Xcode will build ncpus targets + # in parallel and if each target happens to have a rules step, there + # would be ncpus^2 things going. With a machine that has 2 quad-core + # Xeons, a build can quickly run out of processes based on + # scheduling/other tasks, and randomly failing builds are no good. + script = \ +"""JOB_COUNT="$(/usr/sbin/sysctl -n hw.ncpu)" +if [ "${JOB_COUNT}" -gt 4 ]; then + JOB_COUNT=4 +fi +exec "${DEVELOPER_BIN_DIR}/make" -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}" +exit 1 +""" % makefile_name + ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ + 'name': 'Rule "' + rule['rule_name'] + '"', + 'shellScript': script, + 'showEnvVarsInLog': 0, + }) + + if support_xct: + support_xct.AppendProperty('buildPhases', ssbp) + else: + # TODO(mark): this assumes too much knowledge of the internals of + # xcodeproj_file; some of these smarts should move into xcodeproj_file + # itself. + xct._properties['buildPhases'].insert(prebuild_index, ssbp) + prebuild_index = prebuild_index + 1 + + # Extra rule inputs also go into the project file. Concrete outputs were + # already added when they were computed. + groups = ['inputs', 'inputs_excluded'] + if skip_excluded_files: + groups = [x for x in groups if not x.endswith('_excluded')] + for group in groups: + for item in rule.get(group, []): + pbxp.AddOrGetFileInRootGroup(item) + + # Add "sources". + for source in spec.get('sources', []): + (source_root, source_extension) = posixpath.splitext(source) + if source_extension[1:] not in rules_by_ext: + # AddSourceToTarget will add the file to a root group if it's not + # already there. 
+ AddSourceToTarget(source, pbxp, xct) + else: + pbxp.AddOrGetFileInRootGroup(source) + + # Add "mac_bundle_resources", "mac_framework_headers", and + # "mac_framework_private_headers" if it's a bundle of any type. + if is_bundle: + for resource in tgt_mac_bundle_resources: + (resource_root, resource_extension) = posixpath.splitext(resource) + if resource_extension[1:] not in rules_by_ext: + AddResourceToTarget(resource, pbxp, xct) + else: + pbxp.AddOrGetFileInRootGroup(resource) + + for header in spec.get('mac_framework_headers', []): + AddHeaderToTarget(header, pbxp, xct, True) + + for header in spec.get('mac_framework_private_headers', []): + AddHeaderToTarget(header, pbxp, xct, False) + + # Add "copies". + for copy_group in spec.get('copies', []): + pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase({ + 'name': 'Copy to ' + copy_group['destination'] + }, + parent=xct) + dest = copy_group['destination'] + if dest[0] not in ('/', '$'): + # Relative paths are relative to $(SRCROOT). + dest = '$(SRCROOT)/' + dest + pbxcp.SetDestination(dest) + + # TODO(mark): The usual comment about this knowing too much about + # gyp.xcodeproj_file internals applies. + xct._properties['buildPhases'].insert(prebuild_index, pbxcp) + + for file in copy_group['files']: + pbxcp.AddFile(file) + + # Excluded files can also go into the project file. + if not skip_excluded_files: + for key in ['sources', 'mac_bundle_resources', 'mac_framework_headers', + 'mac_framework_private_headers']: + excluded_key = key + '_excluded' + for item in spec.get(excluded_key, []): + pbxp.AddOrGetFileInRootGroup(item) + + # So can "inputs" and "outputs" sections of "actions" groups. + groups = ['inputs', 'inputs_excluded', 'outputs', 'outputs_excluded'] + if skip_excluded_files: + groups = [x for x in groups if not x.endswith('_excluded')] + for action in spec.get('actions', []): + for group in groups: + for item in action.get(group, []): + # Exclude anything in BUILT_PRODUCTS_DIR. They're products, not + # sources. + if not item.startswith('$(BUILT_PRODUCTS_DIR)/'): + pbxp.AddOrGetFileInRootGroup(item) + + for postbuild in spec.get('postbuilds', []): + action_string_sh = gyp.common.EncodePOSIXShellList(postbuild['action']) + script = 'exec ' + action_string_sh + '\nexit 1\n' + + # Make the postbuild step depend on the output of ld or ar from this + # target. Apparently putting the script step after the link step isn't + # sufficient to ensure proper ordering in all cases. With an input + # declared but no outputs, the script step should run every time, as + # desired. + ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ + 'inputPaths': ['$(BUILT_PRODUCTS_DIR)/$(EXECUTABLE_PATH)'], + 'name': 'Postbuild "' + postbuild['postbuild_name'] + '"', + 'shellScript': script, + 'showEnvVarsInLog': 0, + }) + xct.AppendProperty('buildPhases', ssbp) + + # Add dependencies before libraries, because adding a dependency may imply + # adding a library. It's preferable to keep dependencies listed first + # during a link phase so that they can override symbols that would + # otherwise be provided by libraries, which will usually include system + # libraries. On some systems, ld is finicky and even requires the + # libraries to be ordered in such a way that unresolved symbols in + # earlier-listed libraries may only be resolved by later-listed libraries. + # The Mac linker doesn't work that way, but other platforms do, and so + # their linker invocations need to be constructed in this way. 
There's + # no compelling reason for Xcode's linker invocations to differ. + + if 'dependencies' in spec: + for dependency in spec['dependencies']: + xct.AddDependency(xcode_targets[dependency]) + # The support project also gets the dependencies (in case they are + # needed for the actions/rules to work). + if support_xct: + support_xct.AddDependency(xcode_targets[dependency]) + + if 'libraries' in spec: + for library in spec['libraries']: + xct.FrameworksPhase().AddFile(library) + # Add the library's directory to LIBRARY_SEARCH_PATHS if necessary. + # I wish Xcode handled this automatically. + library_dir = posixpath.dirname(library) + if library_dir not in xcode_standard_library_dirs and ( + not xct.HasBuildSetting(_library_search_paths_var) or + library_dir not in xct.GetBuildSetting(_library_search_paths_var)): + xct.AppendBuildSetting(_library_search_paths_var, library_dir) + + for configuration_name in configuration_names: + configuration = spec['configurations'][configuration_name] + xcbc = xct.ConfigurationNamed(configuration_name) + for include_dir in configuration.get('mac_framework_dirs', []): + xcbc.AppendBuildSetting('FRAMEWORK_SEARCH_PATHS', include_dir) + for include_dir in configuration.get('include_dirs', []): + xcbc.AppendBuildSetting('HEADER_SEARCH_PATHS', include_dir) + if 'defines' in configuration: + for define in configuration['defines']: + set_define = EscapeXCodeArgument(define) + xcbc.AppendBuildSetting('GCC_PREPROCESSOR_DEFINITIONS', set_define) + if 'xcode_settings' in configuration: + for xck, xcv in configuration['xcode_settings'].iteritems(): + xcbc.SetBuildSetting(xck, xcv) + if 'xcode_config_file' in configuration: + config_ref = pbxp.AddOrGetFileInRootGroup( + configuration['xcode_config_file']) + xcbc.SetBaseConfiguration(config_ref) + + build_files = [] + for build_file, build_file_dict in data.iteritems(): + if build_file.endswith('.gyp'): + build_files.append(build_file) + + for build_file in build_files: + xcode_projects[build_file].Finalize1(xcode_targets, serialize_all_tests) + + for build_file in build_files: + xcode_projects[build_file].Finalize2(xcode_targets, + xcode_target_to_target_dict) + + for build_file in build_files: + xcode_projects[build_file].Write() diff --git a/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/input.py b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/input.py new file mode 100644 index 0000000000..74a96b6d8b --- /dev/null +++ b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/input.py @@ -0,0 +1,2382 @@ +# Copyright (c) 2011 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +from compiler.ast import Const +from compiler.ast import Dict +from compiler.ast import Discard +from compiler.ast import List +from compiler.ast import Module +from compiler.ast import Node +from compiler.ast import Stmt +import compiler +import copy +import gyp.common +import optparse +import os.path +import re +import shlex +import subprocess +import sys + + +# A list of types that are treated as linkable. +linkable_types = ['executable', 'shared_library', 'loadable_module'] + +# A list of sections that contain links to other targets. +dependency_sections = ['dependencies', 'export_dependent_settings'] + +# base_path_sections is a list of sections defined by GYP that contain +# pathnames. 
The generators can provide more keys; the two lists are merged
+# into path_sections, but you should call IsPathSection instead of using either
+# list directly.
+base_path_sections = [
+  'destination',
+  'files',
+  'include_dirs',
+  'inputs',
+  'libraries',
+  'outputs',
+  'sources',
+]
+path_sections = []
+
+
+def IsPathSection(section):
+  # If section ends in one of these characters, it's applied to a section
+  # without the trailing characters.  '/' is notably absent from this list,
+  # because there's no way for a regular expression to be treated as a path.
+  while section[-1:] in ('=', '+', '?', '!'):
+    section = section[0:-1]
+
+  if section in path_sections or \
+     section.endswith('_dir') or section.endswith('_dirs') or \
+     section.endswith('_file') or section.endswith('_files') or \
+     section.endswith('_path') or section.endswith('_paths'):
+    return True
+  return False
+
+
+# base_non_configuration_keys is a list of key names that belong in the target
+# itself and should not be propagated into its configurations.  It is merged
+# with a list that can come from the generator to
+# create non_configuration_keys.
+base_non_configuration_keys = [
+  # Sections that must exist inside targets and not configurations.
+  'actions',
+  'configurations',
+  'copies',
+  'default_configuration',
+  'dependencies',
+  'dependencies_original',
+  'link_languages',
+  'libraries',
+  'postbuilds',
+  'product_dir',
+  'product_extension',
+  'product_name',
+  'product_prefix',
+  'rules',
+  'run_as',
+  'sources',
+  'suppress_wildcard',
+  'target_name',
+  'toolset',
+  'toolsets',
+  'type',
+  'variants',
+
+  # Sections that can be found inside targets or configurations, but that
+  # should not be propagated from targets into their configurations.
+  'variables',
+]
+non_configuration_keys = []
+
+# Keys that do not belong inside a configuration dictionary.
+invalid_configuration_keys = [
+  'actions',
+  'all_dependent_settings',
+  'configurations',
+  'dependencies',
+  'direct_dependent_settings',
+  'libraries',
+  'link_settings',
+  'sources',
+  'target_name',
+  'type',
+]
+
+# Controls how the generator wants the build file paths.
+absolute_build_file_paths = False
+
+# Controls whether or not the generator supports multiple toolsets.
+multiple_toolsets = False
+
+
+def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
+  """Return a list of all build files included into build_file_path.
+
+  The returned list will contain build_file_path as well as all other files
+  that it included, either directly or indirectly.  Note that the list may
+  contain files that were included into a conditional section that evaluated
+  to false and was not merged into build_file_path's dict.
+
+  aux_data is a dict containing a key for each build file or included build
+  file.  Those keys provide access to dicts whose "included" keys contain
+  lists of all other files included by the build file.
+
+  included should be left at its default None value by external callers.  It
+  is used for recursion.
+
+  The returned list will not contain any duplicate entries.  Each build file
+  in the list will be relative to the current directory.
+  """
+
+  if included == None:
+    included = []
+
+  if build_file_path in included:
+    return included
+
+  included.append(build_file_path)
+
+  for included_build_file in aux_data[build_file_path].get('included', []):
+    GetIncludedBuildFiles(included_build_file, aux_data, included)
+
+  return included
+
+
+def CheckedEval(file_contents):
+  """Return the eval of a gyp file.
+ + The gyp file is restricted to dictionaries and lists only, and + repeated keys are not allowed. + + Note that this is slower than eval() is. + """ + + ast = compiler.parse(file_contents) + assert isinstance(ast, Module) + c1 = ast.getChildren() + assert c1[0] is None + assert isinstance(c1[1], Stmt) + c2 = c1[1].getChildren() + assert isinstance(c2[0], Discard) + c3 = c2[0].getChildren() + assert len(c3) == 1 + return CheckNode(c3[0], []) + + +def CheckNode(node, keypath): + if isinstance(node, Dict): + c = node.getChildren() + dict = {} + for n in range(0, len(c), 2): + assert isinstance(c[n], Const) + key = c[n].getChildren()[0] + if key in dict: + raise KeyError, "Key '" + key + "' repeated at level " + \ + repr(len(keypath) + 1) + " with key path '" + \ + '.'.join(keypath) + "'" + kp = list(keypath) # Make a copy of the list for descending this node. + kp.append(key) + dict[key] = CheckNode(c[n + 1], kp) + return dict + elif isinstance(node, List): + c = node.getChildren() + children = [] + for index, child in enumerate(c): + kp = list(keypath) # Copy list. + kp.append(repr(index)) + children.append(CheckNode(child, kp)) + return children + elif isinstance(node, Const): + return node.getChildren()[0] + else: + raise TypeError, "Unknown AST node at key path '" + '.'.join(keypath) + \ + "': " + repr(node) + + +def LoadOneBuildFile(build_file_path, data, aux_data, variables, includes, + is_target, check): + if build_file_path in data: + return data[build_file_path] + + if os.path.exists(build_file_path): + build_file_contents = open(build_file_path).read() + else: + raise Exception("%s not found (cwd: %s)" % (build_file_path, os.getcwd())) + + build_file_data = None + try: + if check: + build_file_data = CheckedEval(build_file_contents) + else: + build_file_data = eval(build_file_contents, {'__builtins__': None}, + None) + except SyntaxError, e: + e.filename = build_file_path + raise + except Exception, e: + gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path) + raise + + data[build_file_path] = build_file_data + aux_data[build_file_path] = {} + + # Scan for includes and merge them in. + try: + if is_target: + LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data, + aux_data, variables, includes, check) + else: + LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data, + aux_data, variables, None, check) + except Exception, e: + gyp.common.ExceptionAppend(e, + 'while reading includes of ' + build_file_path) + raise + + return build_file_data + + +def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data, + variables, includes, check): + includes_list = [] + if includes != None: + includes_list.extend(includes) + if 'includes' in subdict: + for include in subdict['includes']: + # "include" is specified relative to subdict_path, so compute the real + # path to include by appending the provided "include" to the directory + # in which subdict_path resides. + relative_include = \ + os.path.normpath(os.path.join(os.path.dirname(subdict_path), include)) + includes_list.append(relative_include) + # Unhook the includes list, it's no longer needed. + del subdict['includes'] + + # Merge in the included files. 
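+  # (Illustrative: a build file containing 'includes': ['../common.gypi'] has
+  # the dict loaded from common.gypi merged into it here, and the include is
+  # recorded in aux_data for GetIncludedBuildFiles to report later.)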
+  for include in includes_list:
+    if not 'included' in aux_data[subdict_path]:
+      aux_data[subdict_path]['included'] = []
+    aux_data[subdict_path]['included'].append(include)
+
+    gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'" % include)
+
+    MergeDicts(subdict,
+               LoadOneBuildFile(include, data, aux_data, variables, None,
+                                False, check),
+               subdict_path, include)
+
+  # Recurse into subdictionaries.
+  for k, v in subdict.iteritems():
+    if v.__class__ == dict:
+      LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, variables,
+                                    None, check)
+    elif v.__class__ == list:
+      LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data, variables,
+                                    check)
+
+
+# This recurses into lists so that it can look for dicts.
+def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data,
+                                  variables, check):
+  for item in sublist:
+    if item.__class__ == dict:
+      LoadBuildFileIncludesIntoDict(item, sublist_path, data, aux_data,
+                                    variables, None, check)
+    elif item.__class__ == list:
+      LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data,
+                                    variables, check)
+
+# Processes toolsets in all the targets. This recurses into condition entries
+# since they can contain toolsets as well.
+def ProcessToolsetsInDict(data):
+  if 'targets' in data:
+    target_list = data['targets']
+    new_target_list = []
+    for target in target_list:
+      # If this target already has an explicit 'toolset', and no 'toolsets'
+      # list, don't modify it further.
+      if 'toolset' in target and 'toolsets' not in target:
+        new_target_list.append(target)
+        continue
+      global multiple_toolsets
+      if multiple_toolsets:
+        toolsets = target.get('toolsets', ['target'])
+      else:
+        toolsets = ['target']
+      # Make sure this 'toolsets' definition is only processed once.
+      if 'toolsets' in target:
+        del target['toolsets']
+      if len(toolsets) > 0:
+        # Optimization: only do copies if more than one toolset is specified.
+        for build in toolsets[1:]:
+          new_target = copy.deepcopy(target)
+          new_target['toolset'] = build
+          new_target_list.append(new_target)
+        target['toolset'] = toolsets[0]
+        new_target_list.append(target)
+    data['targets'] = new_target_list
+  if 'conditions' in data:
+    for condition in data['conditions']:
+      if isinstance(condition, list):
+        for condition_dict in condition[1:]:
+          ProcessToolsetsInDict(condition_dict)
+
+
+# TODO(mark): I don't love this name.  It just means that it's going to load
+# a build file that contains targets and is expected to provide a targets dict
+# that contains the targets...
+def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
+                        depth, check):
+  global absolute_build_file_paths
+
+  # If depth is set, predefine the DEPTH variable to be a relative path from
+  # this build file's directory to the directory identified by depth.
+  if depth:
+    # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
+    # temporary measure. This should really be addressed by keeping all paths
+    # in POSIX until actual project generation.
+    d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path))
+    if d == '':
+      variables['DEPTH'] = '.'
+    else:
+      variables['DEPTH'] = d.replace('\\', '/')
+
+  # If the generator needs absolute paths, make the build file path absolute
+  # here.
+  if absolute_build_file_paths:
+    build_file_path = os.path.abspath(build_file_path)
+
+  if build_file_path in data['target_build_files']:
+    # Already loaded.
+ return + data['target_build_files'].add(build_file_path) + + gyp.DebugOutput(gyp.DEBUG_INCLUDES, + "Loading Target Build File '%s'" % build_file_path) + + build_file_data = LoadOneBuildFile(build_file_path, data, aux_data, variables, + includes, True, check) + + # Store DEPTH for later use in generators. + build_file_data['_DEPTH'] = depth + + # Set up the included_files key indicating which .gyp files contributed to + # this target dict. + if 'included_files' in build_file_data: + raise KeyError, build_file_path + ' must not contain included_files key' + + included = GetIncludedBuildFiles(build_file_path, aux_data) + build_file_data['included_files'] = [] + for included_file in included: + # included_file is relative to the current directory, but it needs to + # be made relative to build_file_path's directory. + included_relative = \ + gyp.common.RelativePath(included_file, + os.path.dirname(build_file_path)) + build_file_data['included_files'].append(included_relative) + + # Do a first round of toolsets expansion so that conditions can be defined + # per toolset. + ProcessToolsetsInDict(build_file_data) + + # Apply "pre"/"early" variable expansions and condition evaluations. + ProcessVariablesAndConditionsInDict(build_file_data, False, variables, + build_file_path) + + # Since some toolsets might have been defined conditionally, perform + # a second round of toolsets expansion now. + ProcessToolsetsInDict(build_file_data) + + # Look at each project's target_defaults dict, and merge settings into + # targets. + if 'target_defaults' in build_file_data: + index = 0 + if 'targets' in build_file_data: + while index < len(build_file_data['targets']): + # This procedure needs to give the impression that target_defaults is + # used as defaults, and the individual targets inherit from that. + # The individual targets need to be merged into the defaults. Make + # a deep copy of the defaults for each target, merge the target dict + # as found in the input file into that copy, and then hook up the + # copy with the target-specific data merged into it as the replacement + # target dict. + old_target_dict = build_file_data['targets'][index] + new_target_dict = copy.deepcopy(build_file_data['target_defaults']) + MergeDicts(new_target_dict, old_target_dict, + build_file_path, build_file_path) + build_file_data['targets'][index] = new_target_dict + index = index + 1 + else: + raise Exception, \ + "Unable to find targets in build file %s" % build_file_path + + # No longer needed. + del build_file_data['target_defaults'] + + # Look for dependencies. This means that dependency resolution occurs + # after "pre" conditionals and variable expansion, but before "post" - + # in other words, you can't put a "dependencies" section inside a "post" + # conditional within a target. + + if 'targets' in build_file_data: + for target_dict in build_file_data['targets']: + if 'dependencies' not in target_dict: + continue + for dependency in target_dict['dependencies']: + other_build_file = \ + gyp.common.ResolveTarget(build_file_path, dependency, None)[0] + try: + LoadTargetBuildFile(other_build_file, data, aux_data, variables, + includes, depth, check) + except Exception, e: + gyp.common.ExceptionAppend( + e, 'while loading dependencies of %s' % build_file_path) + raise + + return data + + +# Look for the bracket that matches the first bracket seen in a +# string, and return the start and end as a tuple. 
For example, if
+# the input is something like "<(foo <(bar)) blah", then it would
+# return (1, 13), indicating the entire string except for the leading
+# "<" and trailing " blah".
+def FindEnclosingBracketGroup(input):
+  brackets = { '}': '{',
+               ']': '[',
+               ')': '(', }
+  stack = []
+  count = 0
+  start = -1
+  for char in input:
+    if char in brackets.values():
+      stack.append(char)
+      if start == -1:
+        start = count
+    if char in brackets.keys():
+      try:
+        last_bracket = stack.pop()
+      except IndexError:
+        return (-1, -1)
+      if last_bracket != brackets[char]:
+        return (-1, -1)
+      if len(stack) == 0:
+        return (start, count + 1)
+    count = count + 1
+  return (-1, -1)
+
+
+canonical_int_re = re.compile('^(0|-?[1-9][0-9]*)$')
+
+
+def IsStrCanonicalInt(string):
+  """Returns True if |string| is in its canonical integer form.
+
+  The canonical form is such that str(int(string)) == string.
+  """
+  if not isinstance(string, str) or not canonical_int_re.match(string):
+    return False
+
+  return True
+
+
+# This matches things like "<(asdf)", "<!(cmd)", "<!@(cmd)", "<|(list)", and
+# "<!pymod_do_main(module args)".
+early_variable_re = re.compile(
+    '(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)'
+    '(?P<command_string>[-a-zA-Z0-9_.]+)?'
+    '\((?P<is_array>\s*\[?)'
+    '(?P<content>.*?)(\]?)\))')
+
+# This matches the same as early_variable_re, but with '>' instead of '<'.
+late_variable_re = re.compile(
+    '(?P<replace>(?P<type>>(?:(?:!?@?)|\|)?)'
+    '(?P<command_string>[-a-zA-Z0-9_.]+)?'
+    '\((?P<is_array>\s*\[?)'
+    '(?P<content>.*?)(\]?)\))')
+
+# Global cache of results from running commands so they don't have to be run
+# more than once.
+cached_command_results = {}
+
+
+def FixupPlatformCommand(cmd):
+  if sys.platform == 'win32':
+    if type(cmd) == list:
+      cmd = [re.sub('^cat ', 'type ', cmd[0])] + cmd[1:]
+    else:
+      cmd = re.sub('^cat ', 'type ', cmd)
+  return cmd
+
+
+def ExpandVariables(input, is_late, variables, build_file):
+  # Look for the pattern that gets expanded into variables
+  if not is_late:
+    variable_re = early_variable_re
+    expansion_symbol = '<'
+  else:
+    variable_re = late_variable_re
+    expansion_symbol = '>'
+
+  input_str = str(input)
+  # Do a quick scan to determine if an expensive regex search is warranted.
+  if expansion_symbol in input_str:
+    # Get the entire list of matches as a list of MatchObject instances.
+    # (using findall here would return strings instead of MatchObjects).
+    matches = [match for match in variable_re.finditer(input_str)]
+  else:
+    matches = None
+
+  output = input_str
+  if matches:
+    # Reverse the list of matches so that replacements are done right-to-left.
+    # That ensures that earlier replacements won't mess up the string in a
+    # way that causes later calls to find the earlier substituted text instead
+    # of what's intended for replacement.
+    matches.reverse()
+    for match_group in matches:
+      match = match_group.groupdict()
+      gyp.DebugOutput(gyp.DEBUG_VARIABLES,
+                      "Matches: %s" % repr(match))
+      # match['replace'] is the substring to look for, match['type']
+      # is the character code for the replacement type (< > ! <| >| <@
+      # >@ !@), match['is_array'] contains a '[' for command
+      # arrays, and match['content'] is the name of the variable (< >)
+      # or command to run (!).  match['command_string'] is an optional
+      # command string.  Currently, only 'pymod_do_main' is supported.
+
+      # run_command is true if a ! variant is used.
+      run_command = '!' in match['type']
+      command_string = match['command_string']
+
+      # file_list is true if a | variant is used.
+      file_list = '|' in match['type']
+
+      # Capture these now so we can adjust them later.
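+      # (Illustrative: for input "foo <!(echo hi) bar", match['replace'] is
+      # "<!(echo hi)", match['type'] is "<!", match['command_string'] is None,
+      # and match['content'] is "echo hi".)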
+ replace_start = match_group.start('replace') + replace_end = match_group.end('replace') + + # Find the ending paren, and re-evaluate the contained string. + (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:]) + + # Adjust the replacement range to match the entire command + # found by FindEnclosingBracketGroup (since the variable_re + # probably doesn't match the entire command if it contained + # nested variables). + replace_end = replace_start + c_end + + # Find the "real" replacement, matching the appropriate closing + # paren, and adjust the replacement start and end. + replacement = input_str[replace_start:replace_end] + + # Figure out what the contents of the variable parens are. + contents_start = replace_start + c_start + 1 + contents_end = replace_end - 1 + contents = input_str[contents_start:contents_end] + + # Do filter substitution now for <|(). + # Admittedly, this is different than the evaluation order in other + # contexts. However, since filtration has no chance to run on <|(), + # this seems like the only obvious way to give them access to filters. + if file_list: + processed_variables = copy.deepcopy(variables) + ProcessListFiltersInDict(contents, processed_variables) + # Recurse to expand variables in the contents + contents = ExpandVariables(contents, is_late, + processed_variables, build_file) + else: + # Recurse to expand variables in the contents + contents = ExpandVariables(contents, is_late, variables, build_file) + + # Strip off leading/trailing whitespace so that variable matches are + # simpler below (and because they are rarely needed). + contents = contents.strip() + + # expand_to_list is true if an @ variant is used. In that case, + # the expansion should result in a list. Note that the caller + # is to be expecting a list in return, and not all callers do + # because not all are working in list context. Also, for list + # expansions, there can be no other text besides the variable + # expansion in the input string. + expand_to_list = '@' in match['type'] and input_str == replacement + + if run_command or file_list: + # Find the build file's directory, so commands can be run or file lists + # generated relative to it. + build_file_dir = os.path.dirname(build_file) + if build_file_dir == '': + # If build_file is just a leaf filename indicating a file in the + # current directory, build_file_dir might be an empty string. Set + # it to None to signal to subprocess.Popen that it should run the + # command in the current directory. + build_file_dir = None + + # Support <|(listfile.txt ...) which generates a file + # containing items from a gyp list, generated at gyp time. + # This works around actions/rules which have more inputs than will + # fit on the command line. + if file_list: + if type(contents) == list: + contents_list = contents + else: + contents_list = contents.split(' ') + replacement = contents_list[0] + path = replacement + if not os.path.isabs(path): + path = os.path.join(build_file_dir, path) + f = gyp.common.WriteOnDiff(path) + for i in contents_list[1:]: + f.write('%s\n' % i) + f.close() + + elif run_command: + use_shell = True + if match['is_array']: + contents = eval(contents) + use_shell = False + + # Check for a cached value to avoid executing commands, or generating + # file lists more than once. + # TODO(http://code.google.com/p/gyp/issues/detail?id=112): It is + # possible that the command being invoked depends on the current + # directory. 
For that case the syntax needs to be extended so that the
+      # directory is also used in cache_key (it becomes a tuple).
+      # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory,
+      # someone could author a set of GYP files where each time the command
+      # is invoked it produces different output by design. When the need
+      # arises, the syntax should be extended to support disabling the cache
+      # for a command's output so that it is run every time.
+      cache_key = str(contents)
+      cached_value = cached_command_results.get(cache_key, None)
+      if cached_value is None:
+        gyp.DebugOutput(gyp.DEBUG_VARIABLES,
+                        "Executing command '%s' in directory '%s'" %
+                        (contents, build_file_dir))
+
+        replacement = ''
+
+        if command_string == 'pymod_do_main':
+          # '
+      ast_code = compile(cond_expr_expanded, '<string>', 'eval')
+
+      if eval(ast_code, {'__builtins__': None}, variables):
+        merge_dict = true_dict
+      else:
+        merge_dict = false_dict
+    except SyntaxError, e:
+      syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
+                                 'at character %d.' %
+                                 (str(e.args[0]), e.text, build_file, e.offset),
+                                 e.filename, e.lineno, e.offset, e.text)
+      raise syntax_error
+    except NameError, e:
+      gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
+                                 (cond_expr_expanded, build_file))
+      raise
+
+    if merge_dict != None:
+      # Expand variables and nested conditionals in the merge_dict before
+      # merging it.
+      ProcessVariablesAndConditionsInDict(merge_dict, is_late,
+                                          variables, build_file)
+
+      MergeDicts(the_dict, merge_dict, build_file, build_file)
+
+
+def LoadAutomaticVariablesFromDict(variables, the_dict):
+  # Any keys with plain string, integer, or list values in the_dict become
+  # automatic variables. The variable name is the key name with a "_"
+  # character prepended.
+  for key, value in the_dict.iteritems():
+    if isinstance(value, str) or isinstance(value, int) or \
+       isinstance(value, list):
+      variables['_' + key] = value
+
+
+def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
+  # Any key in the_dict's "variables" dict, if it has one, becomes a
+  # variable. The variable name is the key name in the "variables" dict.
+  # Variables that end with the % character are set only if they are unset in
+  # the variables dict. the_dict_key is the name of the key that accesses
+  # the_dict in the_dict's parent dict. If the_dict's parent is not a dict
+  # (it could be a list or it could be parentless because it is a root dict),
+  # the_dict_key will be None.
+  for key, value in the_dict.get('variables', {}).iteritems():
+    if not isinstance(value, str) and not isinstance(value, int) and \
+       not isinstance(value, list):
+      continue
+
+    if key.endswith('%'):
+      variable_name = key[:-1]
+      if variable_name in variables:
+        # If the variable is already set, don't set it.
+        continue
+      if the_dict_key == 'variables' and variable_name in the_dict:
+        # If the variable is set without a % in the_dict, and the_dict is a
+        # variables dict (making |variables| a variables sub-dict of a
+        # variables dict), use the_dict's definition.
+        value = the_dict[variable_name]
+    else:
+      variable_name = key
+
+    variables[variable_name] = value
+
+
+def ProcessVariablesAndConditionsInDict(the_dict, is_late, variables_in,
+                                        build_file, the_dict_key=None):
+  """Handle all variable and command expansion and conditional evaluation.
+
+  This function is the public entry point for all variable expansions and
+  conditional evaluations. The variables_in dictionary will not be modified
+  by this function.
+ """ + + # Make a copy of the variables_in dict that can be modified during the + # loading of automatics and the loading of the variables dict. + variables = variables_in.copy() + LoadAutomaticVariablesFromDict(variables, the_dict) + + if 'variables' in the_dict: + # Make sure all the local variables are added to the variables + # list before we process them so that you can reference one + # variable from another. They will be fully expanded by recursion + # in ExpandVariables. + for key, value in the_dict['variables'].iteritems(): + variables[key] = value + + # Handle the associated variables dict first, so that any variable + # references within can be resolved prior to using them as variables. + # Pass a copy of the variables dict to avoid having it be tainted. + # Otherwise, it would have extra automatics added for everything that + # should just be an ordinary variable in this scope. + ProcessVariablesAndConditionsInDict(the_dict['variables'], is_late, + variables, build_file, 'variables') + + LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) + + for key, value in the_dict.iteritems(): + # Skip "variables", which was already processed if present. + if key != 'variables' and isinstance(value, str): + expanded = ExpandVariables(value, is_late, variables, build_file) + if not isinstance(expanded, str) and not isinstance(expanded, int): + raise ValueError, \ + 'Variable expansion in this context permits str and int ' + \ + 'only, found ' + expanded.__class__.__name__ + ' for ' + key + the_dict[key] = expanded + + # Variable expansion may have resulted in changes to automatics. Reload. + # TODO(mark): Optimization: only reload if no changes were made. + variables = variables_in.copy() + LoadAutomaticVariablesFromDict(variables, the_dict) + LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) + + # Process conditions in this dict. This is done after variable expansion + # so that conditions may take advantage of expanded variables. For example, + # if the_dict contains: + # {'type': '<(library_type)', + # 'conditions': [['_type=="static_library"', { ... }]]}, + # _type, as used in the condition, will only be set to the value of + # library_type if variable expansion is performed before condition + # processing. However, condition processing should occur prior to recursion + # so that variables (both automatic and "variables" dict type) may be + # adjusted by conditions sections, merged into the_dict, and have the + # intended impact on contained dicts. + # + # This arrangement means that a "conditions" section containing a "variables" + # section will only have those variables effective in subdicts, not in + # the_dict. The workaround is to put a "conditions" section within a + # "variables" section. For example: + # {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]], + # 'defines': ['<(define)'], + # 'my_subdict': {'defines': ['<(define)']}}, + # will not result in "IS_MAC" being appended to the "defines" list in the + # current scope but would result in it being appended to the "defines" list + # within "my_subdict". By comparison: + # {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]}, + # 'defines': ['<(define)'], + # 'my_subdict': {'defines': ['<(define)']}}, + # will append "IS_MAC" to both "defines" lists. + + # Evaluate conditions sections, allowing variable expansions within them + # as well as nested conditionals. 
This will process a 'conditions' or
+  # 'target_conditions' section, perform appropriate merging and recursive
+  # conditional and variable processing, and then remove the conditions section
+  # from the_dict if it is present.
+  ProcessConditionsInDict(the_dict, is_late, variables, build_file)
+
+  # Conditional processing may have resulted in changes to automatics or the
+  # variables dict. Reload.
+  variables = variables_in.copy()
+  LoadAutomaticVariablesFromDict(variables, the_dict)
+  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
+
+  # Recurse into child dicts, or process child lists which may result in
+  # further recursion into descendant dicts.
+  for key, value in the_dict.iteritems():
+    # Skip "variables" and string values, which were already processed if
+    # present.
+    if key == 'variables' or isinstance(value, str):
+      continue
+    if isinstance(value, dict):
+      # Pass a copy of the variables dict so that subdicts can't influence
+      # parents.
+      ProcessVariablesAndConditionsInDict(value, is_late, variables,
+                                          build_file, key)
+    elif isinstance(value, list):
+      # The list itself can't influence the variables dict, and
+      # ProcessVariablesAndConditionsInList will make copies of the variables
+      # dict if it needs to pass it to something that can influence it. No
+      # copy is necessary here.
+      ProcessVariablesAndConditionsInList(value, is_late, variables,
+                                          build_file)
+    elif not isinstance(value, int):
+      raise TypeError, 'Unknown type ' + value.__class__.__name__ + \
+                       ' for ' + key
+
+
+def ProcessVariablesAndConditionsInList(the_list, is_late, variables,
+                                        build_file):
+  # Iterate using an index so that new values can be assigned into the_list.
+  index = 0
+  while index < len(the_list):
+    item = the_list[index]
+    if isinstance(item, dict):
+      # Make a copy of the variables dict so that it won't influence anything
+      # outside of its own scope.
+      ProcessVariablesAndConditionsInDict(item, is_late, variables, build_file)
+    elif isinstance(item, list):
+      ProcessVariablesAndConditionsInList(item, is_late, variables, build_file)
+    elif isinstance(item, str):
+      expanded = ExpandVariables(item, is_late, variables, build_file)
+      if isinstance(expanded, str) or isinstance(expanded, int):
+        the_list[index] = expanded
+      elif isinstance(expanded, list):
+        del the_list[index]
+        for expanded_item in expanded:
+          the_list.insert(index, expanded_item)
+          index = index + 1
+
+        # index now identifies the next item to examine. Continue right now
+        # without falling into the index increment below.
+        continue
+      else:
+        raise ValueError, \
+              'Variable expansion in this context permits strings and ' + \
+              'lists only, found ' + expanded.__class__.__name__ + ' at ' + \
+              str(index)
+    elif not isinstance(item, int):
+      raise TypeError, 'Unknown type ' + item.__class__.__name__ + \
+                       ' at index ' + str(index)
+    index = index + 1
+
+
+def BuildTargetsDict(data):
+  """Builds a dict mapping fully-qualified target names to their target dicts.
+
+  |data| is a dict of loaded build files, keyed by pathname relative to the
+  current directory. Values in |data| are build file contents. For each
+  |data| value with a "targets" key, the value of the "targets" key is taken
+  as a list containing target dicts. Each target's fully-qualified name is
+  constructed from the pathname of the build file (|data| key) and its
+  "target_name" property. These fully-qualified names are used as the keys
+  in the returned dict. These keys provide access to the target dicts,
+  the dicts in the "targets" lists.
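+
+  For example, a target 'foo' defined in a hypothetical build file 'a/b.gyp'
+  with toolset 'target' is keyed as 'a/b.gyp:foo#target', the form produced
+  by gyp.common.QualifiedTarget.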
+ """ + + targets = {} + for build_file in data['target_build_files']: + for target in data[build_file].get('targets', []): + target_name = gyp.common.QualifiedTarget(build_file, + target['target_name'], + target['toolset']) + if target_name in targets: + raise KeyError, 'Duplicate target definitions for ' + target_name + targets[target_name] = target + + return targets + + +def QualifyDependencies(targets): + """Make dependency links fully-qualified relative to the current directory. + + |targets| is a dict mapping fully-qualified target names to their target + dicts. For each target in this dict, keys known to contain dependency + links are examined, and any dependencies referenced will be rewritten + so that they are fully-qualified and relative to the current directory. + All rewritten dependencies are suitable for use as keys to |targets| or a + similar dict. + """ + + all_dependency_sections = [dep + op + for dep in dependency_sections + for op in ('', '!', '/')] + + for target, target_dict in targets.iteritems(): + target_build_file = gyp.common.BuildFile(target) + toolset = target_dict['toolset'] + for dependency_key in all_dependency_sections: + dependencies = target_dict.get(dependency_key, []) + for index in xrange(0, len(dependencies)): + dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget( + target_build_file, dependencies[index], toolset) + global multiple_toolsets + if not multiple_toolsets: + # Ignore toolset specification in the dependency if it is specified. + dep_toolset = toolset + dependency = gyp.common.QualifiedTarget(dep_file, + dep_target, + dep_toolset) + dependencies[index] = dependency + + # Make sure anything appearing in a list other than "dependencies" also + # appears in the "dependencies" list. + if dependency_key != 'dependencies' and \ + dependency not in target_dict['dependencies']: + raise KeyError, 'Found ' + dependency + ' in ' + dependency_key + \ + ' of ' + target + ', but not in dependencies' + + +def ExpandWildcardDependencies(targets, data): + """Expands dependencies specified as build_file:*. + + For each target in |targets|, examines sections containing links to other + targets. If any such section contains a link of the form build_file:*, it + is taken as a wildcard link, and is expanded to list each target in + build_file. The |data| dict provides access to build file dicts. + + Any target that does not wish to be included by wildcard can provide an + optional "suppress_wildcard" key in its target dict. When present and + true, a wildcard dependency link will not include such targets. + + All dependency names, including the keys to |targets| and the values in each + dependency list, must be qualified when this function is called. + """ + + for target, target_dict in targets.iteritems(): + toolset = target_dict['toolset'] + target_build_file = gyp.common.BuildFile(target) + for dependency_key in dependency_sections: + dependencies = target_dict.get(dependency_key, []) + + # Loop this way instead of "for dependency in" or "for index in xrange" + # because the dependencies list will be modified within the loop body. + index = 0 + while index < len(dependencies): + (dependency_build_file, dependency_target, dependency_toolset) = \ + gyp.common.ParseQualifiedTarget(dependencies[index]) + if dependency_target != '*' and dependency_toolset != '*': + # Not a wildcard. Keep it moving. 
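+          # (Illustrative: a qualified entry such as 'a/b.gyp:foo#target'
+          # parses to ('a/b.gyp', 'foo', 'target') and is skipped here;
+          # only entries with a '*' target, like 'a/b.gyp:*', or a '*'
+          # toolset are expanded as wildcards below.)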
+ index = index + 1 + continue + + if dependency_build_file == target_build_file: + # It's an error for a target to depend on all other targets in + # the same file, because a target cannot depend on itself. + raise KeyError, 'Found wildcard in ' + dependency_key + ' of ' + \ + target + ' referring to same build file' + + # Take the wildcard out and adjust the index so that the next + # dependency in the list will be processed the next time through the + # loop. + del dependencies[index] + index = index - 1 + + # Loop through the targets in the other build file, adding them to + # this target's list of dependencies in place of the removed + # wildcard. + dependency_target_dicts = data[dependency_build_file]['targets'] + for dependency_target_dict in dependency_target_dicts: + if int(dependency_target_dict.get('suppress_wildcard', False)): + continue + dependency_target_name = dependency_target_dict['target_name'] + if (dependency_target != '*' and + dependency_target != dependency_target_name): + continue + dependency_target_toolset = dependency_target_dict['toolset'] + if (dependency_toolset != '*' and + dependency_toolset != dependency_target_toolset): + continue + dependency = gyp.common.QualifiedTarget(dependency_build_file, + dependency_target_name, + dependency_target_toolset) + index = index + 1 + dependencies.insert(index, dependency) + + index = index + 1 + + +class DependencyGraphNode(object): + """ + + Attributes: + ref: A reference to an object that this DependencyGraphNode represents. + dependencies: List of DependencyGraphNodes on which this one depends. + dependents: List of DependencyGraphNodes that depend on this one. + """ + + class CircularException(Exception): + pass + + def __init__(self, ref): + self.ref = ref + self.dependencies = [] + self.dependents = [] + + def FlattenToList(self): + # flat_list is the sorted list of dependencies - actually, the list items + # are the "ref" attributes of DependencyGraphNodes. Every target will + # appear in flat_list after all of its dependencies, and before all of its + # dependents. + flat_list = [] + + # in_degree_zeros is the list of DependencyGraphNodes that have no + # dependencies not in flat_list. Initially, it is a copy of the children + # of this node, because when the graph was built, nodes with no + # dependencies were made implicit dependents of the root node. + in_degree_zeros = self.dependents[:] + + while in_degree_zeros: + # Nodes in in_degree_zeros have no dependencies not in flat_list, so they + # can be appended to flat_list. Take these nodes out of in_degree_zeros + # as work progresses, so that the next node to process from the list can + # always be accessed at a consistent position. + node = in_degree_zeros.pop(0) + flat_list.append(node.ref) + + # Look at dependents of the node just added to flat_list. Some of them + # may now belong in in_degree_zeros. + for node_dependent in node.dependents: + is_in_degree_zero = True + for node_dependent_dependency in node_dependent.dependencies: + if not node_dependent_dependency.ref in flat_list: + # The dependent one or more dependencies not in flat_list. There + # will be more chances to add it to flat_list when examining + # it again as a dependent of those other dependencies, provided + # that there are no cycles. + is_in_degree_zero = False + break + + if is_in_degree_zero: + # All of the dependent's dependencies are already in flat_list. Add + # it to in_degree_zeros where it will be processed in a future + # iteration of the outer loop. 
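+          # (Illustrative: if A and B depend on C, and C depends on D, then D
+          # is popped first, which unblocks C, which in turn unblocks A and B,
+          # yielding a flat_list like [D, C, A, B]: every target appears after
+          # all of its dependencies.)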
+ in_degree_zeros.append(node_dependent) + + return flat_list + + def DirectDependencies(self, dependencies=None): + """Returns a list of just direct dependencies.""" + if dependencies == None: + dependencies = [] + + for dependency in self.dependencies: + # Check for None, corresponding to the root node. + if dependency.ref != None and dependency.ref not in dependencies: + dependencies.append(dependency.ref) + + return dependencies + + def _AddImportedDependencies(self, targets, dependencies=None): + """Given a list of direct dependencies, adds indirect dependencies that + other dependencies have declared to export their settings. + + This method does not operate on self. Rather, it operates on the list + of dependencies in the |dependencies| argument. For each dependency in + that list, if any declares that it exports the settings of one of its + own dependencies, those dependencies whose settings are "passed through" + are added to the list. As new items are added to the list, they too will + be processed, so it is possible to import settings through multiple levels + of dependencies. + + This method is not terribly useful on its own, it depends on being + "primed" with a list of direct dependencies such as one provided by + DirectDependencies. DirectAndImportedDependencies is intended to be the + public entry point. + """ + + if dependencies == None: + dependencies = [] + + index = 0 + while index < len(dependencies): + dependency = dependencies[index] + dependency_dict = targets[dependency] + # Add any dependencies whose settings should be imported to the list + # if not already present. Newly-added items will be checked for + # their own imports when the list iteration reaches them. + # Rather than simply appending new items, insert them after the + # dependency that exported them. This is done to more closely match + # the depth-first method used by DeepDependencies. + add_index = 1 + for imported_dependency in \ + dependency_dict.get('export_dependent_settings', []): + if imported_dependency not in dependencies: + dependencies.insert(index + add_index, imported_dependency) + add_index = add_index + 1 + index = index + 1 + + return dependencies + + def DirectAndImportedDependencies(self, targets, dependencies=None): + """Returns a list of a target's direct dependencies and all indirect + dependencies that a dependency has advertised settings should be exported + through the dependency for. + """ + + dependencies = self.DirectDependencies(dependencies) + return self._AddImportedDependencies(targets, dependencies) + + def DeepDependencies(self, dependencies=None): + """Returns a list of all of a target's dependencies, recursively.""" + if dependencies == None: + dependencies = [] + + for dependency in self.dependencies: + # Check for None, corresponding to the root node. + if dependency.ref != None and dependency.ref not in dependencies: + dependencies.append(dependency.ref) + dependency.DeepDependencies(dependencies) + + return dependencies + + def LinkDependencies(self, targets, dependencies=None, initial=True): + """Returns a list of dependency targets that are linked into this target. + + This function has a split personality, depending on the setting of + |initial|. Outside callers should always leave |initial| at its default + setting. + + When adding a target to the list of dependencies, this function will + recurse into itself with |initial| set to False, to collect dependencies + that are linked into the linkable target for which the list is being built. 
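+
+    For example (names illustrative): an executable A that depends on static
+    libraries B and C yields [A, B, C], while a shared library D between A
+    and the static libraries would be returned instead of them, since they
+    are already linked into D.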
+ """ + if dependencies == None: + dependencies = [] + + # Check for None, corresponding to the root node. + if self.ref == None: + return dependencies + + # It's kind of sucky that |targets| has to be passed into this function, + # but that's presently the easiest way to access the target dicts so that + # this function can find target types. + + if not 'target_name' in targets[self.ref]: + raise Exception("Missing 'target_name' field in target.") + + try: + target_type = targets[self.ref]['type'] + except KeyError, e: + raise Exception("Missing 'type' field in target %s" % + targets[self.ref]['target_name']) + + is_linkable = target_type in linkable_types + + if initial and not is_linkable: + # If this is the first target being examined and it's not linkable, + # return an empty list of link dependencies, because the link + # dependencies are intended to apply to the target itself (initial is + # True) and this target won't be linked. + return dependencies + + # Don't traverse 'none' targets if explicitly excluded. + if (target_type == 'none' and + not targets[self.ref].get('dependencies_traverse', True)): + if self.ref not in dependencies: + dependencies.append(self.ref) + return dependencies + + # Executables and loadable modules are already fully and finally linked. + # Nothing else can be a link dependency of them, there can only be + # dependencies in the sense that a dependent target might run an + # executable or load the loadable_module. + if not initial and target_type in ('executable', 'loadable_module'): + return dependencies + + # The target is linkable, add it to the list of link dependencies. + if self.ref not in dependencies: + dependencies.append(self.ref) + if initial or not is_linkable: + # If this is a subsequent target and it's linkable, don't look any + # further for linkable dependencies, as they'll already be linked into + # this target linkable. Always look at dependencies of the initial + # target, and always look at dependencies of non-linkables. + for dependency in self.dependencies: + dependency.LinkDependencies(targets, dependencies, False) + + return dependencies + + +def BuildDependencyList(targets): + # Create a DependencyGraphNode for each target. Put it into a dict for easy + # access. + dependency_nodes = {} + for target, spec in targets.iteritems(): + if not target in dependency_nodes: + dependency_nodes[target] = DependencyGraphNode(target) + + # Set up the dependency links. Targets that have no dependencies are treated + # as dependent on root_node. + root_node = DependencyGraphNode(None) + for target, spec in targets.iteritems(): + target_node = dependency_nodes[target] + target_build_file = gyp.common.BuildFile(target) + if not 'dependencies' in spec or len(spec['dependencies']) == 0: + target_node.dependencies = [root_node] + root_node.dependents.append(target_node) + else: + dependencies = spec['dependencies'] + for index in xrange(0, len(dependencies)): + try: + dependency = dependencies[index] + dependency_node = dependency_nodes[dependency] + target_node.dependencies.append(dependency_node) + dependency_node.dependents.append(target_node) + except KeyError, e: + gyp.common.ExceptionAppend(e, + 'while trying to load target %s' % target) + raise + + flat_list = root_node.FlattenToList() + + # If there's anything left unvisited, there must be a circular dependency + # (cycle). If you need to figure out what's wrong, look for elements of + # targets that are not in flat_list. 
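+  # (Illustrative: with a cycle A -> B -> A, neither node ever reaches
+  # in-degree zero, so neither is appended to flat_list and the length check
+  # below fails.)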
+ if len(flat_list) != len(targets): + raise DependencyGraphNode.CircularException, \ + 'Some targets not reachable, cycle in dependency graph detected' + + return [dependency_nodes, flat_list] + + +def VerifyNoGYPFileCircularDependencies(targets): + # Create a DependencyGraphNode for each gyp file containing a target. Put + # it into a dict for easy access. + dependency_nodes = {} + for target in targets.iterkeys(): + build_file = gyp.common.BuildFile(target) + if not build_file in dependency_nodes: + dependency_nodes[build_file] = DependencyGraphNode(build_file) + + # Set up the dependency links. + for target, spec in targets.iteritems(): + build_file = gyp.common.BuildFile(target) + build_file_node = dependency_nodes[build_file] + target_dependencies = spec.get('dependencies', []) + for dependency in target_dependencies: + try: + dependency_build_file = gyp.common.BuildFile(dependency) + if dependency_build_file == build_file: + # A .gyp file is allowed to refer back to itself. + continue + dependency_node = dependency_nodes[dependency_build_file] + if dependency_node not in build_file_node.dependencies: + build_file_node.dependencies.append(dependency_node) + dependency_node.dependents.append(build_file_node) + except KeyError, e: + gyp.common.ExceptionAppend( + e, 'while computing dependencies of .gyp file %s' % build_file) + raise + + # Files that have no dependencies are treated as dependent on root_node. + root_node = DependencyGraphNode(None) + for build_file_node in dependency_nodes.itervalues(): + if len(build_file_node.dependencies) == 0: + build_file_node.dependencies.append(root_node) + root_node.dependents.append(build_file_node) + + flat_list = root_node.FlattenToList() + + # If there's anything left unvisited, there must be a circular dependency + # (cycle). + if len(flat_list) != len(dependency_nodes): + bad_files = [] + for file in dependency_nodes.iterkeys(): + if not file in flat_list: + bad_files.append(file) + raise DependencyGraphNode.CircularException, \ + 'Some files not reachable, cycle in .gyp file dependency graph ' + \ + 'detected involving some or all of: ' + \ + ' '.join(bad_files) + + +def DoDependentSettings(key, flat_list, targets, dependency_nodes): + # key should be one of all_dependent_settings, direct_dependent_settings, + # or link_settings. + + for target in flat_list: + target_dict = targets[target] + build_file = gyp.common.BuildFile(target) + + if key == 'all_dependent_settings': + dependencies = dependency_nodes[target].DeepDependencies() + elif key == 'direct_dependent_settings': + dependencies = \ + dependency_nodes[target].DirectAndImportedDependencies(targets) + elif key == 'link_settings': + dependencies = dependency_nodes[target].LinkDependencies(targets) + else: + raise KeyError, "DoDependentSettings doesn't know how to determine " + \ + 'dependencies for ' + key + + for dependency in dependencies: + dependency_dict = targets[dependency] + if not key in dependency_dict: + continue + dependency_build_file = gyp.common.BuildFile(dependency) + MergeDicts(target_dict, dependency_dict[key], + build_file, dependency_build_file) + + +def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes, + sort_dependencies): + # Recompute target "dependencies" properties. For each static library + # target, remove "dependencies" entries referring to other static libraries, + # unless the dependency has the "hard_dependency" attribute set. 
For each + # linkable target, add a "dependencies" entry referring to all of the + # target's computed list of link dependencies (including static libraries + # if no such entry is already present. + for target in flat_list: + target_dict = targets[target] + target_type = target_dict['type'] + + if target_type == 'static_library': + if not 'dependencies' in target_dict: + continue + + target_dict['dependencies_original'] = target_dict.get( + 'dependencies', [])[:] + + # A static library should not depend on another static library unless + # the dependency relationship is "hard," which should only be done when + # a dependent relies on some side effect other than just the build + # product, like a rule or action output. Further, if a target has a + # non-hard dependency, but that dependency exports a hard dependency, + # the non-hard dependency can safely be removed, but the exported hard + # dependency must be added to the target to keep the same dependency + # ordering. + dependencies = \ + dependency_nodes[target].DirectAndImportedDependencies(targets) + index = 0 + while index < len(dependencies): + dependency = dependencies[index] + dependency_dict = targets[dependency] + + # Remove every non-hard static library dependency and remove every + # non-static library dependency that isn't a direct dependency. + if (dependency_dict['type'] == 'static_library' and \ + not dependency_dict.get('hard_dependency', False)) or \ + (dependency_dict['type'] != 'static_library' and \ + not dependency in target_dict['dependencies']): + # Take the dependency out of the list, and don't increment index + # because the next dependency to analyze will shift into the index + # formerly occupied by the one being removed. + del dependencies[index] + else: + index = index + 1 + + # Update the dependencies. If the dependencies list is empty, it's not + # needed, so unhook it. + if len(dependencies) > 0: + target_dict['dependencies'] = dependencies + else: + del target_dict['dependencies'] + + elif target_type in linkable_types: + # Get a list of dependency targets that should be linked into this + # target. Add them to the dependencies list if they're not already + # present. + + link_dependencies = dependency_nodes[target].LinkDependencies(targets) + for dependency in link_dependencies: + if dependency == target: + continue + if not 'dependencies' in target_dict: + target_dict['dependencies'] = [] + if not dependency in target_dict['dependencies']: + target_dict['dependencies'].append(dependency) + # Sort the dependencies list in the order from dependents to dependencies. + # e.g. If A and B depend on C and C depends on D, sort them in A, B, C, D. + # Note: flat_list is already sorted in the order from dependencies to + # dependents. + if sort_dependencies and 'dependencies' in target_dict: + target_dict['dependencies'] = [dep for dep in reversed(flat_list) + if dep in target_dict['dependencies']] + + +# Initialize this here to speed up MakePathRelative. +exception_re = re.compile(r'''["']?[-/$<>]''') + + +def MakePathRelative(to_file, fro_file, item): + # If item is a relative path, it's relative to the build file dict that it's + # coming from. Fix it up to make it relative to the build file dict that + # it's going into. + # Exception: any |item| that begins with these special characters is + # returned without modification. 
+ # / Used when a path is already absolute (shortcut optimization; + # such paths would be returned as absolute anyway) + # $ Used for build environment variables + # - Used for some build environment flags (such as -lapr-1 in a + # "libraries" section) + # < Used for our own variable and command expansions (see ExpandVariables) + # > Used for our own variable and command expansions (see ExpandVariables) + # + # "/' Used when a value is quoted. If these are present, then we + # check the second character instead. + # + if to_file == fro_file or exception_re.match(item): + return item + else: + # TODO(dglazkov) The backslash/forward-slash replacement at the end is a + # temporary measure. This should really be addressed by keeping all paths + # in POSIX until actual project generation. + ret = os.path.normpath(os.path.join( + gyp.common.RelativePath(os.path.dirname(fro_file), + os.path.dirname(to_file)), + item)).replace('\\', '/') + if item[-1] == '/': + ret += '/' + return ret + +def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True): + def is_hashable(x): + try: + hash(x) + except TypeError: + return False + return True + # If x is hashable, returns whether x is in s. Else returns whether x is in l. + def is_in_set_or_list(x, s, l): + if is_hashable(x): + return x in s + return x in l + + prepend_index = 0 + + # Make membership testing of hashables in |to| (in particular, strings) + # faster. + hashable_to_set = set([x for x in to if is_hashable(x)]) + + for item in fro: + singleton = False + if isinstance(item, str) or isinstance(item, int): + # The cheap and easy case. + if is_paths: + to_item = MakePathRelative(to_file, fro_file, item) + else: + to_item = item + + if not isinstance(item, str) or not item.startswith('-'): + # Any string that doesn't begin with a "-" is a singleton - it can + # only appear once in a list, to be enforced by the list merge append + # or prepend. + singleton = True + elif isinstance(item, dict): + # Make a copy of the dictionary, continuing to look for paths to fix. + # The other intelligent aspects of merge processing won't apply because + # item is being merged into an empty dict. + to_item = {} + MergeDicts(to_item, item, to_file, fro_file) + elif isinstance(item, list): + # Recurse, making a copy of the list. If the list contains any + # descendant dicts, path fixing will occur. Note that here, custom + # values for is_paths and append are dropped; those are only to be + # applied to |to| and |fro|, not sublists of |fro|. append shouldn't + # matter anyway because the new |to_item| list is empty. + to_item = [] + MergeLists(to_item, item, to_file, fro_file) + else: + raise TypeError, \ + 'Attempt to merge list item of unsupported type ' + \ + item.__class__.__name__ + + if append: + # If appending a singleton that's already in the list, don't append. + # This ensures that the earliest occurrence of the item will stay put. + if not singleton or not is_in_set_or_list(to_item, hashable_to_set, to): + to.append(to_item) + if is_hashable(to_item): + hashable_to_set.add(to_item) + else: + # If prepending a singleton that's already in the list, remove the + # existing instance and proceed with the prepend. This ensures that the + # item appears at the earliest possible position in the list. + while singleton and to_item in to: + to.remove(to_item) + + # Don't just insert everything at index 0. That would prepend the new + # items to the list in reverse order, which would be an unwelcome + # surprise. 
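+      # (Illustrative: prepending ['a', 'b'] onto ['x'] inserts 'a' at index 0
+      # and 'b' at index 1, giving ['a', 'b', 'x'] rather than the reversed
+      # ['b', 'a', 'x'] that repeated inserts at index 0 would produce.)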
+ to.insert(prepend_index, to_item) + if is_hashable(to_item): + hashable_to_set.add(to_item) + prepend_index = prepend_index + 1 + + +def MergeDicts(to, fro, to_file, fro_file): + # I wanted to name the parameter "from" but it's a Python keyword... + for k, v in fro.iteritems(): + # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give + # copy semantics. Something else may want to merge from the |fro| dict + # later, and having the same dict ref pointed to twice in the tree isn't + # what anyone wants considering that the dicts may subsequently be + # modified. + if k in to: + bad_merge = False + if isinstance(v, str) or isinstance(v, int): + if not (isinstance(to[k], str) or isinstance(to[k], int)): + bad_merge = True + elif v.__class__ != to[k].__class__: + bad_merge = True + + if bad_merge: + raise TypeError, \ + 'Attempt to merge dict value of type ' + v.__class__.__name__ + \ + ' into incompatible type ' + to[k].__class__.__name__ + \ + ' for key ' + k + if isinstance(v, str) or isinstance(v, int): + # Overwrite the existing value, if any. Cheap and easy. + is_path = IsPathSection(k) + if is_path: + to[k] = MakePathRelative(to_file, fro_file, v) + else: + to[k] = v + elif isinstance(v, dict): + # Recurse, guaranteeing copies will be made of objects that require it. + if not k in to: + to[k] = {} + MergeDicts(to[k], v, to_file, fro_file) + elif isinstance(v, list): + # Lists in dicts can be merged with different policies, depending on + # how the key in the "from" dict (k, the from-key) is written. + # + # If the from-key has ...the to-list will have this action + # this character appended:... applied when receiving the from-list: + # = replace + # + prepend + # ? set, only if to-list does not yet exist + # (none) append + # + # This logic is list-specific, but since it relies on the associated + # dict key, it's checked in this dict-oriented function. + ext = k[-1] + append = True + if ext == '=': + list_base = k[:-1] + lists_incompatible = [list_base, list_base + '?'] + to[list_base] = [] + elif ext == '+': + list_base = k[:-1] + lists_incompatible = [list_base + '=', list_base + '?'] + append = False + elif ext == '?': + list_base = k[:-1] + lists_incompatible = [list_base, list_base + '=', list_base + '+'] + else: + list_base = k + lists_incompatible = [list_base + '=', list_base + '?'] + + # Some combinations of merge policies appearing together are meaningless. + # It's stupid to replace and append simultaneously, for example. Append + # and prepend are the only policies that can coexist. + for list_incompatible in lists_incompatible: + if list_incompatible in fro: + raise KeyError, 'Incompatible list policies ' + k + ' and ' + \ + list_incompatible + + if list_base in to: + if ext == '?': + # If the key ends in "?", the list will only be merged if it doesn't + # already exist. + continue + if not isinstance(to[list_base], list): + # This may not have been checked above if merging in a list with an + # extension character. + raise TypeError, \ + 'Attempt to merge dict value of type ' + v.__class__.__name__ + \ + ' into incompatible type ' + to[list_base].__class__.__name__ + \ + ' for key ' + list_base + '(' + k + ')' + else: + to[list_base] = [] + + # Call MergeLists, which will make copies of objects that require it. 
+ # MergeLists can recurse back into MergeDicts, although this will be + # to make copies of dicts (with paths fixed), there will be no + # subsequent dict "merging" once entering a list because lists are + # always replaced, appended to, or prepended to. + is_paths = IsPathSection(list_base) + MergeLists(to[list_base], v, to_file, fro_file, is_paths, append) + else: + raise TypeError, \ + 'Attempt to merge dict value of unsupported type ' + \ + v.__class__.__name__ + ' for key ' + k + + +def MergeConfigWithInheritance(new_configuration_dict, build_file, + target_dict, configuration, visited): + # Skip if previously visted. + if configuration in visited: + return + + # Look at this configuration. + configuration_dict = target_dict['configurations'][configuration] + + # Merge in parents. + for parent in configuration_dict.get('inherit_from', []): + MergeConfigWithInheritance(new_configuration_dict, build_file, + target_dict, parent, visited + [configuration]) + + # Merge it into the new config. + MergeDicts(new_configuration_dict, configuration_dict, + build_file, build_file) + + # Drop abstract. + if 'abstract' in new_configuration_dict: + del new_configuration_dict['abstract'] + + +def SetUpConfigurations(target, target_dict): + global non_configuration_keys + # key_suffixes is a list of key suffixes that might appear on key names. + # These suffixes are handled in conditional evaluations (for =, +, and ?) + # and rules/exclude processing (for ! and /). Keys with these suffixes + # should be treated the same as keys without. + key_suffixes = ['=', '+', '?', '!', '/'] + + build_file = gyp.common.BuildFile(target) + + # Provide a single configuration by default if none exists. + # TODO(mark): Signal an error if default_configurations exists but + # configurations does not. + if not 'configurations' in target_dict: + target_dict['configurations'] = {'Default': {}} + if not 'default_configuration' in target_dict: + concrete = [i for i in target_dict['configurations'].keys() + if not target_dict['configurations'][i].get('abstract')] + target_dict['default_configuration'] = sorted(concrete)[0] + + for configuration in target_dict['configurations'].keys(): + old_configuration_dict = target_dict['configurations'][configuration] + # Skip abstract configurations (saves work only). + if old_configuration_dict.get('abstract'): + continue + # Configurations inherit (most) settings from the enclosing target scope. + # Get the inheritance relationship right by making a copy of the target + # dict. + new_configuration_dict = copy.deepcopy(target_dict) + + # Take out the bits that don't belong in a "configurations" section. + # Since configuration setup is done before conditional, exclude, and rules + # processing, be careful with handling of the suffix characters used in + # those phases. + delete_keys = [] + for key in new_configuration_dict: + key_ext = key[-1:] + if key_ext in key_suffixes: + key_base = key[:-1] + else: + key_base = key + if key_base in non_configuration_keys: + delete_keys.append(key) + + for key in delete_keys: + del new_configuration_dict[key] + + # Merge in configuration (with all its parents first). + MergeConfigWithInheritance(new_configuration_dict, build_file, + target_dict, configuration, []) + + # Put the new result back into the target dict as a configuration. + target_dict['configurations'][configuration] = new_configuration_dict + + # Now drop all the abstract ones. 
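+  # (Illustrative: a configuration such as {'Base': {'abstract': 1, ...}}
+  # exists only to be named in other configurations' 'inherit_from' lists;
+  # it was skipped above and is removed here so generators never see it.)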
+ for configuration in target_dict['configurations'].keys(): + old_configuration_dict = target_dict['configurations'][configuration] + if old_configuration_dict.get('abstract'): + del target_dict['configurations'][configuration] + + # Now that all of the target's configurations have been built, go through + # the target dict's keys and remove everything that's been moved into a + # "configurations" section. + delete_keys = [] + for key in target_dict: + key_ext = key[-1:] + if key_ext in key_suffixes: + key_base = key[:-1] + else: + key_base = key + if not key_base in non_configuration_keys: + delete_keys.append(key) + for key in delete_keys: + del target_dict[key] + + # Check the configurations to see if they contain invalid keys. + for configuration in target_dict['configurations'].keys(): + configuration_dict = target_dict['configurations'][configuration] + for key in configuration_dict.keys(): + if key in invalid_configuration_keys: + raise KeyError, ('%s not allowed in the %s configuration, found in ' + 'target %s' % (key, configuration, target)) + + + +def ProcessListFiltersInDict(name, the_dict): + """Process regular expression and exclusion-based filters on lists. + + An exclusion list is in a dict key named with a trailing "!", like + "sources!". Every item in such a list is removed from the associated + main list, which in this example, would be "sources". Removed items are + placed into a "sources_excluded" list in the dict. + + Regular expression (regex) filters are contained in dict keys named with a + trailing "/", such as "sources/" to operate on the "sources" list. Regex + filters in a dict take the form: + 'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'], + ['include', '_mac\\.cc$'] ], + The first filter says to exclude all files ending in _linux.cc, _mac.cc, and + _win.cc. The second filter then includes all files ending in _mac.cc that + are now or were once in the "sources" list. Items matching an "exclude" + filter are subject to the same processing as would occur if they were listed + by name in an exclusion list (ending in "!"). Items matching an "include" + filter are brought back into the main list if previously excluded by an + exclusion list or exclusion regex filter. Subsequent matching "exclude" + patterns can still cause items to be excluded after matching an "include". + """ + + # Look through the dictionary for any lists whose keys end in "!" or "/". + # These are lists that will be treated as exclude lists and regular + # expression-based exclude/include lists. Collect the lists that are + # needed first, looking for the lists that they operate on, and assemble + # then into |lists|. This is done in a separate loop up front, because + # the _included and _excluded keys need to be added to the_dict, and that + # can't be done while iterating through it. + + lists = [] + del_lists = [] + for key, value in the_dict.iteritems(): + operation = key[-1] + if operation != '!' and operation != '/': + continue + + if not isinstance(value, list): + raise ValueError, name + ' key ' + key + ' must be list, not ' + \ + value.__class__.__name__ + + list_key = key[:-1] + if list_key not in the_dict: + # This happens when there's a list like "sources!" but no corresponding + # "sources" list. Since there's nothing for it to operate on, queue up + # the "sources!" list for deletion now. 
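+      # (Illustrative: a dict carrying {'sources!': ['a.cc']} but no
+      # 'sources' list at this level; the exclusion has nothing to act on.)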
+ del_lists.append(key) + continue + + if not isinstance(the_dict[list_key], list): + raise ValueError, name + ' key ' + list_key + \ + ' must be list, not ' + \ + value.__class__.__name__ + ' when applying ' + \ + {'!': 'exclusion', '/': 'regex'}[operation] + + if not list_key in lists: + lists.append(list_key) + + # Delete the lists that are known to be unneeded at this point. + for del_list in del_lists: + del the_dict[del_list] + + for list_key in lists: + the_list = the_dict[list_key] + + # Initialize the list_actions list, which is parallel to the_list. Each + # item in list_actions identifies whether the corresponding item in + # the_list should be excluded, unconditionally preserved (included), or + # whether no exclusion or inclusion has been applied. Items for which + # no exclusion or inclusion has been applied (yet) have value -1, items + # excluded have value 0, and items included have value 1. Includes and + # excludes override previous actions. All items in list_actions are + # initialized to -1 because no excludes or includes have been processed + # yet. + list_actions = list((-1,) * len(the_list)) + + exclude_key = list_key + '!' + if exclude_key in the_dict: + for exclude_item in the_dict[exclude_key]: + for index in xrange(0, len(the_list)): + if exclude_item == the_list[index]: + # This item matches the exclude_item, so set its action to 0 + # (exclude). + list_actions[index] = 0 + + # The "whatever!" list is no longer needed, dump it. + del the_dict[exclude_key] + + regex_key = list_key + '/' + if regex_key in the_dict: + for regex_item in the_dict[regex_key]: + [action, pattern] = regex_item + pattern_re = re.compile(pattern) + + if action == 'exclude': + # This item matches an exclude regex, so set its value to 0 (exclude). + action_value = 0 + elif action == 'include': + # This item matches an include regex, so set its value to 1 (include). + action_value = 1 + else: + # This is an action that doesn't make any sense. + raise ValueError, 'Unrecognized action ' + action + ' in ' + name + \ + ' key ' + key + + for index in xrange(0, len(the_list)): + list_item = the_list[index] + if list_actions[index] == action_value: + # Even if the regex matches, nothing will change so continue (regex + # searches are expensive). + continue + if pattern_re.search(list_item): + # Regular expression match. + list_actions[index] = action_value + + # The "whatever/" list is no longer needed, dump it. + del the_dict[regex_key] + + # Add excluded items to the excluded list. + # + # Note that exclude_key ("sources!") is different from excluded_key + # ("sources_excluded"). The exclude_key list is input and it was already + # processed and deleted; the excluded_key list is output and it's about + # to be created. + excluded_key = list_key + '_excluded' + if excluded_key in the_dict: + raise KeyError, \ + name + ' key ' + excluded_key + ' must not be present prior ' + \ + ' to applying exclusion/regex filters for ' + list_key + + excluded_list = [] + + # Go backwards through the list_actions list so that as items are deleted, + # the indices of items that haven't been seen yet don't shift. That means + # that things need to be prepended to excluded_list to maintain them in the + # same order that they existed in the_list. + for index in xrange(len(list_actions) - 1, -1, -1): + if list_actions[index] == 0: + # Dump anything with action 0 (exclude). Keep anything with action 1 + # (include) or -1 (no include or exclude seen for the item). 
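+      # (Illustrative: for the_list = ['a_mac.cc', 'b_win.cc', 'c.cc'] with
+      # list_actions = [1, 0, -1], only 'b_win.cc' moves to excluded_list.)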
+ excluded_list.insert(0, the_list[index]) + del the_list[index] + + # If anything was excluded, put the excluded list into the_dict at + # excluded_key. + if len(excluded_list) > 0: + the_dict[excluded_key] = excluded_list + + # Now recurse into subdicts and lists that may contain dicts. + for key, value in the_dict.iteritems(): + if isinstance(value, dict): + ProcessListFiltersInDict(key, value) + elif isinstance(value, list): + ProcessListFiltersInList(key, value) + + +def ProcessListFiltersInList(name, the_list): + for item in the_list: + if isinstance(item, dict): + ProcessListFiltersInDict(name, item) + elif isinstance(item, list): + ProcessListFiltersInList(name, item) + + +def ValidateTargetType(target, target_dict): + """Ensures the 'type' field on the target is one of the known types. + + Arguments: + target: string, name of target. + target_dict: dict, target spec. + + Raises an exception on error. + """ + VALID_TARGET_TYPES = ('executable', 'loadable_module', + 'static_library', 'shared_library', + 'none') + target_type = target_dict.get('type', None) + if target_type not in VALID_TARGET_TYPES: + raise Exception("Target %s has an invalid target type '%s'. " + "Must be one of %s." % + (target, target_type, '/'.join(VALID_TARGET_TYPES))) + + +def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules): + """Ensures that the rules sections in target_dict are valid and consistent, + and determines which sources they apply to. + + Arguments: + target: string, name of target. + target_dict: dict, target spec containing "rules" and "sources" lists. + extra_sources_for_rules: a list of keys to scan for rule matches in + addition to 'sources'. + """ + + # Dicts to map between values found in rules' 'rule_name' and 'extension' + # keys and the rule dicts themselves. + rule_names = {} + rule_extensions = {} + + rules = target_dict.get('rules', []) + for rule in rules: + # Make sure that there's no conflict among rule names and extensions. + rule_name = rule['rule_name'] + if rule_name in rule_names: + raise KeyError, 'rule %s exists in duplicate, target %s' % \ + (rule_name, target) + rule_names[rule_name] = rule + + rule_extension = rule['extension'] + if rule_extension in rule_extensions: + raise KeyError, ('extension %s associated with multiple rules, ' + + 'target %s rules %s and %s') % \ + (rule_extension, target, + rule_extensions[rule_extension]['rule_name'], + rule_name) + rule_extensions[rule_extension] = rule + + # Make sure rule_sources isn't already there. It's going to be + # created below if needed. + if 'rule_sources' in rule: + raise KeyError, \ + 'rule_sources must not exist in input, target %s rule %s' % \ + (target, rule_name) + extension = rule['extension'] + + rule_sources = [] + source_keys = ['sources'] + source_keys.extend(extra_sources_for_rules) + for source_key in source_keys: + for source in target_dict.get(source_key, []): + (source_root, source_extension) = os.path.splitext(source) + if source_extension.startswith('.'): + source_extension = source_extension[1:] + if source_extension == extension: + rule_sources.append(source) + + if len(rule_sources) > 0: + rule['rule_sources'] = rule_sources + + +def ValidateActionsInTarget(target, target_dict, build_file): + '''Validates the inputs to the actions in a target.''' + target_name = target_dict.get('target_name') + actions = target_dict.get('actions', []) + for action in actions: + action_name = action.get('action_name') + if not action_name: + raise Exception("Anonymous action in target %s. 
" + "An action must have an 'action_name' field." % + target_name) + inputs = action.get('inputs', []) + + +def ValidateRunAsInTarget(target, target_dict, build_file): + target_name = target_dict.get('target_name') + run_as = target_dict.get('run_as') + if not run_as: + return + if not isinstance(run_as, dict): + raise Exception("The 'run_as' in target %s from file %s should be a " + "dictionary." % + (target_name, build_file)) + action = run_as.get('action') + if not action: + raise Exception("The 'run_as' in target %s from file %s must have an " + "'action' section." % + (target_name, build_file)) + if not isinstance(action, list): + raise Exception("The 'action' for 'run_as' in target %s from file %s " + "must be a list." % + (target_name, build_file)) + working_directory = run_as.get('working_directory') + if working_directory and not isinstance(working_directory, str): + raise Exception("The 'working_directory' for 'run_as' in target %s " + "in file %s should be a string." % + (target_name, build_file)) + environment = run_as.get('environment') + if environment and not isinstance(environment, dict): + raise Exception("The 'environment' for 'run_as' in target %s " + "in file %s should be a dictionary." % + (target_name, build_file)) + + +def TurnIntIntoStrInDict(the_dict): + """Given dict the_dict, recursively converts all integers into strings. + """ + # Use items instead of iteritems because there's no need to try to look at + # reinserted keys and their associated values. + for k, v in the_dict.items(): + if isinstance(v, int): + v = str(v) + the_dict[k] = v + elif isinstance(v, dict): + TurnIntIntoStrInDict(v) + elif isinstance(v, list): + TurnIntIntoStrInList(v) + + if isinstance(k, int): + the_dict[str(k)] = v + del the_dict[k] + + +def TurnIntIntoStrInList(the_list): + """Given list the_list, recursively converts all integers into strings. + """ + for index in xrange(0, len(the_list)): + item = the_list[index] + if isinstance(item, int): + the_list[index] = str(item) + elif isinstance(item, dict): + TurnIntIntoStrInDict(item) + elif isinstance(item, list): + TurnIntIntoStrInList(item) + + +def VerifyNoCollidingTargets(targets): + """Verify that no two targets in the same directory share the same name. + + Arguments: + targets: A list of targets in the form 'path/to/file.gyp:target_name'. + """ + # Keep a dict going from 'subdirectory:target_name' to 'foo.gyp'. + used = {} + for target in targets: + # Separate out 'path/to/file.gyp, 'target_name' from + # 'path/to/file.gyp:target_name'. + path, name = target.rsplit(':', 1) + # Separate out 'path/to', 'file.gyp' from 'path/to/file.gyp'. + subdir, gyp = os.path.split(path) + # Use '.' for the current directory '', so that the error messages make + # more sense. + if not subdir: + subdir = '.' + # Prepare a key like 'path/to:target_name'. + key = subdir + ':' + name + if key in used: + # Complain if this target is already used. + raise Exception('Duplicate target name "%s" in directory "%s" used both ' + 'in "%s" and "%s".' % (name, subdir, gyp, used[key])) + used[key] = gyp + + +def Load(build_files, variables, includes, depth, generator_input_info, check, + circular_check): + # Set up path_sections and non_configuration_keys with the default data plus + # the generator-specifc data. 
+ global path_sections + path_sections = base_path_sections[:] + path_sections.extend(generator_input_info['path_sections']) + + global non_configuration_keys + non_configuration_keys = base_non_configuration_keys[:] + non_configuration_keys.extend(generator_input_info['non_configuration_keys']) + + # TODO(mark) handle variants if the generator doesn't want them directly. + generator_handles_variants = \ + generator_input_info['generator_handles_variants'] + + global absolute_build_file_paths + absolute_build_file_paths = \ + generator_input_info['generator_wants_absolute_build_file_paths'] + + global multiple_toolsets + multiple_toolsets = generator_input_info[ + 'generator_supports_multiple_toolsets'] + + # A generator can have other lists (in addition to sources) be processed + # for rules. + extra_sources_for_rules = generator_input_info['extra_sources_for_rules'] + + # Load build files. This loads every target-containing build file into + # the |data| dictionary such that the keys to |data| are build file names, + # and the values are the entire build file contents after "early" or "pre" + # processing has been done and includes have been resolved. + # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as + # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps + # track of the keys corresponding to "target" files. + data = {'target_build_files': set()} + aux_data = {} + for build_file in build_files: + # Normalize paths everywhere. This is important because paths will be + # used as keys to the data dict and for references between input files. + build_file = os.path.normpath(build_file) + try: + LoadTargetBuildFile(build_file, data, aux_data, variables, includes, + depth, check) + except Exception, e: + gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file) + raise + + # Build a dict to access each target's subdict by qualified name. + targets = BuildTargetsDict(data) + + # Fully qualify all dependency links. + QualifyDependencies(targets) + + # Expand dependencies specified as build_file:*. + ExpandWildcardDependencies(targets, data) + + # Apply exclude (!) and regex (/) list filters only for dependency_sections. + for target_name, target_dict in targets.iteritems(): + tmp_dict = {} + for key_base in dependency_sections: + for op in ('', '!', '/'): + key = key_base + op + if key in target_dict: + tmp_dict[key] = target_dict[key] + del target_dict[key] + ProcessListFiltersInDict(target_name, tmp_dict) + # Write the results back to |target_dict|. + for key in tmp_dict: + target_dict[key] = tmp_dict[key] + + if circular_check: + # Make sure that any targets in a.gyp don't contain dependencies in other + # .gyp files that further depend on a.gyp. + VerifyNoGYPFileCircularDependencies(targets) + + [dependency_nodes, flat_list] = BuildDependencyList(targets) + + # Check that no two targets in the same directory have the same name. + VerifyNoCollidingTargets(flat_list) + + # Handle dependent settings of various types. + for settings_type in ['all_dependent_settings', + 'direct_dependent_settings', + 'link_settings']: + DoDependentSettings(settings_type, flat_list, targets, dependency_nodes) + + # Take out the dependent settings now that they've been published to all + # of the targets that require them. 
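+  # (Illustrative: once a library's 'direct_dependent_settings' such as
+  # {'include_dirs': ['include']} have been merged into every dependent
+  # target above, the originating key carries no further information.)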
+ for target in flat_list: + if settings_type in targets[target]: + del targets[target][settings_type] + + # Make sure static libraries don't declare dependencies on other static + # libraries, but that linkables depend on all unlinked static libraries + # that they need so that their link steps will be correct. + gii = generator_input_info + if gii['generator_wants_static_library_dependencies_adjusted']: + AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes, + gii['generator_wants_sorted_dependencies']) + + # Apply "post"/"late"/"target" variable expansions and condition evaluations. + for target in flat_list: + target_dict = targets[target] + build_file = gyp.common.BuildFile(target) + ProcessVariablesAndConditionsInDict(target_dict, True, variables, + build_file) + + # Move everything that can go into a "configurations" section into one. + for target in flat_list: + target_dict = targets[target] + SetUpConfigurations(target, target_dict) + + # Apply exclude (!) and regex (/) list filters. + for target in flat_list: + target_dict = targets[target] + ProcessListFiltersInDict(target, target_dict) + + # Make sure that the rules make sense, and build up rule_sources lists as + # needed. Not all generators will need to use the rule_sources lists, but + # some may, and it seems best to build the list in a common spot. + # Also validate actions and run_as elements in targets. + for target in flat_list: + target_dict = targets[target] + build_file = gyp.common.BuildFile(target) + ValidateTargetType(target, target_dict) + ValidateRulesInTarget(target, target_dict, extra_sources_for_rules) + ValidateRunAsInTarget(target, target_dict, build_file) + ValidateActionsInTarget(target, target_dict, build_file) + + # Generators might not expect ints. Turn them into strs. + TurnIntIntoStrInDict(data) + + # TODO(mark): Return |data| for now because the generator needs a list of + # build files that came in. In the future, maybe it should just accept + # a list, and not the whole data dict. + return [flat_list, targets, data] diff --git a/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/mac_tool.py b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/mac_tool.py new file mode 100755 index 0000000000..42dd6c9923 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/mac_tool.py @@ -0,0 +1,204 @@ +#!/usr/bin/env python +# Copyright (c) 2012 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Utility functions to perform Xcode-style build steps. + +These functions are executed via gyp-mac-tool when using the Makefile generator. +""" + +import fcntl +import os +import plistlib +import re +import shutil +import string +import subprocess +import sys + + +def main(args): + executor = MacTool() + exit_code = executor.Dispatch(args) + if exit_code is not None: + sys.exit(exit_code) + + +class MacTool(object): + """This class performs all the Mac tooling steps. 
The methods can either be
+  executed directly, or dispatched from an argument list."""
+
+  def Dispatch(self, args):
+    """Dispatches a string command to a method."""
+    if len(args) < 1:
+      raise Exception("Not enough arguments")
+
+    method = "Exec%s" % self._CommandifyName(args[0])
+    return getattr(self, method)(*args[1:])
+
+  def _CommandifyName(self, name_string):
+    """Transforms a tool name like copy-info-plist to CopyInfoPlist."""
+    return name_string.title().replace('-', '')
+
+  def ExecCopyBundleResource(self, source, dest):
+    """Copies a resource file to the bundle/Resources directory, performing any
+    necessary compilation on each resource."""
+    extension = os.path.splitext(source)[1].lower()
+    if os.path.isdir(source):
+      # Copy tree.
+      if os.path.exists(dest):
+        shutil.rmtree(dest)
+      shutil.copytree(source, dest)
+    elif extension == '.xib':
+      self._CopyXIBFile(source, dest)
+    elif extension == '.strings':
+      self._CopyStringsFile(source, dest)
+    # TODO: Given that files with arbitrary extensions can be copied to the
+    # bundle, we will want to get rid of this whitelist eventually.
+    elif extension in [
+        '.icns', '.manifest', '.pak', '.pdf', '.png', '.sb', '.sh',
+        '.ttf', '.sdef']:
+      shutil.copyfile(source, dest)
+    else:
+      raise NotImplementedError(
+          "Don't know how to copy bundle resources of type %s while copying "
+          "%s to %s" % (extension, source, dest))
+
+  def _CopyXIBFile(self, source, dest):
+    """Compiles a XIB file with ibtool into a binary plist in the bundle."""
+    args = ['/Developer/usr/bin/ibtool', '--errors', '--warnings',
+        '--notices', '--output-format', 'human-readable-text', '--compile',
+        dest, source]
+    subprocess.call(args)
+
+  def _CopyStringsFile(self, source, dest):
+    """Copies a .strings file using iconv to reconvert the input into UTF-16."""
+    input_code = self._DetectInputEncoding(source) or "UTF-8"
+    fp = open(dest, 'w')
+    args = ['/usr/bin/iconv', '--from-code', input_code, '--to-code',
+        'UTF-16', source]
+    subprocess.call(args, stdout=fp)
+    fp.close()
+
+  def _DetectInputEncoding(self, file_name):
+    """Reads the first few bytes from file_name and tries to guess the text
+    encoding. Returns None if it can't detect one."""
+    fp = open(file_name, 'rb')
+    try:
+      header = fp.read(3)
+    except Exception:
+      fp.close()
+      return None
+    fp.close()
+    if header.startswith("\xFE\xFF"):
+      return "UTF-16BE"
+    elif header.startswith("\xFF\xFE"):
+      return "UTF-16LE"
+    elif header.startswith("\xEF\xBB\xBF"):
+      return "UTF-8"
+    else:
+      return None
+
+  def ExecCopyInfoPlist(self, source, dest):
+    """Copies the |source| Info.plist to the destination directory |dest|."""
+    # Read the source Info.plist into memory.
+    fd = open(source, 'r')
+    lines = fd.read()
+    fd.close()
+
+    # Go through all the environment variables and replace them as variables in
+    # the file.
+    for key in os.environ:
+      if key.startswith('_'):
+        continue
+      evar = '${%s}' % key
+      lines = string.replace(lines, evar, os.environ[key])
+
+    # Write out the file with variables replaced.
+    fd = open(dest, 'w')
+    fd.write(lines)
+    fd.close()
+
+    # Write out the PkgInfo file now that the Info.plist file has been
+    # "compiled".
+    self._WritePkgInfo(dest)
+
+  def _WritePkgInfo(self, info_plist):
+    """This writes the PkgInfo file from the data stored in Info.plist."""
+    plist = plistlib.readPlist(info_plist)
+    if not plist:
+      return
+
+    # Only create PkgInfo for executable types.
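+    # Illustration with hypothetical plist values: for
+    #
+    #   CFBundlePackageType = 'APPL', CFBundleSignature = 'chrm'
+    #
+    # the code below writes the eight bytes 'APPLchrm' to a PkgInfo file
+    # next to Info.plist. Non-application package types (e.g. 'FMWK' for
+    # frameworks) are skipped.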
+ package_type = plist['CFBundlePackageType'] + if package_type != 'APPL': + return + + # The format of PkgInfo is eight characters, representing the bundle type + # and bundle signature, each four characters. If that is missing, four + # '?' characters are used instead. + signature_code = plist['CFBundleSignature'] + if len(signature_code) != 4: + signature_code = '?' * 4 + + dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') + fp = open(dest, 'w') + fp.write('%s%s' % (package_type, signature_code)) + fp.close() + + def ExecFlock(self, lockfile, *cmd_list): + """Emulates the most basic behavior of Linux's flock(1).""" + # Rely on exception handling to report errors. + fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) + fcntl.flock(fd, fcntl.LOCK_EX) + return subprocess.call(cmd_list) + + def ExecFilterLibtool(self, *cmd_list): + """Calls libtool and filters out 'libtool: file: foo.o has no symbols'.""" + libtool_re = re.compile(r'^libtool: file: .* has no symbols$') + libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE) + for line in libtoolout.stderr: + if not libtool_re.match(line): + sys.stderr.write(line) + return libtoolout.returncode + + def ExecPackageFramework(self, framework, version): + """Takes a path to Something.framework and the Current version of that and + sets up all the symlinks.""" + # Find the name of the binary based on the part before the ".framework". + binary = os.path.basename(framework).split('.')[0] + + CURRENT = 'Current' + RESOURCES = 'Resources' + VERSIONS = 'Versions' + + if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): + # Binary-less frameworks don't seem to contain symlinks (see e.g. + # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). + return + + # Move into the framework directory to set the symlinks correctly. + pwd = os.getcwd() + os.chdir(framework) + + # Set up the Current version. + self._Relink(version, os.path.join(VERSIONS, CURRENT)) + + # Set up the root symlinks. + self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) + self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) + + # Back to where we were before! + os.chdir(pwd) + + def _Relink(self, dest, link): + """Creates a symlink to |dest| named |link|. If |link| already exists, + it is overwritten.""" + if os.path.lexists(link): + os.remove(link) + os.symlink(dest, link) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/ninja_syntax.py b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/ninja_syntax.py new file mode 100644 index 0000000000..27e1908761 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/ninja_syntax.py @@ -0,0 +1,145 @@ +# This file comes from +# https://github.com/martine/ninja/blob/master/misc/ninja_syntax.py +# Do not edit! Edit the upstream one instead. + +"""Python module for generating .ninja files. + +Note that this is emphatically not a required piece of Ninja; it's +just a helpful utility for build-file-generation systems that already +use Python. 
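+
+A minimal usage sketch of the Writer class defined below (Python 2;
+file and rule names are illustrative):
+
+  import StringIO
+  buf = StringIO.StringIO()
+  writer = Writer(buf)
+  writer.rule('cc', command='gcc -c $in -o $out')
+  writer.build('foo.o', 'cc', inputs='foo.c')
+
+which leaves buf.getvalue() holding:
+
+  rule cc
+    command = gcc -c $in -o $out
+  build foo.o: cc foo.c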
+""" + +import textwrap +import re + +def escape_spaces(word): + return word.replace('$ ','$$ ').replace(' ','$ ') + +class Writer(object): + def __init__(self, output, width=78): + self.output = output + self.width = width + + def newline(self): + self.output.write('\n') + + def comment(self, text): + for line in textwrap.wrap(text, self.width - 2): + self.output.write('# ' + line + '\n') + + def variable(self, key, value, indent=0): + if value is None: + return + if isinstance(value, list): + value = ' '.join(filter(None, value)) # Filter out empty strings. + self._line('%s = %s' % (key, value), indent) + + def rule(self, name, command, description=None, depfile=None, + generator=False, restat=False, deplist=None): + self._line('rule %s' % name) + self.variable('command', command, indent=1) + if description: + self.variable('description', description, indent=1) + if depfile: + self.variable('depfile', depfile, indent=1) + if deplist: + self.variable('deplist', deplist, indent=1) + if generator: + self.variable('generator', '1', indent=1) + if restat: + self.variable('restat', '1', indent=1) + + def build(self, outputs, rule, inputs=None, implicit=None, order_only=None, + variables=None): + outputs = self._as_list(outputs) + all_inputs = self._as_list(inputs)[:] + out_outputs = map(escape_spaces, outputs) + all_inputs = map(escape_spaces, all_inputs) + + if implicit: + implicit = map(escape_spaces, self._as_list(implicit)) + all_inputs.append('|') + all_inputs.extend(implicit) + if order_only: + order_only = map(escape_spaces, self._as_list(order_only)) + all_inputs.append('||') + all_inputs.extend(order_only) + + self._line('build %s: %s %s' % (' '.join(out_outputs), + rule, + ' '.join(all_inputs))) + + if variables: + for key, val in variables: + self.variable(key, val, indent=1) + + return outputs + + def include(self, path): + self._line('include %s' % path) + + def subninja(self, path): + self._line('subninja %s' % path) + + def default(self, paths): + self._line('default %s' % ' '.join(self._as_list(paths))) + + def _count_dollars_before_index(self, s, i): + """Returns the number of '$' characters right in front of s[i].""" + dollar_count = 0 + dollar_index = i - 1 + while dollar_index > 0 and s[dollar_index] == '$': + dollar_count += 1 + dollar_index -= 1 + return dollar_count + + def _line(self, text, indent=0): + """Write 'text' word-wrapped at self.width characters.""" + leading_space = ' ' * indent + while len(text) > self.width: + # The text is too wide; wrap if possible. + + # Find the rightmost space that would obey our width constraint and + # that's not an escaped space. + available_space = self.width - len(leading_space) - len(' $') + space = available_space + while True: + space = text.rfind(' ', 0, space) + if space < 0 or \ + self._count_dollars_before_index(text, space) % 2 == 0: + break + + if space < 0: + # No such space; just use the first unescaped space we can find. + space = available_space - 1 + while True: + space = text.find(' ', space + 1) + if space < 0 or \ + self._count_dollars_before_index(text, space) % 2 == 0: + break + if space < 0: + # Give up on breaking. + break + + self.output.write(leading_space + text[0:space] + ' $\n') + text = text[space+1:] + + # Subsequent lines are continuations, so indent them. 
+ leading_space = ' ' * (indent+2) + + self.output.write(leading_space + text + '\n') + + def _as_list(self, input): + if input is None: + return [] + if isinstance(input, list): + return input + return [input] + + +def escape(string): + """Escape a string such that it can be embedded into a Ninja file without + further interpretation.""" + assert '\n' not in string, 'Ninja syntax does not allow newlines' + # We only have one special metacharacter: '$'. + return string.replace('$', '$$') diff --git a/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/sun_tool.py b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/sun_tool.py new file mode 100755 index 0000000000..90d59c8240 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/sun_tool.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python +# Copyright (c) 2011 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""These functions are executed via gyp-sun-tool when using the Makefile +generator.""" + +import fcntl +import os +import struct +import subprocess +import sys + + +def main(args): + executor = SunTool() + executor.Dispatch(args) + + +class SunTool(object): + """This class performs all the SunOS tooling steps. The methods can either be + executed directly, or dispatched from an argument list.""" + + def Dispatch(self, args): + """Dispatches a string command to a method.""" + if len(args) < 1: + raise Exception("Not enough arguments") + + method = "Exec%s" % self._CommandifyName(args[0]) + getattr(self, method)(*args[1:]) + + def _CommandifyName(self, name_string): + """Transforms a tool name like copy-info-plist to CopyInfoPlist""" + return name_string.title().replace('-', '') + + def ExecFlock(self, lockfile, *cmd_list): + """Emulates the most basic behavior of Linux's flock(1).""" + # Rely on exception handling to report errors. + # Note that the stock python on SunOS has a bug + # where fcntl.flock(fd, LOCK_EX) always fails + # with EBADF, that's why we use this F_SETLK + # hack instead. + fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0666) + op = struct.pack('hhllhhl', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0) + fcntl.fcntl(fd, fcntl.F_SETLK, op) + return subprocess.call(cmd_list) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/system_test.py b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/system_test.py new file mode 100755 index 0000000000..51c71e36be --- /dev/null +++ b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/system_test.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python + +# Copyright (c) 2011 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import os +import tempfile +import shutil +import subprocess + + +def TestCommands(commands, files={}, env={}): + """Run commands in a temporary directory, returning true if they all succeed. + Return false on failures or if any commands produce output. + + Arguments: + commands: an array of shell-interpretable commands, e.g. ['ls -l', 'pwd'] + each will be expanded with Python %-expansion using env first. + files: a dictionary mapping filename to contents; + files will be created in the temporary directory before running + the command. + env: a dictionary of strings to expand commands with. 
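+
+  Illustrative use (assumes a working 'cc' on PATH):
+
+    TestCommands(['%(cc)s -c hello.c'],
+                 files={'hello.c': 'int main(void) { return 0; }'},
+                 env={'cc': 'cc'})
+
+  which returns True only if the compile succeeds without printing anything.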
+ """ + tempdir = tempfile.mkdtemp() + try: + for name, contents in files.items(): + f = open(os.path.join(tempdir, name), 'wb') + f.write(contents) + f.close() + for command in commands: + proc = subprocess.Popen(command % env, shell=True, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + cwd=tempdir) + output = proc.communicate()[0] + if proc.returncode != 0 or output: + return False + return True + finally: + shutil.rmtree(tempdir) + return False + + +def TestArSupportsT(ar_command='ar', cc_command='cc'): + """Test whether 'ar' supports the 'T' flag.""" + return TestCommands(['%(cc)s -c test.c', + '%(ar)s crsT test.a test.o', + '%(cc)s test.a'], + files={'test.c': 'int main(){}'}, + env={'ar': ar_command, 'cc': cc_command}) + + +def main(): + # Run the various test functions and print the results. + def RunTest(description, function, **kwargs): + print "Testing " + description + ':', + if function(**kwargs): + print 'ok' + else: + print 'fail' + RunTest("ar 'T' flag", TestArSupportsT) + RunTest("ar 'T' flag with ccache", TestArSupportsT, cc_command='ccache cc') + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/xcode_emulation.py b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/xcode_emulation.py new file mode 100644 index 0000000000..4d8440b69f --- /dev/null +++ b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/xcode_emulation.py @@ -0,0 +1,972 @@ +# Copyright (c) 2012 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +""" +This module contains classes that help to emulate xcodebuild behavior on top of +other build systems, such as make and ninja. +""" + +import gyp.common +import os.path +import re +import shlex + +class XcodeSettings(object): + """A class that understands the gyp 'xcode_settings' object.""" + + def __init__(self, spec): + self.spec = spec + + # Per-target 'xcode_settings' are pushed down into configs earlier by gyp. + # This means self.xcode_settings[config] always contains all settings + # for that config -- the per-target settings as well. Settings that are + # the same for all configs are implicitly per-target settings. + self.xcode_settings = {} + configs = spec['configurations'] + for configname, config in configs.iteritems(): + self.xcode_settings[configname] = config.get('xcode_settings', {}) + + # This is only non-None temporarily during the execution of some methods. + self.configname = None + + # Used by _AdjustLibrary to match .a and .dylib entries in libraries. + self.library_re = re.compile(r'^lib([^/]+)\.(a|dylib)$') + + def _Settings(self): + assert self.configname + return self.xcode_settings[self.configname] + + def _Test(self, test_key, cond_key, default): + return self._Settings().get(test_key, default) == cond_key + + def _Appendf(self, lst, test_key, format_str, default=None): + if test_key in self._Settings(): + lst.append(format_str % str(self._Settings()[test_key])) + elif default: + lst.append(format_str % str(default)) + + def _WarnUnimplemented(self, test_key): + if test_key in self._Settings(): + print 'Warning: Ignoring not yet implemented key "%s".' % test_key + + def _IsBundle(self): + return int(self.spec.get('mac_bundle', 0)) != 0 + + def GetFrameworkVersion(self): + """Returns the framework version of the current target. 
Only valid for + bundles.""" + assert self._IsBundle() + return self.GetPerTargetSetting('FRAMEWORK_VERSION', default='A') + + def GetWrapperExtension(self): + """Returns the bundle extension (.app, .framework, .plugin, etc). Only + valid for bundles.""" + assert self._IsBundle() + if self.spec['type'] in ('loadable_module', 'shared_library'): + default_wrapper_extension = { + 'loadable_module': 'bundle', + 'shared_library': 'framework', + }[self.spec['type']] + wrapper_extension = self.GetPerTargetSetting( + 'WRAPPER_EXTENSION', default=default_wrapper_extension) + return '.' + self.spec.get('product_extension', wrapper_extension) + elif self.spec['type'] == 'executable': + return '.app' + else: + assert False, "Don't know extension for '%s', target '%s'" % ( + self.spec['type'], self.spec['target_name']) + + def GetProductName(self): + """Returns PRODUCT_NAME.""" + return self.spec.get('product_name', self.spec['target_name']) + + def GetFullProductName(self): + """Returns FULL_PRODUCT_NAME.""" + if self._IsBundle(): + return self.GetWrapperName() + else: + return self._GetStandaloneBinaryPath() + + def GetWrapperName(self): + """Returns the directory name of the bundle represented by this target. + Only valid for bundles.""" + assert self._IsBundle() + return self.GetProductName() + self.GetWrapperExtension() + + def GetBundleContentsFolderPath(self): + """Returns the qualified path to the bundle's contents folder. E.g. + Chromium.app/Contents or Foo.bundle/Versions/A. Only valid for bundles.""" + assert self._IsBundle() + if self.spec['type'] == 'shared_library': + return os.path.join( + self.GetWrapperName(), 'Versions', self.GetFrameworkVersion()) + else: + # loadable_modules have a 'Contents' folder like executables. + return os.path.join(self.GetWrapperName(), 'Contents') + + def GetBundleResourceFolder(self): + """Returns the qualified path to the bundle's resource folder. E.g. + Chromium.app/Contents/Resources. Only valid for bundles.""" + assert self._IsBundle() + return os.path.join(self.GetBundleContentsFolderPath(), 'Resources') + + def GetBundlePlistPath(self): + """Returns the qualified path to the bundle's plist file. E.g. + Chromium.app/Contents/Info.plist. Only valid for bundles.""" + assert self._IsBundle() + if self.spec['type'] in ('executable', 'loadable_module'): + return os.path.join(self.GetBundleContentsFolderPath(), 'Info.plist') + else: + return os.path.join(self.GetBundleContentsFolderPath(), + 'Resources', 'Info.plist') + + def GetProductType(self): + """Returns the PRODUCT_TYPE of this target.""" + if self._IsBundle(): + return { + 'executable': 'com.apple.product-type.application', + 'loadable_module': 'com.apple.product-type.bundle', + 'shared_library': 'com.apple.product-type.framework', + }[self.spec['type']] + else: + return { + 'executable': 'com.apple.product-type.tool', + 'loadable_module': 'com.apple.product-type.library.dynamic', + 'shared_library': 'com.apple.product-type.library.dynamic', + 'static_library': 'com.apple.product-type.library.static', + }[self.spec['type']] + + def GetMachOType(self): + """Returns the MACH_O_TYPE of this target.""" + # Weird, but matches Xcode. + if not self._IsBundle() and self.spec['type'] == 'executable': + return '' + return { + 'executable': 'mh_execute', + 'static_library': 'staticlib', + 'shared_library': 'mh_dylib', + 'loadable_module': 'mh_bundle', + }[self.spec['type']] + + def _GetBundleBinaryPath(self): + """Returns the name of the bundle binary of by this target. + E.g. 
Chromium.app/Contents/MacOS/Chromium. Only valid for bundles.""" + assert self._IsBundle() + if self.spec['type'] in ('shared_library'): + path = self.GetBundleContentsFolderPath() + elif self.spec['type'] in ('executable', 'loadable_module'): + path = os.path.join(self.GetBundleContentsFolderPath(), 'MacOS') + return os.path.join(path, self.GetExecutableName()) + + def _GetStandaloneExecutableSuffix(self): + if 'product_extension' in self.spec: + return '.' + self.spec['product_extension'] + return { + 'executable': '', + 'static_library': '.a', + 'shared_library': '.dylib', + 'loadable_module': '.so', + }[self.spec['type']] + + def _GetStandaloneExecutablePrefix(self): + return self.spec.get('product_prefix', { + 'executable': '', + 'static_library': 'lib', + 'shared_library': 'lib', + # Non-bundled loadable_modules are called foo.so for some reason + # (that is, .so and no prefix) with the xcode build -- match that. + 'loadable_module': '', + }[self.spec['type']]) + + def _GetStandaloneBinaryPath(self): + """Returns the name of the non-bundle binary represented by this target. + E.g. hello_world. Only valid for non-bundles.""" + assert not self._IsBundle() + assert self.spec['type'] in ( + 'executable', 'shared_library', 'static_library', 'loadable_module'), ( + 'Unexpected type %s' % self.spec['type']) + target = self.spec['target_name'] + if self.spec['type'] == 'static_library': + if target[:3] == 'lib': + target = target[3:] + elif self.spec['type'] in ('loadable_module', 'shared_library'): + if target[:3] == 'lib': + target = target[3:] + + target_prefix = self._GetStandaloneExecutablePrefix() + target = self.spec.get('product_name', target) + target_ext = self._GetStandaloneExecutableSuffix() + return target_prefix + target + target_ext + + def GetExecutableName(self): + """Returns the executable name of the bundle represented by this target. + E.g. Chromium.""" + if self._IsBundle(): + return self.spec.get('product_name', self.spec['target_name']) + else: + return self._GetStandaloneBinaryPath() + + def GetExecutablePath(self): + """Returns the directory name of the bundle represented by this target. E.g. + Chromium.app/Contents/MacOS/Chromium.""" + if self._IsBundle(): + return self._GetBundleBinaryPath() + else: + return self._GetStandaloneBinaryPath() + + def _SdkPath(self): + sdk_root = self.GetPerTargetSetting('SDKROOT', default='macosx10.5') + if sdk_root.startswith('macosx'): + sdk_root = 'MacOSX' + sdk_root[len('macosx'):] + return '/Developer/SDKs/%s.sdk' % sdk_root + + def GetCflags(self, configname): + """Returns flags that need to be added to .c, .cc, .m, and .mm + compilations.""" + # This functions (and the similar ones below) do not offer complete + # emulation of all xcode_settings keys. They're implemented on demand. + + self.configname = configname + cflags = [] + + sdk_root = self._SdkPath() + if 'SDKROOT' in self._Settings(): + cflags.append('-isysroot %s' % sdk_root) + + if self._Test('GCC_CHAR_IS_UNSIGNED_CHAR', 'YES', default='NO'): + cflags.append('-funsigned-char') + + if self._Test('GCC_CW_ASM_SYNTAX', 'YES', default='YES'): + cflags.append('-fasm-blocks') + + if 'GCC_DYNAMIC_NO_PIC' in self._Settings(): + if self._Settings()['GCC_DYNAMIC_NO_PIC'] == 'YES': + cflags.append('-mdynamic-no-pic') + else: + pass + # TODO: In this case, it depends on the target. 
xcode passes + # mdynamic-no-pic by default for executable and possibly static lib + # according to mento + + if self._Test('GCC_ENABLE_PASCAL_STRINGS', 'YES', default='YES'): + cflags.append('-mpascal-strings') + + self._Appendf(cflags, 'GCC_OPTIMIZATION_LEVEL', '-O%s', default='s') + + if self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES'): + dbg_format = self._Settings().get('DEBUG_INFORMATION_FORMAT', 'dwarf') + if dbg_format == 'dwarf': + cflags.append('-gdwarf-2') + elif dbg_format == 'stabs': + raise NotImplementedError('stabs debug format is not supported yet.') + elif dbg_format == 'dwarf-with-dsym': + cflags.append('-gdwarf-2') + else: + raise NotImplementedError('Unknown debug format %s' % dbg_format) + + if self._Test('GCC_SYMBOLS_PRIVATE_EXTERN', 'YES', default='NO'): + cflags.append('-fvisibility=hidden') + + if self._Test('GCC_TREAT_WARNINGS_AS_ERRORS', 'YES', default='NO'): + cflags.append('-Werror') + + if self._Test('GCC_WARN_ABOUT_MISSING_NEWLINE', 'YES', default='NO'): + cflags.append('-Wnewline-eof') + + self._Appendf(cflags, 'MACOSX_DEPLOYMENT_TARGET', '-mmacosx-version-min=%s') + + # TODO: + if self._Test('COPY_PHASE_STRIP', 'YES', default='NO'): + self._WarnUnimplemented('COPY_PHASE_STRIP') + self._WarnUnimplemented('GCC_DEBUGGING_SYMBOLS') + self._WarnUnimplemented('GCC_ENABLE_OBJC_EXCEPTIONS') + self._WarnUnimplemented('GCC_ENABLE_OBJC_GC') + + # TODO: This is exported correctly, but assigning to it is not supported. + self._WarnUnimplemented('MACH_O_TYPE') + self._WarnUnimplemented('PRODUCT_TYPE') + + archs = self._Settings().get('ARCHS', ['i386']) + if len(archs) != 1: + # TODO: Supporting fat binaries will be annoying. + self._WarnUnimplemented('ARCHS') + archs = ['i386'] + cflags.append('-arch ' + archs[0]) + + if archs[0] in ('i386', 'x86_64'): + if self._Test('GCC_ENABLE_SSE3_EXTENSIONS', 'YES', default='NO'): + cflags.append('-msse3') + if self._Test('GCC_ENABLE_SUPPLEMENTAL_SSE3_INSTRUCTIONS', 'YES', + default='NO'): + cflags.append('-mssse3') # Note 3rd 's'. 
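+      # Putting the pieces together (hypothetical settings): an
+      # xcode_settings dict of {'GCC_OPTIMIZATION_LEVEL': '0',
+      # 'GCC_ENABLE_SSE3_EXTENSIONS': 'YES'} contributes '-O0' and '-msse3',
+      # on top of the defaults computed above such as '-fasm-blocks',
+      # '-mpascal-strings', '-gdwarf-2' and '-arch i386'.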
+      if self._Test('GCC_ENABLE_SSE41_EXTENSIONS', 'YES', default='NO'):
+        cflags.append('-msse4.1')
+      if self._Test('GCC_ENABLE_SSE42_EXTENSIONS', 'YES', default='NO'):
+        cflags.append('-msse4.2')
+
+    cflags += self._Settings().get('OTHER_CFLAGS', [])
+    cflags += self._Settings().get('WARNING_CFLAGS', [])
+
+    config = self.spec['configurations'][self.configname]
+    framework_dirs = config.get('mac_framework_dirs', [])
+    for directory in framework_dirs:
+      cflags.append('-F ' + directory.replace('$(SDKROOT)', sdk_root))
+
+    self.configname = None
+    return cflags
+
+  def GetCflagsC(self, configname):
+    """Returns flags that need to be added to .c and .m compilations."""
+    self.configname = configname
+    cflags_c = []
+    self._Appendf(cflags_c, 'GCC_C_LANGUAGE_STANDARD', '-std=%s')
+    self.configname = None
+    return cflags_c
+
+  def GetCflagsCC(self, configname):
+    """Returns flags that need to be added to .cc and .mm compilations."""
+    self.configname = configname
+    cflags_cc = []
+    if self._Test('GCC_ENABLE_CPP_RTTI', 'NO', default='YES'):
+      cflags_cc.append('-fno-rtti')
+    if self._Test('GCC_ENABLE_CPP_EXCEPTIONS', 'NO', default='YES'):
+      cflags_cc.append('-fno-exceptions')
+    if self._Test('GCC_INLINES_ARE_PRIVATE_EXTERN', 'YES', default='NO'):
+      cflags_cc.append('-fvisibility-inlines-hidden')
+    if self._Test('GCC_THREADSAFE_STATICS', 'NO', default='YES'):
+      cflags_cc.append('-fno-threadsafe-statics')
+    self.configname = None
+    return cflags_cc
+
+  def GetCflagsObjC(self, configname):
+    """Returns flags that need to be added to .m compilations."""
+    self.configname = configname
+    self.configname = None
+    return []
+
+  def GetCflagsObjCC(self, configname):
+    """Returns flags that need to be added to .mm compilations."""
+    self.configname = configname
+    cflags_objcc = []
+    if self._Test('GCC_OBJC_CALL_CXX_CDTORS', 'YES', default='NO'):
+      cflags_objcc.append('-fobjc-call-cxx-cdtors')
+    self.configname = None
+    return cflags_objcc
+
+  def GetLdflags(self, configname, product_dir, gyp_to_build_path):
+    """Returns flags that need to be passed to the linker.
+
+    Args:
+      configname: The name of the configuration to get ld flags for.
+      product_dir: The directory where products such as static and dynamic
+          libraries are placed. This is added to the library search path.
+      gyp_to_build_path: A function that converts paths relative to the
+          current gyp file to paths relative to the build directory.
+    """
+    self.configname = configname
+    ldflags = []
+
+    # The xcode build is relative to a gyp file's directory, and OTHER_LDFLAGS
+    # contains two entries that depend on this. Explicitly absolutify for these
+    # two cases.
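+    # Hypothetical example: if gyp_to_build_path maps 'libs' to '../../libs',
+    # the helper below turns the OTHER_LDFLAGS entry '-Llibs' into
+    # '-L../../libs'; flags without a matching prefix pass through unchanged.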
+ def MapGypPathWithPrefix(flag, prefix): + if flag.startswith(prefix): + flag = prefix + gyp_to_build_path(flag[len(prefix):]) + return flag + for ldflag in self._Settings().get('OTHER_LDFLAGS', []): + # Required for ffmpeg (no idea why they don't use LIBRARY_SEARCH_PATHS, + # TODO(thakis): Update ffmpeg.gyp): + ldflag = MapGypPathWithPrefix(ldflag, '-L') + # Required for the nacl plugin: + ldflag = MapGypPathWithPrefix(ldflag, '-Wl,-exported_symbols_list ') + ldflags.append(ldflag) + + if self._Test('DEAD_CODE_STRIPPING', 'YES', default='NO'): + ldflags.append('-Wl,-dead_strip') + + if self._Test('PREBINDING', 'YES', default='NO'): + ldflags.append('-Wl,-prebind') + + self._Appendf( + ldflags, 'DYLIB_COMPATIBILITY_VERSION', '-compatibility_version %s') + self._Appendf( + ldflags, 'DYLIB_CURRENT_VERSION', '-current_version %s') + self._Appendf( + ldflags, 'MACOSX_DEPLOYMENT_TARGET', '-mmacosx-version-min=%s') + if 'SDKROOT' in self._Settings(): + ldflags.append('-isysroot ' + self._SdkPath()) + + for library_path in self._Settings().get('LIBRARY_SEARCH_PATHS', []): + ldflags.append('-L' + gyp_to_build_path(library_path)) + + if 'ORDER_FILE' in self._Settings(): + ldflags.append('-Wl,-order_file ' + + '-Wl,' + gyp_to_build_path( + self._Settings()['ORDER_FILE'])) + + archs = self._Settings().get('ARCHS', ['i386']) + if len(archs) != 1: + # TODO: Supporting fat binaries will be annoying. + self._WarnUnimplemented('ARCHS') + archs = ['i386'] + ldflags.append('-arch ' + archs[0]) + + # Xcode adds the product directory by default. + ldflags.append('-L' + product_dir) + + install_name = self.GetPerTargetSetting('LD_DYLIB_INSTALL_NAME') + install_base = self.GetPerTargetSetting('DYLIB_INSTALL_NAME_BASE') + default_install_name = \ + '$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)' + if not install_name and install_base: + install_name = default_install_name + + if install_name: + # Hardcode support for the variables used in chromium for now, to unblock + # people using the make build. + if '$' in install_name: + assert install_name in ('$(DYLIB_INSTALL_NAME_BASE:standardizepath)/' + '$(WRAPPER_NAME)/$(PRODUCT_NAME)', default_install_name), ( + 'Variables in LD_DYLIB_INSTALL_NAME are not generally supported yet' + ' in target \'%s\' (got \'%s\')' % + (self.spec['target_name'], install_name)) + # I'm not quite sure what :standardizepath does. Just call normpath(), + # but don't let @executable_path/../foo collapse to foo. + if '/' in install_base: + prefix, rest = '', install_base + if install_base.startswith('@'): + prefix, rest = install_base.split('/', 1) + rest = os.path.normpath(rest) # :standardizepath + install_base = os.path.join(prefix, rest) + + install_name = install_name.replace( + '$(DYLIB_INSTALL_NAME_BASE:standardizepath)', install_base) + if self._IsBundle(): + # These are only valid for bundles, hence the |if|. + install_name = install_name.replace( + '$(WRAPPER_NAME)', self.GetWrapperName()) + install_name = install_name.replace( + '$(PRODUCT_NAME)', self.GetProductName()) + else: + assert '$(WRAPPER_NAME)' not in install_name + assert '$(PRODUCT_NAME)' not in install_name + + install_name = install_name.replace( + '$(EXECUTABLE_PATH)', self.GetExecutablePath()) + + install_name = install_name.replace(' ', r'\ ') + ldflags.append('-install_name ' + install_name) + + self.configname = None + return ldflags + + def GetPerTargetSettings(self): + """Gets a list of all the per-target settings. 
This will only fetch keys
+    whose values are the same across all configurations."""
+    first_pass = True
+    result = {}
+    for configname in sorted(self.xcode_settings.keys()):
+      if first_pass:
+        result = dict(self.xcode_settings[configname])
+        first_pass = False
+      else:
+        for key, value in self.xcode_settings[configname].iteritems():
+          if key not in result:
+            continue
+          elif result[key] != value:
+            del result[key]
+    return result
+
+  def GetPerTargetSetting(self, setting, default=None):
+    """Tries to get xcode_settings.setting from spec. Assumes that the setting
+    has the same value in all configurations and throws otherwise."""
+    first_pass = True
+    result = None
+    for configname in sorted(self.xcode_settings.keys()):
+      if first_pass:
+        result = self.xcode_settings[configname].get(setting, None)
+        first_pass = False
+      else:
+        assert result == self.xcode_settings[configname].get(setting, None), (
+            "Expected per-target setting for '%s', got per-config setting "
+            "(target %s)" % (setting, self.spec['target_name']))
+    if result is None:
+      return default
+    return result
+
+  def _GetStripPostbuilds(self, configname, output_binary, quiet):
+    """Returns a list of shell commands necessary to strip this target's
+    binary. These should be run as postbuilds before the actual postbuilds
+    run."""
+    self.configname = configname
+
+    result = []
+    if (self._Test('DEPLOYMENT_POSTPROCESSING', 'YES', default='NO') and
+        self._Test('STRIP_INSTALLED_PRODUCT', 'YES', default='NO')):
+
+      default_strip_style = 'debugging'
+      if self._IsBundle():
+        default_strip_style = 'non-global'
+      elif self.spec['type'] == 'executable':
+        default_strip_style = 'all'
+
+      strip_style = self._Settings().get('STRIP_STYLE', default_strip_style)
+      strip_flags = {
+        'all': '',
+        'non-global': '-x',
+        'debugging': '-S',
+      }[strip_style]
+
+      explicit_strip_flags = self._Settings().get('STRIPFLAGS', '')
+      if explicit_strip_flags:
+        strip_flags += ' ' + _NormalizeEnvVarReferences(explicit_strip_flags)
+
+      if not quiet:
+        result.append('echo STRIP\\(%s\\)' % self.spec['target_name'])
+      result.append('strip %s %s' % (strip_flags, output_binary))
+
+    self.configname = None
+    return result
+
+  def _GetDebugInfoPostbuilds(self, configname, output, output_binary, quiet):
+    """Returns a list of shell commands necessary to massage this target's
+    debug information. These should be run as postbuilds before the actual
+    postbuilds run."""
+    self.configname = configname
+
+    # For static libraries, no dSYMs are created.
+    result = []
+    if (self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES') and
+        self._Test(
+            'DEBUG_INFORMATION_FORMAT', 'dwarf-with-dsym', default='dwarf') and
+        self.spec['type'] != 'static_library'):
+      if not quiet:
+        result.append('echo DSYMUTIL\\(%s\\)' % self.spec['target_name'])
+      result.append('dsymutil %s -o %s' % (output_binary, output + '.dSYM'))
+
+    self.configname = None
+    return result
+
+  def GetTargetPostbuilds(self, configname, output, output_binary, quiet=False):
+    """Returns a list of shell commands to run as postbuilds for this target,
+    before the actual postbuilds."""
+    # dSYMs need to build before stripping happens.
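+    # Sketch of a combined result (hypothetical paths; quiet=True, a
+    # non-bundle shared_library with DEBUG_INFORMATION_FORMAT set to
+    # 'dwarf-with-dsym' and DEPLOYMENT_POSTPROCESSING plus
+    # STRIP_INSTALLED_PRODUCT set to 'YES'):
+    #
+    #   ['dsymutil out/Foo -o out/Foo.dSYM', 'strip -S out/Foo']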
+ return ( + self._GetDebugInfoPostbuilds(configname, output, output_binary, quiet) + + self._GetStripPostbuilds(configname, output_binary, quiet)) + + def _AdjustLibrary(self, library): + if library.endswith('.framework'): + l = '-framework ' + os.path.splitext(os.path.basename(library))[0] + else: + m = self.library_re.match(library) + if m: + l = '-l' + m.group(1) + else: + l = library + return l.replace('$(SDKROOT)', self._SdkPath()) + + def AdjustLibraries(self, libraries): + """Transforms entries like 'Cocoa.framework' in libraries into entries like + '-framework Cocoa', 'libcrypto.dylib' into '-lcrypto', etc. + """ + libraries = [ self._AdjustLibrary(library) for library in libraries] + return libraries + + +class MacPrefixHeader(object): + """A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature. + + This feature consists of several pieces: + * If GCC_PREFIX_HEADER is present, all compilations in that project get an + additional |-include path_to_prefix_header| cflag. + * If GCC_PRECOMPILE_PREFIX_HEADER is present too, then the prefix header is + instead compiled, and all other compilations in the project get an + additional |-include path_to_compiled_header| instead. + + Compiled prefix headers have the extension gch. There is one gch file for + every language used in the project (c, cc, m, mm), since gch files for + different languages aren't compatible. + + gch files themselves are built with the target's normal cflags, but they + obviously don't get the |-include| flag. Instead, they need a -x flag that + describes their language. + + All o files in the target need to depend on the gch file, to make sure + it's built before any o file is built. + + This class helps with some of these tasks, but it needs help from the build + system for writing dependencies to the gch files, for writing build commands + for the gch files, and for figuring out the location of the gch files. + """ + def __init__(self, xcode_settings, + gyp_path_to_build_path, gyp_path_to_build_output): + """If xcode_settings is None, all methods on this class are no-ops. + + Args: + gyp_path_to_build_path: A function that takes a gyp-relative path, + and returns a path relative to the build directory. + gyp_path_to_build_output: A function that takes a gyp-relative path and + a language code ('c', 'cc', 'm', or 'mm'), and that returns a path + to where the output of precompiling that path for that language + should be placed (without the trailing '.gch'). + """ + # This doesn't support per-configuration prefix headers. Good enough + # for now. 
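+    # Usage sketch (hypothetical settings): with GCC_PREFIX_HEADER='pch.h'
+    # and GCC_PRECOMPILE_PREFIX_HEADER='YES', GetInclude('c') returns
+    # '-include ' plus the generator-chosen output path for the 'c' variant,
+    # whose '.gch' file is built via the commands from GetGchBuildCommands();
+    # with precompiling off it is just '-include pch.h' (run through
+    # gyp_path_to_build_path).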
+ self.header = None + self.compile_headers = False + if xcode_settings: + self.header = xcode_settings.GetPerTargetSetting('GCC_PREFIX_HEADER') + self.compile_headers = xcode_settings.GetPerTargetSetting( + 'GCC_PRECOMPILE_PREFIX_HEADER', default='NO') != 'NO' + self.compiled_headers = {} + if self.header: + if self.compile_headers: + for lang in ['c', 'cc', 'm', 'mm']: + self.compiled_headers[lang] = gyp_path_to_build_output( + self.header, lang) + self.header = gyp_path_to_build_path(self.header) + + def GetInclude(self, lang): + """Gets the cflags to include the prefix header for language |lang|.""" + if self.compile_headers and lang in self.compiled_headers: + return '-include %s' % self.compiled_headers[lang] + elif self.header: + return '-include %s' % self.header + else: + return '' + + def _Gch(self, lang): + """Returns the actual file name of the prefix header for language |lang|.""" + assert self.compile_headers + return self.compiled_headers[lang] + '.gch' + + def GetObjDependencies(self, sources, objs): + """Given a list of source files and the corresponding object files, returns + a list of (source, object, gch) tuples, where |gch| is the build-directory + relative path to the gch file each object file depends on. |compilable[i]| + has to be the source file belonging to |objs[i]|.""" + if not self.header or not self.compile_headers: + return [] + + result = [] + for source, obj in zip(sources, objs): + ext = os.path.splitext(source)[1] + lang = { + '.c': 'c', + '.cpp': 'cc', '.cc': 'cc', '.cxx': 'cc', + '.m': 'm', + '.mm': 'mm', + }.get(ext, None) + if lang: + result.append((source, obj, self._Gch(lang))) + return result + + def GetGchBuildCommands(self): + """Returns [(path_to_gch, language_flag, language, header)]. + |path_to_gch| and |header| are relative to the build directory. + """ + if not self.header or not self.compile_headers: + return [] + return [ + (self._Gch('c'), '-x c-header', 'c', self.header), + (self._Gch('cc'), '-x c++-header', 'cc', self.header), + (self._Gch('m'), '-x objective-c-header', 'm', self.header), + (self._Gch('mm'), '-x objective-c++-header', 'mm', self.header), + ] + + +def MergeGlobalXcodeSettingsToSpec(global_dict, spec): + """Merges the global xcode_settings dictionary into each configuration of the + target represented by spec. For keys that are both in the global and the local + xcode_settings dict, the local key gets precendence. + """ + # The xcode generator special-cases global xcode_settings and does something + # that amounts to merging in the global xcode_settings into each local + # xcode_settings dict. + global_xcode_settings = global_dict.get('xcode_settings', {}) + for config in spec['configurations'].values(): + if 'xcode_settings' in config: + new_settings = global_xcode_settings.copy() + new_settings.update(config['xcode_settings']) + config['xcode_settings'] = new_settings + + +def IsMacBundle(flavor, spec): + """Returns if |spec| should be treated as a bundle. + + Bundles are directories with a certain subdirectory structure, instead of + just a single file. Bundle rules do not produce a binary but also package + resources into that directory.""" + is_mac_bundle = (int(spec.get('mac_bundle', 0)) != 0 and flavor == 'mac') + if is_mac_bundle: + assert spec['type'] != 'none', ( + 'mac_bundle targets cannot have type none (target "%s")' % + spec['target_name']) + return is_mac_bundle + + +def GetMacBundleResources(product_dir, xcode_settings, resources): + """Yields (output, resource) pairs for every resource in |resources|. 
+ Only call this for mac bundle targets. + + Args: + product_dir: Path to the directory containing the output bundle, + relative to the build directory. + xcode_settings: The XcodeSettings of the current target. + resources: A list of bundle resources, relative to the build directory. + """ + dest = os.path.join(product_dir, + xcode_settings.GetBundleResourceFolder()) + for res in resources: + output = dest + + # The make generator doesn't support it, so forbid it everywhere + # to keep the generators more interchangable. + assert ' ' not in res, ( + "Spaces in resource filenames not supported (%s)" % res) + + # Split into (path,file). + res_parts = os.path.split(res) + + # Now split the path into (prefix,maybe.lproj). + lproj_parts = os.path.split(res_parts[0]) + # If the resource lives in a .lproj bundle, add that to the destination. + if lproj_parts[1].endswith('.lproj'): + output = os.path.join(output, lproj_parts[1]) + + output = os.path.join(output, res_parts[1]) + # Compiled XIB files are referred to by .nib. + if output.endswith('.xib'): + output = output[0:-3] + 'nib' + + yield output, res + + +def GetMacInfoPlist(product_dir, xcode_settings, gyp_path_to_build_path): + """Returns (info_plist, dest_plist, defines, extra_env), where: + * |info_plist| is the sourc plist path, relative to the + build directory, + * |dest_plist| is the destination plist path, relative to the + build directory, + * |defines| is a list of preprocessor defines (empty if the plist + shouldn't be preprocessed, + * |extra_env| is a dict of env variables that should be exported when + invoking |mac_tool copy-info-plist|. + + Only call this for mac bundle targets. + + Args: + product_dir: Path to the directory containing the output bundle, + relative to the build directory. + xcode_settings: The XcodeSettings of the current target. + gyp_to_build_path: A function that converts paths relative to the + current gyp file to paths relative to the build direcotry. + """ + info_plist = xcode_settings.GetPerTargetSetting('INFOPLIST_FILE') + if not info_plist: + return None, None, [], {} + + # The make generator doesn't support it, so forbid it everywhere + # to keep the generators more interchangable. + assert ' ' not in info_plist, ( + "Spaces in Info.plist filenames not supported (%s)" % info_plist) + + info_plist = gyp_path_to_build_path(info_plist) + + # If explicitly set to preprocess the plist, invoke the C preprocessor and + # specify any defines as -D flags. + if xcode_settings.GetPerTargetSetting( + 'INFOPLIST_PREPROCESS', default='NO') == 'YES': + # Create an intermediate file based on the path. + defines = shlex.split(xcode_settings.GetPerTargetSetting( + 'INFOPLIST_PREPROCESSOR_DEFINITIONS', default='')) + else: + defines = [] + + dest_plist = os.path.join(product_dir, xcode_settings.GetBundlePlistPath()) + extra_env = xcode_settings.GetPerTargetSettings() + + return info_plist, dest_plist, defines, extra_env + + +def GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration, + additional_settings=None): + """Return the environment variables that Xcode would set. See + http://developer.apple.com/library/mac/#documentation/DeveloperTools/Reference/XcodeBuildSettingRef/1-Build_Setting_Reference/build_setting_ref.html#//apple_ref/doc/uid/TP40003931-CH3-SW153 + for a full list. + + Args: + xcode_settings: An XcodeSettings object. If this is None, this function + returns an empty dict. + built_products_dir: Absolute path to the built products dir. + srcroot: Absolute path to the source root. 
+ configuration: The build configuration name. + additional_settings: An optional dict with more values to add to the + result. + """ + if not xcode_settings: return {} + + # This function is considered a friend of XcodeSettings, so let it reach into + # its implementation details. + spec = xcode_settings.spec + + # These are filled in on a as-needed basis. + env = { + 'BUILT_PRODUCTS_DIR' : built_products_dir, + 'CONFIGURATION' : configuration, + 'PRODUCT_NAME' : xcode_settings.GetProductName(), + # See /Developer/Platforms/MacOSX.platform/Developer/Library/Xcode/Specifications/MacOSX\ Product\ Types.xcspec for FULL_PRODUCT_NAME + 'SRCROOT' : srcroot, + 'SOURCE_ROOT': '${SRCROOT}', + # This is not true for static libraries, but currently the env is only + # written for bundles: + 'TARGET_BUILD_DIR' : built_products_dir, + 'TEMP_DIR' : '${TMPDIR}', + } + if spec['type'] in ( + 'executable', 'static_library', 'shared_library', 'loadable_module'): + env['EXECUTABLE_NAME'] = xcode_settings.GetExecutableName() + env['EXECUTABLE_PATH'] = xcode_settings.GetExecutablePath() + env['FULL_PRODUCT_NAME'] = xcode_settings.GetFullProductName() + mach_o_type = xcode_settings.GetMachOType() + if mach_o_type: + env['MACH_O_TYPE'] = mach_o_type + env['PRODUCT_TYPE'] = xcode_settings.GetProductType() + if xcode_settings._IsBundle(): + env['CONTENTS_FOLDER_PATH'] = \ + xcode_settings.GetBundleContentsFolderPath() + env['UNLOCALIZED_RESOURCES_FOLDER_PATH'] = \ + xcode_settings.GetBundleResourceFolder() + env['INFOPLIST_PATH'] = xcode_settings.GetBundlePlistPath() + env['WRAPPER_NAME'] = xcode_settings.GetWrapperName() + + if not additional_settings: + additional_settings = {} + else: + # Flatten lists to strings. + for k in additional_settings: + if not isinstance(additional_settings[k], str): + additional_settings[k] = ' '.join(additional_settings[k]) + additional_settings.update(env) + + for k in additional_settings: + additional_settings[k] = _NormalizeEnvVarReferences(additional_settings[k]) + + return additional_settings + + +def _NormalizeEnvVarReferences(str): + """Takes a string containing variable references in the form ${FOO}, $(FOO), + or $FOO, and returns a string with all variable references in the form ${FOO}. + """ + # $FOO -> ${FOO} + str = re.sub(r'\$([a-zA-Z_][a-zA-Z0-9_]*)', r'${\1}', str) + + # $(FOO) -> ${FOO} + matches = re.findall(r'(\$\(([a-zA-Z0-9\-_]+)\))', str) + for match in matches: + to_replace, variable = match + assert '$(' not in match, '$($(FOO)) variables not supported: ' + match + str = str.replace(to_replace, '${' + variable + '}') + + return str + + +def ExpandEnvVars(string, expansions): + """Expands ${VARIABLES}, $(VARIABLES), and $VARIABLES in string per the + expansions dict. If the variable expands to something that references + another variable, this variable is expanded as well if it's in env -- + until no variables present in env are left.""" + for k in reversed(TopologicallySortedEnvVarKeys(expansions)): + string = string.replace('${' + k + '}', expansions[k]) + string = string.replace('$(' + k + ')', expansions[k]) + string = string.replace('$' + k, expansions[k]) + return string + + +def TopologicallySortedEnvVarKeys(env): + """Takes a dict |env| whose values are strings that can refer to other keys, + for example env['foo'] = '$(bar) and $(baz)'. Returns a list L of all keys of + env such that key2 is after key1 in L if env[key2] refers to env[key1]. + + Throws an Exception in case of dependency cycles. 
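+
+  Example (hypothetical variables):
+
+    env = {'A': '${B}', 'B': '${C}', 'C': 'x'}
+    TopologicallySortedEnvVarKeys(env)  # returns ['C', 'B', 'A']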
+ """ + # Since environment variables can refer to other variables, the evaluation + # order is important. Below is the logic to compute the dependency graph + # and sort it. + regex = re.compile(r'\$\{([a-zA-Z0-9\-_]+)\}') + + # First sort the list of keys. + key_list = sorted(env.keys()) + + # Phase 1: Create a set of edges of (DEPENDEE, DEPENDER) where in the graph, + # DEPENDEE -> DEPENDER. Also create sets of dependers and dependees. + edges = set() + dependees = set() + dependers = set() + for k in key_list: + matches = regex.findall(env[k]) + if not len(matches): + continue + + depends_on_other_var = False + for dependee in matches: + assert '${' not in dependee, 'Nested variables not supported: ' + dependee + if dependee in env: + edges.add((dependee, k)) + dependees.add(dependee) + depends_on_other_var = True + if depends_on_other_var: + dependers.add(k) + + # Phase 2: Create a list of graph nodes with no incoming edges. + sorted_nodes = [] + edgeless_nodes = dependees - dependers + + # Phase 3: Perform Kahn topological sort. + while len(edgeless_nodes): + # Find a node with no incoming edges, add it to the sorted list, and + # remove it from the list of nodes that aren't part of the graph. + node = edgeless_nodes.pop() + sorted_nodes.append(node) + key_list.remove(node) + + # Find all the edges between |node| and other nodes. + edges_to_node = [e for e in edges if e[0] == node] + for edge in edges_to_node: + edges.remove(edge) + # If the node connected to |node| by |edge| has no other incoming edges, + # add it to |edgeless_nodes|. + if not len([e for e in edges if e[1] == edge[1]]): + edgeless_nodes.add(edge[1]) + + # Any remaining edges indicate a cycle. + if len(edges): + raise Exception('Xcode environment variables are cyclically dependent: ' + + str(edges)) + + # Append the "nodes" not in the graph to those that were just sorted. + sorted_nodes.extend(key_list) + + return sorted_nodes + +def GetSpecPostbuildCommands(spec, gyp_path_to_build_path, quiet=False): + """Returns the list of postbuilds explicitly defined on |spec|, in a form + executable by a shell.""" + postbuilds = [] + for postbuild in spec.get('postbuilds', []): + if not quiet: + postbuilds.append('echo POSTBUILD\\(%s\\) %s' % ( + spec['target_name'], postbuild['postbuild_name'])) + shell_list = postbuild['action'][:] + # The first element is the command. If it's a relative path, it's + # a script in the source tree relative to the gyp file and needs to be + # absolutified. Else, it's in the PATH (e.g. install_name_tool, ln). + if os.path.sep in shell_list[0]: + shell_list[0] = gyp_path_to_build_path(shell_list[0]) + + # "script.sh" -> "./script.sh" + if not os.path.sep in shell_list[0]: + shell_list[0] = os.path.join('.', shell_list[0]) + postbuilds.append(gyp.common.EncodePOSIXShellList(shell_list)) + + return postbuilds diff --git a/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/xcodeproj_file.py b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/xcodeproj_file.py new file mode 100644 index 0000000000..f6ee765d59 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/xcodeproj_file.py @@ -0,0 +1,2838 @@ +# Copyright (c) 2009 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Xcode project file generator. + +This module is both an Xcode project file generator and a documentation of the +Xcode project file format. 
Knowledge of the project file format was gained +based on extensive experience with Xcode, and by making changes to projects in +Xcode.app and observing the resultant changes in the associated project files. + +XCODE PROJECT FILES + +The generator targets the file format as written by Xcode 3.2 (specifically, +3.2.6), but past experience has taught that the format has not changed +significantly in the past several years, and future versions of Xcode are able +to read older project files. + +Xcode project files are "bundled": the project "file" from an end-user's +perspective is actually a directory with an ".xcodeproj" extension. The +project file from this module's perspective is actually a file inside this +directory, always named "project.pbxproj". This file contains a complete +description of the project and is all that is needed to use the xcodeproj. +Other files contained in the xcodeproj directory are simply used to store +per-user settings, such as the state of various UI elements in the Xcode +application. + +The project.pbxproj file is a property list, stored in a format almost +identical to the NeXTstep property list format. The file is able to carry +Unicode data, and is encoded in UTF-8. The root element in the property list +is a dictionary that contains several properties of minimal interest, and two +properties of immense interest. The most important property is a dictionary +named "objects". The entire structure of the project is represented by the +children of this property. The objects dictionary is keyed by unique 96-bit +values represented by 24 uppercase hexadecimal characters. Each value in the +objects dictionary is itself a dictionary, describing an individual object. + +Each object in the dictionary is a member of a class, which is identified by +the "isa" property of each object. A variety of classes are represented in a +project file. Objects can refer to other objects by ID, using the 24-character +hexadecimal object key. A project's objects form a tree, with a root object +of class PBXProject at the root. As an example, the PBXProject object serves +as parent to an XCConfigurationList object defining the build configurations +used in the project, a PBXGroup object serving as a container for all files +referenced in the project, and a list of target objects, each of which defines +a target in the project. There are several different types of target object, +such as PBXNativeTarget and PBXAggregateTarget. In this module, this +relationship is expressed by having each target type derive from an abstract +base named XCTarget. + +The project.pbxproj file's root dictionary also contains a property, sibling to +the "objects" dictionary, named "rootObject". The value of rootObject is a +24-character object key referring to the root PBXProject object in the +objects dictionary. + +In Xcode, every file used as input to a target or produced as a final product +of a target must appear somewhere in the hierarchy rooted at the PBXGroup +object referenced by the PBXProject's mainGroup property. A PBXGroup is +generally represented as a folder in the Xcode application. PBXGroups can +contain other PBXGroups as well as PBXFileReferences, which are pointers to +actual files. + +Each XCTarget contains a list of build phases, represented in this module by +the abstract base XCBuildPhase. 
Examples of concrete XCBuildPhase derivations
+are PBXSourcesBuildPhase and PBXFrameworksBuildPhase, which correspond to the
+"Compile Sources" and "Link Binary With Libraries" phases displayed in the
+Xcode application.  Files used as input to these phases (for example, source
+files in the former case and libraries and frameworks in the latter) are
+represented by PBXBuildFile objects, referenced by elements of "files" lists
+in XCBuildPhase objects.  Each PBXBuildFile object refers to a PBXFileReference
+object as a "weak" reference: it does not "own" the PBXFileReference, which is
+owned by the root object's mainGroup or a descendant group.  In most cases, the
+layer of indirection between an XCBuildPhase and a PBXFileReference via a
+PBXBuildFile appears extraneous, but there's actually one reason for this:
+file-specific compiler flags are added to the PBXBuildFile object so as to
+allow a single file to be a member of multiple targets while having distinct
+compiler flags for each.  These flags can be modified in the Xcode application
+in the "Build" tab of a File Info window.
+
+When a project is open in the Xcode application, Xcode will rewrite it.  As
+such, this module is careful to adhere to the formatting used by Xcode, to
+avoid insignificant changes appearing in the file when it is used in the
+Xcode application.  This will keep version control repositories happy, and
+makes it possible to compare a project file used in Xcode to one generated by
+this module to determine if any significant changes were made in the
+application.
+
+Xcode has its own way of assigning 24-character identifiers to each object,
+which is not duplicated here.  Because the identifier is only generated once,
+when an object is created, and is then left unchanged, there is no need
+to attempt to duplicate Xcode's behavior in this area.  The generator is free
+to select any identifier, even at random, to refer to the objects it creates,
+and Xcode will retain those identifiers and use them when subsequently
+rewriting the project file.  However, the generator would choose new random
+identifiers each time the project files are generated, leading to difficulties
+comparing "used" project files to "pristine" ones produced by this module,
+and causing the appearance of changes as every object identifier is changed
+when updated projects are checked in to a version control repository.  To
+mitigate this problem, this module chooses identifiers in a more deterministic
+way, by hashing a description of each object as well as its parent and ancestor
+objects.  This strategy should result in minimal "shift" in IDs as successive
+generations of project files are produced.
+
+THIS MODULE
+
+This module introduces several classes, all derived from the XCObject class.
+Nearly all of the "brains" are built into the XCObject class, which understands
+how to create and modify objects, maintain the proper tree structure, compute
+identifiers, and print objects.  For the most part, classes derived from
+XCObject need only provide a _schema class object, a dictionary that
+expresses what properties objects of the class may contain.
+
+Given this structure, it's possible to build a minimal project file by creating
+objects of the appropriate types and making the proper connections:
+
+  config_list = XCConfigurationList()
+  group = PBXGroup()
+  project = PBXProject({'buildConfigurationList': config_list,
+                        'mainGroup': group})
+
+With the project object set up, it can be added to an XCProjectFile object.
+XCProjectFile is a pseudo-class in the sense that it is a concrete XCObject +subclass that does not actually correspond to a class type found in a project +file. Rather, it is used to represent the project file's root dictionary. +Printing an XCProjectFile will print the entire project file, including the +full "objects" dictionary. + + project_file = XCProjectFile({'rootObject': project}) + project_file.ComputeIDs() + project_file.Print() + +Xcode project files are always encoded in UTF-8. This module will accept +strings of either the str class or the unicode class. Strings of class str +are assumed to already be encoded in UTF-8. Obviously, if you're just using +ASCII, you won't encounter difficulties because ASCII is a UTF-8 subset. +Strings of class unicode are handled properly and encoded in UTF-8 when +a project file is output. +""" + +import gyp.common +import posixpath +import re +import struct +import sys + +# hashlib is supplied as of Python 2.5 as the replacement interface for sha +# and other secure hashes. In 2.6, sha is deprecated. Import hashlib if +# available, avoiding a deprecation warning under 2.6. Import sha otherwise, +# preserving 2.4 compatibility. +try: + import hashlib + _new_sha1 = hashlib.sha1 +except ImportError: + import sha + _new_sha1 = sha.new + + +# See XCObject._EncodeString. This pattern is used to determine when a string +# can be printed unquoted. Strings that match this pattern may be printed +# unquoted. Strings that do not match must be quoted and may be further +# transformed to be properly encoded. Note that this expression matches the +# characters listed with "+", for 1 or more occurrences: if a string is empty, +# it must not match this pattern, because it needs to be encoded as "". +_unquoted = re.compile('^[A-Za-z0-9$./_]+$') + +# Strings that match this pattern are quoted regardless of what _unquoted says. +# Oddly, Xcode will quote any string with a run of three or more underscores. +_quoted = re.compile('___') + +# This pattern should match any character that needs to be escaped by +# XCObject._EncodeString. See that function. +_escaped = re.compile('[\\\\"]|[^ -~]') + + +# Used by SourceTreeAndPathFromPath +_path_leading_variable = re.compile('^\$\((.*?)\)(/(.*))?$') + +def SourceTreeAndPathFromPath(input_path): + """Given input_path, returns a tuple with sourceTree and path values. + + Examples: + input_path (source_tree, output_path) + '$(VAR)/path' ('VAR', 'path') + '$(VAR)' ('VAR', None) + 'path' (None, 'path') + """ + + source_group_match = _path_leading_variable.match(input_path) + if source_group_match: + source_tree = source_group_match.group(1) + output_path = source_group_match.group(3) # This may be None. + else: + source_tree = None + output_path = input_path + + return (source_tree, output_path) + +def ConvertVariablesToShellSyntax(input_string): + return re.sub('\$\((.*?)\)', '${\\1}', input_string) + +class XCObject(object): + """The abstract base of all class types used in Xcode project files. + + Class variables: + _schema: A dictionary defining the properties of this class. The keys to + _schema are string property keys as used in project files. Values + are a list of four or five elements: + [ is_list, property_type, is_strong, is_required, default ] + is_list: True if the property described is a list, as opposed + to a single element. + property_type: The type to use as the value of the property, + or if is_list is True, the type to use for each + element of the value's list. 
property_type must
+                     be an XCObject subclass, or one of the built-in
+                     types str, int, or dict.
+      is_strong: If property_type is an XCObject subclass, is_strong
+                 is True to assert that this class "owns," or serves
+                 as parent, to the property value (or, if is_list is
+                 True, values).  is_strong must be False if
+                 property_type is not an XCObject subclass.
+      is_required: True if the property is required for the class.
+                   Note that is_required being True does not preclude
+                   an empty string ("", in the case of property_type
+                   str) or list ([], in the case of is_list True) from
+                   being set for the property.
+      default: Optional.  If is_required is True, default may be set
+               to provide a default value for objects that do not supply
+               their own value.  If is_required is True and default
+               is not provided, users of the class must supply their own
+               value for the property.
+      Note that although the values of the array are expressed in
+      boolean terms, subclasses provide values as integers to conserve
+      horizontal space.
+    _should_print_single_line: False in XCObject.  Subclasses whose objects
+                               should be written to the project file in the
+                               alternate single-line format, such as
+                               PBXFileReference and PBXBuildFile, should
+                               set this to True.
+    _encode_transforms: Used by _EncodeString to encode unprintable characters.
+                        The index into this list is the ordinal of the
+                        character to transform; each value is a string
+                        used to represent the character in the output.  XCObject
+                        provides an _encode_transforms list suitable for most
+                        XCObject subclasses.
+    _alternate_encode_transforms: Provided for subclasses that wish to use
+                                  the alternate encoding rules.  Xcode seems
+                                  to use these rules when printing objects in
+                                  single-line format.  Subclasses that desire
+                                  this behavior should set _encode_transforms
+                                  to _alternate_encode_transforms.
+    _hashables: A list of XCObject subclasses that can be hashed by ComputeIDs
+                to construct this object's ID.  Most classes that need custom
+                hashing behavior should do it by overriding Hashables,
+                but in some cases an object's parent may wish to push a
+                hashable value into its child, and it can do so by appending
+                to _hashables.
+  Attributes:
+    id: The object's identifier, a 24-character uppercase hexadecimal string.
+        Usually, objects being created should not set id until the entire
+        project file structure is built.  At that point, ComputeIDs() should
+        be called on the root object to assign deterministic values for id to
+        each object in the tree.
+    parent: The object's parent.  This is set by a parent XCObject when a child
+            object is added to it.
+    _properties: The object's property dictionary.  An object's properties are
+                 described by its class' _schema variable.
+  """
+
+  _schema = {}
+  _should_print_single_line = False
+
+  # See _EncodeString.
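+  # As an illustrative note on the table built below (values follow directly
+  # from the assignments): ord('\t') is 9, so _encode_transforms[9] ends up as
+  # '\\t', while a control character with no short escape, such as chr(14),
+  # keeps the generic form '\\U000e'.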
+ _encode_transforms = [] + i = 0 + while i < ord(' '): + _encode_transforms.append('\\U%04x' % i) + i = i + 1 + _encode_transforms[7] = '\\a' + _encode_transforms[8] = '\\b' + _encode_transforms[9] = '\\t' + _encode_transforms[10] = '\\n' + _encode_transforms[11] = '\\v' + _encode_transforms[12] = '\\f' + _encode_transforms[13] = '\\n' + + _alternate_encode_transforms = list(_encode_transforms) + _alternate_encode_transforms[9] = chr(9) + _alternate_encode_transforms[10] = chr(10) + _alternate_encode_transforms[11] = chr(11) + + def __init__(self, properties=None, id=None, parent=None): + self.id = id + self.parent = parent + self._properties = {} + self._hashables = [] + self._SetDefaultsFromSchema() + self.UpdateProperties(properties) + + def __repr__(self): + try: + name = self.Name() + except NotImplementedError: + return '<%s at 0x%x>' % (self.__class__.__name__, id(self)) + return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self)) + + def Copy(self): + """Make a copy of this object. + + The new object will have its own copy of lists and dicts. Any XCObject + objects owned by this object (marked "strong") will be copied in the + new object, even those found in lists. If this object has any weak + references to other XCObjects, the same references are added to the new + object without making a copy. + """ + + that = self.__class__(id=self.id, parent=self.parent) + for key, value in self._properties.iteritems(): + is_strong = self._schema[key][2] + + if isinstance(value, XCObject): + if is_strong: + new_value = value.Copy() + new_value.parent = that + that._properties[key] = new_value + else: + that._properties[key] = value + elif isinstance(value, str) or isinstance(value, unicode) or \ + isinstance(value, int): + that._properties[key] = value + elif isinstance(value, list): + if is_strong: + # If is_strong is True, each element is an XCObject, so it's safe to + # call Copy. + that._properties[key] = [] + for item in value: + new_item = item.Copy() + new_item.parent = that + that._properties[key].append(new_item) + else: + that._properties[key] = value[:] + elif isinstance(value, dict): + # dicts are never strong. + if is_strong: + raise TypeError, 'Strong dict for key ' + key + ' in ' + \ + self.__class__.__name__ + else: + that._properties[key] = value.copy() + else: + raise TypeError, 'Unexpected type ' + value.__class__.__name__ + \ + ' for key ' + key + ' in ' + self.__class__.__name__ + + return that + + def Name(self): + """Return the name corresponding to an object. + + Not all objects necessarily need to be nameable, and not all that do have + a "name" property. Override as needed. + """ + + # If the schema indicates that "name" is required, try to access the + # property even if it doesn't exist. This will result in a KeyError + # being raised for the property that should be present, which seems more + # appropriate than NotImplementedError in this case. + if 'name' in self._properties or \ + ('name' in self._schema and self._schema['name'][3]): + return self._properties['name'] + + raise NotImplementedError, \ + self.__class__.__name__ + ' must implement Name' + + def Comment(self): + """Return a comment string for the object. + + Most objects just use their name as the comment, but PBXProject uses + different values. + + The returned comment is not escaped and does not have any comment marker + strings applied to it. 
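+
+    As an example, an object whose Name() is "base" would be annotated in the
+    output as /* base */ once run through _EncodeComment; the name here is
+    illustrative only.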
+ """ + + return self.Name() + + def Hashables(self): + hashables = [self.__class__.__name__] + + name = self.Name() + if name != None: + hashables.append(name) + + hashables.extend(self._hashables) + + return hashables + + def ComputeIDs(self, recursive=True, overwrite=True, hash=None): + """Set "id" properties deterministically. + + An object's "id" property is set based on a hash of its class type and + name, as well as the class type and name of all ancestor objects. As + such, it is only advisable to call ComputeIDs once an entire project file + tree is built. + + If recursive is True, recurse into all descendant objects and update their + hashes. + + If overwrite is True, any existing value set in the "id" property will be + replaced. + """ + + def _HashUpdate(hash, data): + """Update hash with data's length and contents. + + If the hash were updated only with the value of data, it would be + possible for clowns to induce collisions by manipulating the names of + their objects. By adding the length, it's exceedingly less likely that + ID collisions will be encountered, intentionally or not. + """ + + hash.update(struct.pack('>i', len(data))) + hash.update(data) + + if hash == None: + hash = _new_sha1() + + hashables = self.Hashables() + assert len(hashables) > 0 + for hashable in hashables: + _HashUpdate(hash, hashable) + + if recursive: + for child in self.Children(): + child.ComputeIDs(recursive, overwrite, hash.copy()) + + if overwrite or self.id == None: + # Xcode IDs are only 96 bits (24 hex characters), but a SHA-1 digest is + # is 160 bits. Instead of throwing out 64 bits of the digest, xor them + # into the portion that gets used. + assert hash.digest_size % 4 == 0 + digest_int_count = hash.digest_size / 4 + digest_ints = struct.unpack('>' + 'I' * digest_int_count, hash.digest()) + id_ints = [0, 0, 0] + for index in xrange(0, digest_int_count): + id_ints[index % 3] ^= digest_ints[index] + self.id = '%08X%08X%08X' % tuple(id_ints) + + def EnsureNoIDCollisions(self): + """Verifies that no two objects have the same ID. Checks all descendants. + """ + + ids = {} + descendants = self.Descendants() + for descendant in descendants: + if descendant.id in ids: + other = ids[descendant.id] + raise KeyError, \ + 'Duplicate ID %s, objects "%s" and "%s" in "%s"' % \ + (descendant.id, str(descendant._properties), + str(other._properties), self._properties['rootObject'].Name()) + ids[descendant.id] = descendant + + def Children(self): + """Returns a list of all of this object's owned (strong) children.""" + + children = [] + for property, attributes in self._schema.iteritems(): + (is_list, property_type, is_strong) = attributes[0:3] + if is_strong and property in self._properties: + if not is_list: + children.append(self._properties[property]) + else: + children.extend(self._properties[property]) + return children + + def Descendants(self): + """Returns a list of all of this object's descendants, including this + object. + """ + + children = self.Children() + descendants = [self] + for child in children: + descendants.extend(child.Descendants()) + return descendants + + def PBXProjectAncestor(self): + # The base case for recursion is defined at PBXProject.PBXProjectAncestor. + if self.parent: + return self.parent.PBXProjectAncestor() + return None + + def _EncodeComment(self, comment): + """Encodes a comment to be placed in the project file output, mimicing + Xcode behavior. + """ + + # This mimics Xcode behavior by wrapping the comment in "/*" and "*/". 
+    # the string already contains a "*/", it is turned into "(*)/".  This keeps
+    # the file writer from outputting something that would be treated as the
+    # end of a comment in the middle of something intended to be entirely a
+    # comment.
+
+    return '/* ' + comment.replace('*/', '(*)/') + ' */'
+
+  def _EncodeTransform(self, match):
+    # This function works closely with _EncodeString.  It will only be called
+    # by re.sub with match.group(0) containing a character matched by the
+    # _escaped expression.
+    char = match.group(0)
+
+    # Backslashes (\) and quotation marks (") are always replaced with a
+    # backslash-escaped version of the same.  Everything else gets its
+    # replacement from the class' _encode_transforms array.
+    if char == '\\':
+      return '\\\\'
+    if char == '"':
+      return '\\"'
+    return self._encode_transforms[ord(char)]
+
+  def _EncodeString(self, value):
+    """Encodes a string to be placed in the project file output, mimicking
+    Xcode behavior.
+    """
+
+    # Use quotation marks when any character outside of the range A-Z, a-z, 0-9,
+    # $ (dollar sign), . (period), and _ (underscore) is present.  Also use
+    # quotation marks to represent empty strings.
+    #
+    # Escape " (double-quote) and \ (backslash) by preceding them with a
+    # backslash.
+    #
+    # Some characters below the printable ASCII range are encoded specially:
+    #    7 ^G BEL is encoded as "\a"
+    #    8 ^H BS  is encoded as "\b"
+    #   11 ^K VT  is encoded as "\v"
+    #   12 ^L NP  is encoded as "\f"
+    #  127 ^? DEL is passed through as-is without escaping
+    #  - In PBXFileReference and PBXBuildFile objects:
+    #    9 ^I HT  is passed through as-is without escaping
+    #   10 ^J NL  is passed through as-is without escaping
+    #   13 ^M CR  is passed through as-is without escaping
+    #  - In other objects:
+    #    9 ^I HT  is encoded as "\t"
+    #   10 ^J NL  is encoded as "\n"
+    #   13 ^M CR  is encoded as "\n" rendering it indistinguishable from
+    #             10 ^J NL
+    # All other nonprintable characters within the ASCII range (0 through 127
+    # inclusive) are encoded as "\U001f" referring to the Unicode code point in
+    # hexadecimal.  For example, character 14 (^N SO) is encoded as "\U000e".
+    # Characters above the ASCII range are passed through to the output encoded
+    # as UTF-8 without any escaping.  These mappings are contained in the
+    # class' _encode_transforms list.
+
+    if _unquoted.search(value) and not _quoted.search(value):
+      return value
+
+    return '"' + _escaped.sub(self._EncodeTransform, value) + '"'
+
+  def _XCPrint(self, file, tabs, line):
+    file.write('\t' * tabs + line)
+
+  def _XCPrintableValue(self, tabs, value, flatten_list=False):
+    """Returns a representation of value that may be printed in a project file,
+    mimicking Xcode's behavior.
+
+    _XCPrintableValue can handle str and int values, XCObjects (which are
+    made printable by returning their id property), and list and dict objects
+    composed of any of the above types.  When printing a list or dict, and
+    _should_print_single_line is False, the tabs parameter is used to determine
+    how much to indent the lines corresponding to the items in the list or
+    dict.
+
+    If flatten_list is True, single-element lists will be transformed into
+    strings.
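+
+    As an illustrative example, ['foo'] prints as foo when flatten_list is
+    True, but as a parenthesized one-element list otherwise; the value is
+    hypothetical rather than taken from a real project.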
+ """ + + printable = '' + comment = None + + if self._should_print_single_line: + sep = ' ' + element_tabs = '' + end_tabs = '' + else: + sep = '\n' + element_tabs = '\t' * (tabs + 1) + end_tabs = '\t' * tabs + + if isinstance(value, XCObject): + printable += value.id + comment = value.Comment() + elif isinstance(value, str): + printable += self._EncodeString(value) + elif isinstance(value, unicode): + printable += self._EncodeString(value.encode('utf-8')) + elif isinstance(value, int): + printable += str(value) + elif isinstance(value, list): + if flatten_list and len(value) <= 1: + if len(value) == 0: + printable += self._EncodeString('') + else: + printable += self._EncodeString(value[0]) + else: + printable = '(' + sep + for item in value: + printable += element_tabs + \ + self._XCPrintableValue(tabs + 1, item, flatten_list) + \ + ',' + sep + printable += end_tabs + ')' + elif isinstance(value, dict): + printable = '{' + sep + for item_key, item_value in sorted(value.iteritems()): + printable += element_tabs + \ + self._XCPrintableValue(tabs + 1, item_key, flatten_list) + ' = ' + \ + self._XCPrintableValue(tabs + 1, item_value, flatten_list) + ';' + \ + sep + printable += end_tabs + '}' + else: + raise TypeError, "Can't make " + value.__class__.__name__ + ' printable' + + if comment != None: + printable += ' ' + self._EncodeComment(comment) + + return printable + + def _XCKVPrint(self, file, tabs, key, value): + """Prints a key and value, members of an XCObject's _properties dictionary, + to file. + + tabs is an int identifying the indentation level. If the class' + _should_print_single_line variable is True, tabs is ignored and the + key-value pair will be followed by a space insead of a newline. + """ + + if self._should_print_single_line: + printable = '' + after_kv = ' ' + else: + printable = '\t' * tabs + after_kv = '\n' + + # Xcode usually prints remoteGlobalIDString values in PBXContainerItemProxy + # objects without comments. Sometimes it prints them with comments, but + # the majority of the time, it doesn't. To avoid unnecessary changes to + # the project file after Xcode opens it, don't write comments for + # remoteGlobalIDString. This is a sucky hack and it would certainly be + # cleaner to extend the schema to indicate whether or not a comment should + # be printed, but since this is the only case where the problem occurs and + # Xcode itself can't seem to make up its mind, the hack will suffice. + # + # Also see PBXContainerItemProxy._schema['remoteGlobalIDString']. + if key == 'remoteGlobalIDString' and isinstance(self, + PBXContainerItemProxy): + value_to_print = value.id + else: + value_to_print = value + + # PBXBuildFile's settings property is represented in the output as a dict, + # but a hack here has it represented as a string. Arrange to strip off the + # quotes so that it shows up in the output as expected. + if key == 'settings' and isinstance(self, PBXBuildFile): + strip_value_quotes = True + else: + strip_value_quotes = False + + # In another one-off, let's set flatten_list on buildSettings properties + # of XCBuildConfiguration objects, because that's how Xcode treats them. 
+    if key == 'buildSettings' and isinstance(self, XCBuildConfiguration):
+      flatten_list = True
+    else:
+      flatten_list = False
+
+    try:
+      printable_key = self._XCPrintableValue(tabs, key, flatten_list)
+      printable_value = self._XCPrintableValue(tabs, value_to_print,
+                                               flatten_list)
+      if strip_value_quotes and len(printable_value) > 1 and \
+          printable_value[0] == '"' and printable_value[-1] == '"':
+        printable_value = printable_value[1:-1]
+      printable += printable_key + ' = ' + printable_value + ';' + after_kv
+    except TypeError, e:
+      gyp.common.ExceptionAppend(e,
+                                 'while printing key "%s"' % key)
+      raise
+
+    self._XCPrint(file, 0, printable)
+
+  def Print(self, file=sys.stdout):
+    """Prints a representation of this object to file, adhering to Xcode output
+    formatting.
+    """
+
+    self.VerifyHasRequiredProperties()
+
+    if self._should_print_single_line:
+      # When printing an object in a single line, Xcode doesn't put any space
+      # between the beginning of a dictionary (or presumably a list) and the
+      # first contained item, so you wind up with snippets like
+      #   ...CDEF = {isa = PBXFileReference; fileRef = 0123...
+      # If it were me, I would have put a space in there after the opening
+      # curly, but I guess this is just another one of those inconsistencies
+      # between how Xcode prints PBXFileReference and PBXBuildFile objects as
+      # compared to other objects.  Mimic Xcode's behavior here by using an
+      # empty string for sep.
+      sep = ''
+      end_tabs = 0
+    else:
+      sep = '\n'
+      end_tabs = 2
+
+    # Start the object.  For example, '\t\tPBXProject = {\n'.
+    self._XCPrint(file, 2, self._XCPrintableValue(2, self) + ' = {' + sep)
+
+    # "isa" isn't in the _properties dictionary, it's an intrinsic property
+    # of the class which the object belongs to.  Xcode always outputs "isa"
+    # as the first element of an object dictionary.
+    self._XCKVPrint(file, 3, 'isa', self.__class__.__name__)
+
+    # The remaining elements of an object dictionary are sorted alphabetically.
+    for property, value in sorted(self._properties.iteritems()):
+      self._XCKVPrint(file, 3, property, value)
+
+    # End the object.
+    self._XCPrint(file, end_tabs, '};\n')
+
+  def UpdateProperties(self, properties, do_copy=False):
+    """Merge the supplied properties into the _properties dictionary.
+
+    The input properties must adhere to the class schema or a KeyError or
+    TypeError exception will be raised.  If adding an object of an XCObject
+    subclass and the schema indicates a strong relationship, the object's
+    parent will be set to this object.
+
+    If do_copy is True, then lists, dicts, strong-owned XCObjects, and
+    strong-owned XCObjects in lists will be copied instead of having their
+    references added.
+    """
+
+    if properties == None:
+      return
+
+    for property, value in properties.iteritems():
+      # Make sure the property is in the schema.
+      if not property in self._schema:
+        raise KeyError, property + ' not in ' + self.__class__.__name__
+
+      # Make sure the property conforms to the schema.
+      (is_list, property_type, is_strong) = self._schema[property][0:3]
+      if is_list:
+        if value.__class__ != list:
+          raise TypeError, \
+                property + ' of ' + self.__class__.__name__ + \
+                ' must be list, not ' + value.__class__.__name__
+        for item in value:
+          if not isinstance(item, property_type) and \
+             not (item.__class__ == unicode and property_type == str):
+            # Accept unicode where str is specified.  str is treated as
+            # UTF-8-encoded.
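+            # (For instance, u'foo' passes for a str-typed item and is later
+            # written out UTF-8-encoded; the value is illustrative.)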
+ raise TypeError, \ + 'item of ' + property + ' of ' + self.__class__.__name__ + \ + ' must be ' + property_type.__name__ + ', not ' + \ + item.__class__.__name__ + elif not isinstance(value, property_type) and \ + not (value.__class__ == unicode and property_type == str): + # Accept unicode where str is specified. str is treated as + # UTF-8-encoded. + raise TypeError, \ + property + ' of ' + self.__class__.__name__ + ' must be ' + \ + property_type.__name__ + ', not ' + value.__class__.__name__ + + # Checks passed, perform the assignment. + if do_copy: + if isinstance(value, XCObject): + if is_strong: + self._properties[property] = value.Copy() + else: + self._properties[property] = value + elif isinstance(value, str) or isinstance(value, unicode) or \ + isinstance(value, int): + self._properties[property] = value + elif isinstance(value, list): + if is_strong: + # If is_strong is True, each element is an XCObject, so it's safe + # to call Copy. + self._properties[property] = [] + for item in value: + self._properties[property].append(item.Copy()) + else: + self._properties[property] = value[:] + elif isinstance(value, dict): + self._properties[property] = value.copy() + else: + raise TypeError, "Don't know how to copy a " + \ + value.__class__.__name__ + ' object for ' + \ + property + ' in ' + self.__class__.__name__ + else: + self._properties[property] = value + + # Set up the child's back-reference to this object. Don't use |value| + # any more because it may not be right if do_copy is true. + if is_strong: + if not is_list: + self._properties[property].parent = self + else: + for item in self._properties[property]: + item.parent = self + + def HasProperty(self, key): + return key in self._properties + + def GetProperty(self, key): + return self._properties[key] + + def SetProperty(self, key, value): + self.UpdateProperties({key: value}) + + def DelProperty(self, key): + if key in self._properties: + del self._properties[key] + + def AppendProperty(self, key, value): + # TODO(mark): Support ExtendProperty too (and make this call that)? + + # Schema validation. + if not key in self._schema: + raise KeyError, key + ' not in ' + self.__class__.__name__ + + (is_list, property_type, is_strong) = self._schema[key][0:3] + if not is_list: + raise TypeError, key + ' of ' + self.__class__.__name__ + ' must be list' + if not isinstance(value, property_type): + raise TypeError, 'item of ' + key + ' of ' + self.__class__.__name__ + \ + ' must be ' + property_type.__name__ + ', not ' + \ + value.__class__.__name__ + + # If the property doesn't exist yet, create a new empty list to receive the + # item. + if not key in self._properties: + self._properties[key] = [] + + # Set up the ownership link. + if is_strong: + value.parent = self + + # Store the item. + self._properties[key].append(value) + + def VerifyHasRequiredProperties(self): + """Ensure that all properties identified as required by the schema are + set. + """ + + # TODO(mark): A stronger verification mechanism is needed. Some + # subclasses need to perform validation beyond what the schema can enforce. + for property, attributes in self._schema.iteritems(): + (is_list, property_type, is_strong, is_required) = attributes[0:4] + if is_required and not property in self._properties: + raise KeyError, self.__class__.__name__ + ' requires ' + property + + def _SetDefaultsFromSchema(self): + """Assign object default values according to the schema. 
This will not + overwrite properties that have already been set.""" + + defaults = {} + for property, attributes in self._schema.iteritems(): + (is_list, property_type, is_strong, is_required) = attributes[0:4] + if is_required and len(attributes) >= 5 and \ + not property in self._properties: + default = attributes[4] + + defaults[property] = default + + if len(defaults) > 0: + # Use do_copy=True so that each new object gets its own copy of strong + # objects, lists, and dicts. + self.UpdateProperties(defaults, do_copy=True) + + +class XCHierarchicalElement(XCObject): + """Abstract base for PBXGroup and PBXFileReference. Not represented in a + project file.""" + + # TODO(mark): Do name and path belong here? Probably so. + # If path is set and name is not, name may have a default value. Name will + # be set to the basename of path, if the basename of path is different from + # the full value of path. If path is already just a leaf name, name will + # not be set. + _schema = XCObject._schema.copy() + _schema.update({ + 'comments': [0, str, 0, 0], + 'fileEncoding': [0, str, 0, 0], + 'includeInIndex': [0, int, 0, 0], + 'indentWidth': [0, int, 0, 0], + 'lineEnding': [0, int, 0, 0], + 'sourceTree': [0, str, 0, 1, ''], + 'tabWidth': [0, int, 0, 0], + 'usesTabs': [0, int, 0, 0], + 'wrapsLines': [0, int, 0, 0], + }) + + def __init__(self, properties=None, id=None, parent=None): + # super + XCObject.__init__(self, properties, id, parent) + if 'path' in self._properties and not 'name' in self._properties: + path = self._properties['path'] + name = posixpath.basename(path) + if name != '' and path != name: + self.SetProperty('name', name) + + if 'path' in self._properties and \ + (not 'sourceTree' in self._properties or \ + self._properties['sourceTree'] == ''): + # If the pathname begins with an Xcode variable like "$(SDKROOT)/", take + # the variable out and make the path be relative to that variable by + # assigning the variable name as the sourceTree. + (source_tree, path) = SourceTreeAndPathFromPath(self._properties['path']) + if source_tree != None: + self._properties['sourceTree'] = source_tree + if path != None: + self._properties['path'] = path + if source_tree != None and path == None and \ + not 'name' in self._properties: + # The path was of the form "$(SDKROOT)" with no path following it. + # This object is now relative to that variable, so it has no path + # attribute of its own. It does, however, keep a name. + del self._properties['path'] + self._properties['name'] = source_tree + + def Name(self): + if 'name' in self._properties: + return self._properties['name'] + elif 'path' in self._properties: + return self._properties['path'] + else: + # This happens in the case of the root PBXGroup. + return None + + def Hashables(self): + """Custom hashables for XCHierarchicalElements. + + XCHierarchicalElements are special. Generally, their hashes shouldn't + change if the paths don't change. The normal XCObject implementation of + Hashables adds a hashable for each object, which means that if + the hierarchical structure changes (possibly due to changes caused when + TakeOverOnlyChild runs and encounters slight changes in the hierarchy), + the hashes will change. For example, if a project file initially contains + a/b/f1 and a/b becomes collapsed into a/b, f1 will have a single parent + a/b. If someone later adds a/f2 to the project file, a/b can no longer be + collapsed, and f1 winds up with parent b and grandparent a. That would + be sufficient to change f1's hash. 
+
+    To counteract this problem, hashables for all XCHierarchicalElements except
+    for the main group (which has neither a name nor a path) are taken to be
+    just the set of path components.  Because hashables are inherited from
+    parents, this provides assurance that a/b/f1 has the same set of hashables
+    whether its parent is b or a/b.
+
+    The main group is a special case.  As it is permitted to have no name or
+    path, it is permitted to use the standard XCObject hash mechanism.  This
+    is not considered a problem because there can be only one main group.
+    """
+
+    if self == self.PBXProjectAncestor()._properties['mainGroup']:
+      # super
+      return XCObject.Hashables(self)
+
+    hashables = []
+
+    # Put the name in first, ensuring that if TakeOverOnlyChild collapses
+    # children into a top-level group like "Source", the name always goes
+    # into the list of hashables without interfering with path components.
+    if 'name' in self._properties:
+      # Make it less likely for people to manipulate hashes by following the
+      # pattern of always pushing an object type value onto the list first.
+      hashables.append(self.__class__.__name__ + '.name')
+      hashables.append(self._properties['name'])
+
+    # NOTE: This still has the problem that if an absolute path is encountered,
+    # including paths with a sourceTree, they'll still inherit their parents'
+    # hashables, even though the paths aren't relative to their parents.  This
+    # is not expected to be much of a problem in practice.
+    path = self.PathFromSourceTreeAndPath()
+    if path != None:
+      components = path.split(posixpath.sep)
+      for component in components:
+        hashables.append(self.__class__.__name__ + '.path')
+        hashables.append(component)
+
+    hashables.extend(self._hashables)
+
+    return hashables
+
+  def Compare(self, other):
+    # Allow comparison of these types.  PBXGroup has the highest sort rank;
+    # PBXVariantGroup is treated as equal to PBXFileReference.
+    valid_class_types = {
+      PBXFileReference: 'file',
+      PBXGroup: 'group',
+      PBXVariantGroup: 'file',
+    }
+    self_type = valid_class_types[self.__class__]
+    other_type = valid_class_types[other.__class__]
+
+    if self_type == other_type:
+      # If the two objects are of the same sort rank, compare their names.
+      return cmp(self.Name(), other.Name())
+
+    # Otherwise, sort groups before everything else.
+    if self_type == 'group':
+      return -1
+    return 1
+
+  def CompareRootGroup(self, other):
+    # This function should be used only to compare direct children of the
+    # containing PBXProject's mainGroup.  These groups should appear in the
+    # listed order.
+    # TODO(mark): "Build" is used by gyp.generator.xcode, perhaps the
+    # generator should have a way of influencing this list rather than having
+    # to hardcode for the generator here.
+    order = ['Source', 'Intermediates', 'Projects', 'Frameworks', 'Products',
+             'Build']
+
+    # If the groups aren't in the listed order, do a name comparison.
+    # Otherwise, groups in the listed order should come before those that
+    # aren't.
+    self_name = self.Name()
+    other_name = other.Name()
+    self_in = isinstance(self, PBXGroup) and self_name in order
+    # Note: this must test |other|, not |self|, to classify the other group.
+    other_in = isinstance(other, PBXGroup) and other_name in order
+    if not self_in and not other_in:
+      return self.Compare(other)
+    if self_name in order and not other_name in order:
+      return -1
+    if other_name in order and not self_name in order:
+      return 1
+
+    # If both groups are in the listed order, go by the defined order.
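+    # For example, "Source" sorts ahead of "Frameworks" because it appears
+    # earlier in |order|.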
+ self_index = order.index(self_name) + other_index = order.index(other_name) + if self_index < other_index: + return -1 + if self_index > other_index: + return 1 + return 0 + + def PathFromSourceTreeAndPath(self): + # Turn the object's sourceTree and path properties into a single flat + # string of a form comparable to the path parameter. If there's a + # sourceTree property other than "", wrap it in $(...) for the + # comparison. + components = [] + if self._properties['sourceTree'] != '': + components.append('$(' + self._properties['sourceTree'] + ')') + if 'path' in self._properties: + components.append(self._properties['path']) + + if len(components) > 0: + return posixpath.join(*components) + + return None + + def FullPath(self): + # Returns a full path to self relative to the project file, or relative + # to some other source tree. Start with self, and walk up the chain of + # parents prepending their paths, if any, until no more parents are + # available (project-relative path) or until a path relative to some + # source tree is found. + xche = self + path = None + while isinstance(xche, XCHierarchicalElement) and \ + (path == None or \ + (not path.startswith('/') and not path.startswith('$'))): + this_path = xche.PathFromSourceTreeAndPath() + if this_path != None and path != None: + path = posixpath.join(this_path, path) + elif this_path != None: + path = this_path + xche = xche.parent + + return path + + +class PBXGroup(XCHierarchicalElement): + """ + Attributes: + _children_by_path: Maps pathnames of children of this PBXGroup to the + actual child XCHierarchicalElement objects. + _variant_children_by_name_and_path: Maps (name, path) tuples of + PBXVariantGroup children to the actual child PBXVariantGroup objects. + """ + + _schema = XCHierarchicalElement._schema.copy() + _schema.update({ + 'children': [1, XCHierarchicalElement, 1, 1, []], + 'name': [0, str, 0, 0], + 'path': [0, str, 0, 0], + }) + + def __init__(self, properties=None, id=None, parent=None): + # super + XCHierarchicalElement.__init__(self, properties, id, parent) + self._children_by_path = {} + self._variant_children_by_name_and_path = {} + for child in self._properties.get('children', []): + self._AddChildToDicts(child) + + def _AddChildToDicts(self, child): + # Sets up this PBXGroup object's dicts to reference the child properly. + child_path = child.PathFromSourceTreeAndPath() + if child_path: + if child_path in self._children_by_path: + raise ValueError, 'Found multiple children with path ' + child_path + self._children_by_path[child_path] = child + + if isinstance(child, PBXVariantGroup): + child_name = child._properties.get('name', None) + key = (child_name, child_path) + if key in self._variant_children_by_name_and_path: + raise ValueError, 'Found multiple PBXVariantGroup children with ' + \ + 'name ' + str(child_name) + ' and path ' + \ + str(child_path) + self._variant_children_by_name_and_path[key] = child + + def AppendChild(self, child): + # Callers should use this instead of calling + # AppendProperty('children', child) directly because this function + # maintains the group's dicts. + self.AppendProperty('children', child) + self._AddChildToDicts(child) + + def GetChildByName(self, name): + # This is not currently optimized with a dict as GetChildByPath is because + # it has few callers. Most callers probably want GetChildByPath. This + # function is only useful to get children that have names but no paths, + # which is rare. The children of the main group ("Source", "Products", + # etc.) 
are pretty much the only case where this is likely to come up.
+    #
+    # TODO(mark): Maybe this should raise an error if more than one child is
+    # present with the same name.
+    if not 'children' in self._properties:
+      return None
+
+    for child in self._properties['children']:
+      if child.Name() == name:
+        return child
+
+    return None
+
+  def GetChildByPath(self, path):
+    if not path:
+      return None
+
+    if path in self._children_by_path:
+      return self._children_by_path[path]
+
+    return None
+
+  def GetChildByRemoteObject(self, remote_object):
+    # This method is a little bit esoteric.  Given a remote_object, which
+    # should be a PBXFileReference in another project file, this method will
+    # return this group's PBXReferenceProxy object serving as a local proxy
+    # for the remote PBXFileReference.
+    #
+    # This function might benefit from a dict optimization as GetChildByPath
+    # for some workloads, but profiling shows that it's not currently a
+    # problem.
+    if not 'children' in self._properties:
+      return None
+
+    for child in self._properties['children']:
+      if not isinstance(child, PBXReferenceProxy):
+        continue
+
+      container_proxy = child._properties['remoteRef']
+      if container_proxy._properties['remoteGlobalIDString'] == remote_object:
+        return child
+
+    return None
+
+  def AddOrGetFileByPath(self, path, hierarchical):
+    """Returns an existing or new file reference corresponding to path.
+
+    If hierarchical is True, this method will create or use the necessary
+    hierarchical group structure corresponding to path.  Otherwise, it will
+    look in and create an item in the current group only.
+
+    If an existing matching reference is found, it is returned, otherwise, a
+    new one will be created, added to the correct group, and returned.
+
+    If path identifies a directory by virtue of carrying a trailing slash,
+    this method returns a PBXFileReference of "folder" type.  If path
+    identifies a variant, by virtue of it identifying a file inside a directory
+    with an ".lproj" extension, this method returns a PBXVariantGroup
+    containing the variant named by path, and possibly other variants.  For
+    all other paths, a "normal" PBXFileReference will be returned.
+    """
+
+    # Adding or getting a directory?  Directories end with a trailing slash.
+    is_dir = False
+    if path.endswith('/'):
+      is_dir = True
+    normpath = posixpath.normpath(path)
+    if is_dir:
+      normpath = path + '/'
+    else:
+      normpath = path
+
+    # Adding or getting a variant?  Variants are files inside directories
+    # with an ".lproj" extension.  Xcode uses variants for localization.  For
+    # a variant path/to/Language.lproj/MainMenu.nib, put a variant group named
+    # MainMenu.nib inside path/to, and give it a variant named Language.  In
+    # this example, grandparent would be set to path/to and parent_root would
+    # be set to Language.
+    variant_name = None
+    parent = posixpath.dirname(path)
+    grandparent = posixpath.dirname(parent)
+    parent_basename = posixpath.basename(parent)
+    (parent_root, parent_ext) = posixpath.splitext(parent_basename)
+    if parent_ext == '.lproj':
+      variant_name = parent_root
+    if grandparent == '':
+      grandparent = None
+
+    # Putting a directory inside a variant group is not currently supported.
+    assert not is_dir or variant_name == None
+
+    path_split = path.split(posixpath.sep)
+    if len(path_split) == 1 or \
+       ((is_dir or variant_name != None) and len(path_split) == 2) or \
+       not hierarchical:
+      # The PBXFileReference or PBXVariantGroup will be added to or gotten from
+      # this PBXGroup, no recursion necessary.
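+      # For example, a non-hierarchical add of 'a/b/c.c' is handled here
+      # directly, as is a hierarchical add once recursion has reduced the
+      # path to 'c.c'.  (Paths are illustrative.)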
+ if variant_name == None: + # Add or get a PBXFileReference. + file_ref = self.GetChildByPath(normpath) + if file_ref != None: + assert file_ref.__class__ == PBXFileReference + else: + file_ref = PBXFileReference({'path': path}) + self.AppendChild(file_ref) + else: + # Add or get a PBXVariantGroup. The variant group name is the same + # as the basename (MainMenu.nib in the example above). grandparent + # specifies the path to the variant group itself, and path_split[-2:] + # is the path of the specific variant relative to its group. + variant_group_name = posixpath.basename(path) + variant_group_ref = self.AddOrGetVariantGroupByNameAndPath( + variant_group_name, grandparent) + variant_path = posixpath.sep.join(path_split[-2:]) + variant_ref = variant_group_ref.GetChildByPath(variant_path) + if variant_ref != None: + assert variant_ref.__class__ == PBXFileReference + else: + variant_ref = PBXFileReference({'name': variant_name, + 'path': variant_path}) + variant_group_ref.AppendChild(variant_ref) + # The caller is interested in the variant group, not the specific + # variant file. + file_ref = variant_group_ref + return file_ref + else: + # Hierarchical recursion. Add or get a PBXGroup corresponding to the + # outermost path component, and then recurse into it, chopping off that + # path component. + next_dir = path_split[0] + group_ref = self.GetChildByPath(next_dir) + if group_ref != None: + assert group_ref.__class__ == PBXGroup + else: + group_ref = PBXGroup({'path': next_dir}) + self.AppendChild(group_ref) + return group_ref.AddOrGetFileByPath(posixpath.sep.join(path_split[1:]), + hierarchical) + + def AddOrGetVariantGroupByNameAndPath(self, name, path): + """Returns an existing or new PBXVariantGroup for name and path. + + If a PBXVariantGroup identified by the name and path arguments is already + present as a child of this object, it is returned. Otherwise, a new + PBXVariantGroup with the correct properties is created, added as a child, + and returned. + + This method will generally be called by AddOrGetFileByPath, which knows + when to create a variant group based on the structure of the pathnames + passed to it. + """ + + key = (name, path) + if key in self._variant_children_by_name_and_path: + variant_group_ref = self._variant_children_by_name_and_path[key] + assert variant_group_ref.__class__ == PBXVariantGroup + return variant_group_ref + + variant_group_properties = {'name': name} + if path != None: + variant_group_properties['path'] = path + variant_group_ref = PBXVariantGroup(variant_group_properties) + self.AppendChild(variant_group_ref) + + return variant_group_ref + + def TakeOverOnlyChild(self, recurse=False): + """If this PBXGroup has only one child and it's also a PBXGroup, take + it over by making all of its children this object's children. + + This function will continue to take over only children when those children + are groups. If there are three PBXGroups representing a, b, and c, with + c inside b and b inside a, and a and b have no other children, this will + result in a taking over both b and c, forming a PBXGroup for a/b/c. + + If recurse is True, this function will recurse into children and ask them + to collapse themselves by taking over only children as well. Assuming + an example hierarchy with files at a/b/c/d1, a/b/c/d2, and a/b/c/d3/e/f + (d1, d2, and f are files, the rest are groups), recursion will result in + a group for a/b/c containing a group for d3/e. 
+ """ + + # At this stage, check that child class types are PBXGroup exactly, + # instead of using isinstance. The only subclass of PBXGroup, + # PBXVariantGroup, should not participate in reparenting in the same way: + # reparenting by merging different object types would be wrong. + while len(self._properties['children']) == 1 and \ + self._properties['children'][0].__class__ == PBXGroup: + # Loop to take over the innermost only-child group possible. + + child = self._properties['children'][0] + + # Assume the child's properties, including its children. Save a copy + # of this object's old properties, because they'll still be needed. + # This object retains its existing id and parent attributes. + old_properties = self._properties + self._properties = child._properties + self._children_by_path = child._children_by_path + + if not 'sourceTree' in self._properties or \ + self._properties['sourceTree'] == '': + # The child was relative to its parent. Fix up the path. Note that + # children with a sourceTree other than "" are not relative to + # their parents, so no path fix-up is needed in that case. + if 'path' in old_properties: + if 'path' in self._properties: + # Both the original parent and child have paths set. + self._properties['path'] = posixpath.join(old_properties['path'], + self._properties['path']) + else: + # Only the original parent has a path, use it. + self._properties['path'] = old_properties['path'] + if 'sourceTree' in old_properties: + # The original parent had a sourceTree set, use it. + self._properties['sourceTree'] = old_properties['sourceTree'] + + # If the original parent had a name set, keep using it. If the original + # parent didn't have a name but the child did, let the child's name + # live on. If the name attribute seems unnecessary now, get rid of it. + if 'name' in old_properties and old_properties['name'] != None and \ + old_properties['name'] != self.Name(): + self._properties['name'] = old_properties['name'] + if 'name' in self._properties and 'path' in self._properties and \ + self._properties['name'] == self._properties['path']: + del self._properties['name'] + + # Notify all children of their new parent. + for child in self._properties['children']: + child.parent = self + + # If asked to recurse, recurse. + if recurse: + for child in self._properties['children']: + if child.__class__ == PBXGroup: + child.TakeOverOnlyChild(recurse) + + def SortGroup(self): + self._properties['children'] = \ + sorted(self._properties['children'], cmp=lambda x,y: x.Compare(y)) + + # Recurse. + for child in self._properties['children']: + if isinstance(child, PBXGroup): + child.SortGroup() + + +class XCFileLikeElement(XCHierarchicalElement): + # Abstract base for objects that can be used as the fileRef property of + # PBXBuildFile. + + def PathHashables(self): + # A PBXBuildFile that refers to this object will call this method to + # obtain additional hashables specific to this XCFileLikeElement. Don't + # just use this object's hashables, they're not specific and unique enough + # on their own (without access to the parent hashables.) Instead, provide + # hashables that identify this object by path by getting its hashables as + # well as the hashables of ancestor XCHierarchicalElement objects. 
+ + hashables = [] + xche = self + while xche != None and isinstance(xche, XCHierarchicalElement): + xche_hashables = xche.Hashables() + for index in xrange(0, len(xche_hashables)): + hashables.insert(index, xche_hashables[index]) + xche = xche.parent + return hashables + + +class XCContainerPortal(XCObject): + # Abstract base for objects that can be used as the containerPortal property + # of PBXContainerItemProxy. + pass + + +class XCRemoteObject(XCObject): + # Abstract base for objects that can be used as the remoteGlobalIDString + # property of PBXContainerItemProxy. + pass + + +class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject): + _schema = XCFileLikeElement._schema.copy() + _schema.update({ + 'explicitFileType': [0, str, 0, 0], + 'lastKnownFileType': [0, str, 0, 0], + 'name': [0, str, 0, 0], + 'path': [0, str, 0, 1], + }) + + # Weird output rules for PBXFileReference. + _should_print_single_line = True + # super + _encode_transforms = XCFileLikeElement._alternate_encode_transforms + + def __init__(self, properties=None, id=None, parent=None): + # super + XCFileLikeElement.__init__(self, properties, id, parent) + if 'path' in self._properties and self._properties['path'].endswith('/'): + self._properties['path'] = self._properties['path'][:-1] + is_dir = True + else: + is_dir = False + + if 'path' in self._properties and \ + not 'lastKnownFileType' in self._properties and \ + not 'explicitFileType' in self._properties: + # TODO(mark): This is the replacement for a replacement for a quick hack. + # It is no longer incredibly sucky, but this list needs to be extended. + extension_map = { + 'a': 'archive.ar', + 'app': 'wrapper.application', + 'bdic': 'file', + 'bundle': 'wrapper.cfbundle', + 'c': 'sourcecode.c.c', + 'cc': 'sourcecode.cpp.cpp', + 'cpp': 'sourcecode.cpp.cpp', + 'css': 'text.css', + 'cxx': 'sourcecode.cpp.cpp', + 'dylib': 'compiled.mach-o.dylib', + 'framework': 'wrapper.framework', + 'h': 'sourcecode.c.h', + 'hxx': 'sourcecode.cpp.h', + 'icns': 'image.icns', + 'java': 'sourcecode.java', + 'js': 'sourcecode.javascript', + 'm': 'sourcecode.c.objc', + 'mm': 'sourcecode.cpp.objcpp', + 'nib': 'wrapper.nib', + 'o': 'compiled.mach-o.objfile', + 'pdf': 'image.pdf', + 'pl': 'text.script.perl', + 'plist': 'text.plist.xml', + 'pm': 'text.script.perl', + 'png': 'image.png', + 'py': 'text.script.python', + 'r': 'sourcecode.rez', + 'rez': 'sourcecode.rez', + 's': 'sourcecode.asm', + 'strings': 'text.plist.strings', + 'ttf': 'file', + 'xcconfig': 'text.xcconfig', + 'xib': 'file.xib', + 'y': 'sourcecode.yacc', + } + + if is_dir: + file_type = 'folder' + else: + basename = posixpath.basename(self._properties['path']) + (root, ext) = posixpath.splitext(basename) + # Check the map using a lowercase extension. + # TODO(mark): Maybe it should try with the original case first and fall + # back to lowercase, in case there are any instances where case + # matters. There currently aren't. + if ext != '': + ext = ext[1:].lower() + + # TODO(mark): "text" is the default value, but "file" is appropriate + # for unrecognized files not containing text. Xcode seems to choose + # based on content. + file_type = extension_map.get(ext, 'text') + + self._properties['lastKnownFileType'] = file_type + + +class PBXVariantGroup(PBXGroup, XCFileLikeElement): + """PBXVariantGroup is used by Xcode to represent localizations.""" + # No additions to the schema relative to PBXGroup. + pass + + +# PBXReferenceProxy is also an XCFileLikeElement subclass. 
It is defined below
+# because it uses PBXContainerItemProxy, which is also defined below.
+
+
+class XCBuildConfiguration(XCObject):
+  _schema = XCObject._schema.copy()
+  _schema.update({
+    'baseConfigurationReference': [0, PBXFileReference, 0, 0],
+    'buildSettings':              [0, dict, 0, 1, {}],
+    'name':                       [0, str,  0, 1],
+  })
+
+  def HasBuildSetting(self, key):
+    return key in self._properties['buildSettings']
+
+  def GetBuildSetting(self, key):
+    return self._properties['buildSettings'][key]
+
+  def SetBuildSetting(self, key, value):
+    # TODO(mark): If a list, copy?
+    self._properties['buildSettings'][key] = value
+
+  def AppendBuildSetting(self, key, value):
+    if not key in self._properties['buildSettings']:
+      self._properties['buildSettings'][key] = []
+    self._properties['buildSettings'][key].append(value)
+
+  def DelBuildSetting(self, key):
+    if key in self._properties['buildSettings']:
+      del self._properties['buildSettings'][key]
+
+  def SetBaseConfiguration(self, value):
+    self._properties['baseConfigurationReference'] = value
+
+class XCConfigurationList(XCObject):
+  # _configs is the default list of configurations.
+  _configs = [ XCBuildConfiguration({'name': 'Debug'}),
+               XCBuildConfiguration({'name': 'Release'}) ]
+
+  _schema = XCObject._schema.copy()
+  _schema.update({
+    'buildConfigurations':           [1, XCBuildConfiguration, 1, 1, _configs],
+    'defaultConfigurationIsVisible': [0, int, 0, 1, 1],
+    'defaultConfigurationName':      [0, str, 0, 1, 'Release'],
+  })
+
+  def Name(self):
+    return 'Build configuration list for ' + \
+           self.parent.__class__.__name__ + ' "' + self.parent.Name() + '"'
+
+  def ConfigurationNamed(self, name):
+    """Convenience accessor to obtain an XCBuildConfiguration by name."""
+    for configuration in self._properties['buildConfigurations']:
+      if configuration._properties['name'] == name:
+        return configuration
+
+    raise KeyError, name
+
+  def DefaultConfiguration(self):
+    """Convenience accessor to obtain the default XCBuildConfiguration."""
+    return self.ConfigurationNamed(self._properties['defaultConfigurationName'])
+
+  def HasBuildSetting(self, key):
+    """Determines the state of a build setting in all XCBuildConfiguration
+    child objects.
+
+    If all child objects have key in their build settings, and the value is the
+    same in all child objects, returns 1.
+
+    If no child objects have the key in their build settings, returns 0.
+
+    If some, but not all, child objects have the key in their build settings,
+    or if any children have different values for the key, returns -1.
+    """
+
+    has = None
+    value = None
+    for configuration in self._properties['buildConfigurations']:
+      configuration_has = configuration.HasBuildSetting(key)
+      if has == None:
+        has = configuration_has
+      elif has != configuration_has:
+        return -1
+
+      if configuration_has:
+        configuration_value = configuration.GetBuildSetting(key)
+        if value == None:
+          value = configuration_value
+        elif value != configuration_value:
+          return -1
+
+    if not has:
+      return 0
+
+    return 1
+
+  def GetBuildSetting(self, key):
+    """Gets the build setting for key.
+
+    All child XCBuildConfiguration objects must have the same value set for the
+    setting, or a ValueError will be raised.
+    """
+
+    # TODO(mark): This is wrong for build settings that are lists.  The list
+    # contents should be compared (and a list copy returned?)
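+    # (Note: Python's != on lists compares contents element-wise, so children
+    # whose list values have equal contents do not raise below; the returned
+    # list is a shared reference rather than a copy, though.)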
+ + value = None + for configuration in self._properties['buildConfigurations']: + configuration_value = configuration.GetBuildSetting(key) + if value == None: + value = configuration_value + else: + if value != configuration_value: + raise ValueError, 'Variant values for ' + key + + return value + + def SetBuildSetting(self, key, value): + """Sets the build setting for key to value in all child + XCBuildConfiguration objects. + """ + + for configuration in self._properties['buildConfigurations']: + configuration.SetBuildSetting(key, value) + + def AppendBuildSetting(self, key, value): + """Appends value to the build setting for key, which is treated as a list, + in all child XCBuildConfiguration objects. + """ + + for configuration in self._properties['buildConfigurations']: + configuration.AppendBuildSetting(key, value) + + def DelBuildSetting(self, key): + """Deletes the build setting key from all child XCBuildConfiguration + objects. + """ + + for configuration in self._properties['buildConfigurations']: + configuration.DelBuildSetting(key) + + def SetBaseConfiguration(self, value): + """Sets the build configuration in all child XCBuildConfiguration objects. + """ + + for configuration in self._properties['buildConfigurations']: + configuration.SetBaseConfiguration(value) + + +class PBXBuildFile(XCObject): + _schema = XCObject._schema.copy() + _schema.update({ + 'fileRef': [0, XCFileLikeElement, 0, 1], + 'settings': [0, str, 0, 0], # hack, it's a dict + }) + + # Weird output rules for PBXBuildFile. + _should_print_single_line = True + _encode_transforms = XCObject._alternate_encode_transforms + + def Name(self): + # Example: "main.cc in Sources" + return self._properties['fileRef'].Name() + ' in ' + self.parent.Name() + + def Hashables(self): + # super + hashables = XCObject.Hashables(self) + + # It is not sufficient to just rely on Name() to get the + # XCFileLikeElement's name, because that is not a complete pathname. + # PathHashables returns hashables unique enough that no two + # PBXBuildFiles should wind up with the same set of hashables, unless + # someone adds the same file multiple times to the same target. That + # would be considered invalid anyway. + hashables.extend(self._properties['fileRef'].PathHashables()) + + return hashables + + +class XCBuildPhase(XCObject): + """Abstract base for build phase classes. Not represented in a project + file. + + Attributes: + _files_by_path: A dict mapping each path of a child in the files list by + path (keys) to the corresponding PBXBuildFile children (values). + _files_by_xcfilelikeelement: A dict mapping each XCFileLikeElement (keys) + to the corresponding PBXBuildFile children (values). + """ + + # TODO(mark): Some build phase types, like PBXShellScriptBuildPhase, don't + # actually have a "files" list. XCBuildPhase should not have "files" but + # another abstract subclass of it should provide this, and concrete build + # phase types that do have "files" lists should be derived from that new + # abstract subclass. XCBuildPhase should only provide buildActionMask and + # runOnlyForDeploymentPostprocessing, and not files or the various + # file-related methods and attributes. 
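+ # A rough usage sketch (hypothetical path; assumes the phase is already
+ # attached to a target inside a PBXProject, which FileGroup requires):
+ #
+ #   phase = target.SourcesPhase()
+ #   phase.AddFile('source/main.cc')   # wraps the path in a PBXBuildFile
+ #   phase.AddFile('source/main.cc')   # raises ValueError: path already added
+ #
+ # AddFile and AppendBuildFile keep _files_by_path and
+ # _files_by_xcfilelikeelement synchronized via _AddBuildFileToDicts.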
+ + _schema = XCObject._schema.copy() + _schema.update({ + 'buildActionMask': [0, int, 0, 1, 0x7fffffff], + 'files': [1, PBXBuildFile, 1, 1, []], + 'runOnlyForDeploymentPostprocessing': [0, int, 0, 1, 0], + }) + + def __init__(self, properties=None, id=None, parent=None): + # super + XCObject.__init__(self, properties, id, parent) + + self._files_by_path = {} + self._files_by_xcfilelikeelement = {} + for pbxbuildfile in self._properties.get('files', []): + self._AddBuildFileToDicts(pbxbuildfile) + + def FileGroup(self, path): + # Subclasses must override this by returning a two-element tuple. The + # first item in the tuple should be the PBXGroup to which "path" should be + # added, either as a child or deeper descendant. The second item should + # be a boolean indicating whether files should be added into hierarchical + # groups or one single flat group. + raise NotImplementedError, \ + self.__class__.__name__ + ' must implement FileGroup' + + def _AddPathToDict(self, pbxbuildfile, path): + """Adds path to the dict tracking paths belonging to this build phase. + + If the path is already a member of this build phase, raises an exception. + """ + + if path in self._files_by_path: + raise ValueError, 'Found multiple build files with path ' + path + self._files_by_path[path] = pbxbuildfile + + def _AddBuildFileToDicts(self, pbxbuildfile, path=None): + """Maintains the _files_by_path and _files_by_xcfilelikeelement dicts. + + If path is specified, then it is the path that is being added to the + phase, and pbxbuildfile must contain either a PBXFileReference directly + referencing that path, or it must contain a PBXVariantGroup that itself + contains a PBXFileReference referencing the path. + + If path is not specified, either the PBXFileReference's path or the paths + of all children of the PBXVariantGroup are taken as being added to the + phase. + + If the path is already present in the phase, raises an exception. + + If the PBXFileReference or PBXVariantGroup referenced by pbxbuildfile + are already present in the phase, referenced by a different PBXBuildFile + object, raises an exception. This does not raise an exception when + a PBXFileReference or PBXVariantGroup reappear and are referenced by the + same PBXBuildFile that has already introduced them, because in the case + of PBXVariantGroup objects, they may correspond to multiple paths that are + not all added simultaneously. When this situation occurs, the path needs + to be added to _files_by_path, but nothing needs to change in + _files_by_xcfilelikeelement, and the caller should have avoided adding + the PBXBuildFile if it is already present in the list of children. + """ + + xcfilelikeelement = pbxbuildfile._properties['fileRef'] + + paths = [] + if path != None: + # It's best when the caller provides the path. + if isinstance(xcfilelikeelement, PBXVariantGroup): + paths.append(path) + else: + # If the caller didn't provide a path, there can be either multiple + # paths (PBXVariantGroup) or one. + if isinstance(xcfilelikeelement, PBXVariantGroup): + for variant in xcfilelikeelement._properties['children']: + paths.append(variant.FullPath()) + else: + paths.append(xcfilelikeelement.FullPath()) + + # Add the paths first, because if something's going to raise, the + # messages provided by _AddPathToDict are more useful owing to its + # having access to a real pathname and not just an object's Name(). 
+ for a_path in paths: + self._AddPathToDict(pbxbuildfile, a_path) + + # If another PBXBuildFile references this XCFileLikeElement, there's a + # problem. + if xcfilelikeelement in self._files_by_xcfilelikeelement and \ + self._files_by_xcfilelikeelement[xcfilelikeelement] != pbxbuildfile: + raise ValueError, 'Found multiple build files for ' + \ + xcfilelikeelement.Name() + self._files_by_xcfilelikeelement[xcfilelikeelement] = pbxbuildfile + + def AppendBuildFile(self, pbxbuildfile, path=None): + # Callers should use this instead of calling + # AppendProperty('files', pbxbuildfile) directly because this function + # maintains the object's dicts. Better yet, callers can just call AddFile + # with a pathname and not worry about building their own PBXBuildFile + # objects. + self.AppendProperty('files', pbxbuildfile) + self._AddBuildFileToDicts(pbxbuildfile, path) + + def AddFile(self, path, settings=None): + (file_group, hierarchical) = self.FileGroup(path) + file_ref = file_group.AddOrGetFileByPath(path, hierarchical) + + if file_ref in self._files_by_xcfilelikeelement and \ + isinstance(file_ref, PBXVariantGroup): + # There's already a PBXBuildFile in this phase corresponding to the + # PBXVariantGroup. path just provides a new variant that belongs to + # the group. Add the path to the dict. + pbxbuildfile = self._files_by_xcfilelikeelement[file_ref] + self._AddBuildFileToDicts(pbxbuildfile, path) + else: + # Add a new PBXBuildFile to get file_ref into the phase. + if settings is None: + pbxbuildfile = PBXBuildFile({'fileRef': file_ref}) + else: + pbxbuildfile = PBXBuildFile({'fileRef': file_ref, 'settings': settings}) + self.AppendBuildFile(pbxbuildfile, path) + + +class PBXHeadersBuildPhase(XCBuildPhase): + # No additions to the schema relative to XCBuildPhase. + + def Name(self): + return 'Headers' + + def FileGroup(self, path): + return self.PBXProjectAncestor().RootGroupForPath(path) + + +class PBXResourcesBuildPhase(XCBuildPhase): + # No additions to the schema relative to XCBuildPhase. + + def Name(self): + return 'Resources' + + def FileGroup(self, path): + return self.PBXProjectAncestor().RootGroupForPath(path) + + +class PBXSourcesBuildPhase(XCBuildPhase): + # No additions to the schema relative to XCBuildPhase. + + def Name(self): + return 'Sources' + + def FileGroup(self, path): + return self.PBXProjectAncestor().RootGroupForPath(path) + + +class PBXFrameworksBuildPhase(XCBuildPhase): + # No additions to the schema relative to XCBuildPhase. + + def Name(self): + return 'Frameworks' + + def FileGroup(self, path): + (root, ext) = posixpath.splitext(path) + if ext != '': + ext = ext[1:].lower() + if ext == 'o': + # .o files are added to Xcode Frameworks phases, but conceptually aren't + # frameworks, they're more like sources or intermediates. Redirect them + # to show up in one of those other groups. 
+ return self.PBXProjectAncestor().RootGroupForPath(path) + else: + return (self.PBXProjectAncestor().FrameworksGroup(), False) + + +class PBXShellScriptBuildPhase(XCBuildPhase): + _schema = XCBuildPhase._schema.copy() + _schema.update({ + 'inputPaths': [1, str, 0, 1, []], + 'name': [0, str, 0, 0], + 'outputPaths': [1, str, 0, 1, []], + 'shellPath': [0, str, 0, 1, '/bin/sh'], + 'shellScript': [0, str, 0, 1], + 'showEnvVarsInLog': [0, int, 0, 0], + }) + + def Name(self): + if 'name' in self._properties: + return self._properties['name'] + + return 'ShellScript' + + +class PBXCopyFilesBuildPhase(XCBuildPhase): + _schema = XCBuildPhase._schema.copy() + _schema.update({ + 'dstPath': [0, str, 0, 1], + 'dstSubfolderSpec': [0, int, 0, 1], + 'name': [0, str, 0, 0], + }) + + # path_tree_re matches "$(DIR)/path" or just "$(DIR)". Match group 1 is + # "DIR", match group 3 is "path" or None. + path_tree_re = re.compile('^\\$\\((.*)\\)(/(.*)|)$') + + # path_tree_to_subfolder maps names of Xcode variables to the associated + # dstSubfolderSpec property value used in a PBXCopyFilesBuildPhase object. + path_tree_to_subfolder = { + 'BUILT_PRODUCTS_DIR': 16, # Products Directory + # Other types that can be chosen via the Xcode UI. + # TODO(mark): Map Xcode variable names to these. + # : 1, # Wrapper + # : 6, # Executables: 6 + # : 7, # Resources + # : 15, # Java Resources + # : 10, # Frameworks + # : 11, # Shared Frameworks + # : 12, # Shared Support + # : 13, # PlugIns + } + + def Name(self): + if 'name' in self._properties: + return self._properties['name'] + + return 'CopyFiles' + + def FileGroup(self, path): + return self.PBXProjectAncestor().RootGroupForPath(path) + + def SetDestination(self, path): + """Set the dstSubfolderSpec and dstPath properties from path. + + path may be specified in the same notation used for XCHierarchicalElements, + specifically, "$(DIR)/path". + """ + + path_tree_match = self.path_tree_re.search(path) + if path_tree_match: + # Everything else needs to be relative to an Xcode variable. + path_tree = path_tree_match.group(1) + relative_path = path_tree_match.group(3) + + if path_tree in self.path_tree_to_subfolder: + subfolder = self.path_tree_to_subfolder[path_tree] + if relative_path == None: + relative_path = '' + else: + # The path starts with an unrecognized Xcode variable + # name like $(SRCROOT). Xcode will still handle this + # as an "absolute path" that starts with the variable. + subfolder = 0 + relative_path = path + elif path.startswith('/'): + # Special case. Absolute paths are in dstSubfolderSpec 0. + subfolder = 0 + relative_path = path[1:] + else: + raise ValueError, 'Can\'t use path %s in a %s' % \ + (path, self.__class__.__name__) + + self._properties['dstPath'] = relative_path + self._properties['dstSubfolderSpec'] = subfolder + + +class PBXBuildRule(XCObject): + _schema = XCObject._schema.copy() + _schema.update({ + 'compilerSpec': [0, str, 0, 1], + 'filePatterns': [0, str, 0, 0], + 'fileType': [0, str, 0, 1], + 'isEditable': [0, int, 0, 1, 1], + 'outputFiles': [1, str, 0, 1, []], + 'script': [0, str, 0, 0], + }) + + def Name(self): + # Not very inspired, but it's what Xcode uses. + return self.__class__.__name__ + + def Hashables(self): + # super + hashables = XCObject.Hashables(self) + + # Use the hashables of the weak objects that this object refers to. 
+ hashables.append(self._properties['fileType'])
+ if 'filePatterns' in self._properties:
+ hashables.append(self._properties['filePatterns'])
+ return hashables
+
+
+class PBXContainerItemProxy(XCObject):
+ # When referencing an item in this project file, containerPortal is the
+ # PBXProject root object of this project file. When referencing an item in
+ # another project file, containerPortal is a PBXFileReference identifying
+ # the other project file.
+ #
+ # When serving as a proxy to an XCTarget (in this project file or another),
+ # proxyType is 1. When serving as a proxy to a PBXFileReference (in another
+ # project file), proxyType is 2. Type 2 is used for references to the
+ # products of the other project file's targets.
+ #
+ # Xcode is weird about remoteGlobalIDString. Usually, it's printed without
+ # a comment, indicating that it's tracked internally simply as a string, but
+ # sometimes it's printed with a comment (usually when the object is initially
+ # created), indicating that it's tracked as a project file object at least
+ # sometimes. This module always tracks it as an object, but contains a hack
+ # to prevent it from printing the comment in the project file output. See
+ # _XCKVPrint.
+ _schema = XCObject._schema.copy()
+ _schema.update({
+ 'containerPortal': [0, XCContainerPortal, 0, 1],
+ 'proxyType': [0, int, 0, 1],
+ 'remoteGlobalIDString': [0, XCRemoteObject, 0, 1],
+ 'remoteInfo': [0, str, 0, 1],
+ })
+
+ def __repr__(self):
+ props = self._properties
+ name = '%s.gyp:%s' % (props['containerPortal'].Name(), props['remoteInfo'])
+ return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self))
+
+ def Name(self):
+ # Admittedly not the best name, but it's what Xcode uses.
+ return self.__class__.__name__
+
+ def Hashables(self):
+ # super
+ hashables = XCObject.Hashables(self)
+
+ # Use the hashables of the weak objects that this object refers to.
+ hashables.extend(self._properties['containerPortal'].Hashables())
+ hashables.extend(self._properties['remoteGlobalIDString'].Hashables())
+ return hashables
+
+
+class PBXTargetDependency(XCObject):
+ # The "target" property accepts an XCTarget object, and obviously not
+ # NoneType. But XCTarget is defined below, so it can't be put into the
+ # schema yet. The definition of PBXTargetDependency can't be moved below
+ # XCTarget because XCTarget's own schema references PBXTargetDependency.
+ # Python doesn't deal well with this circular relationship, and doesn't have
+ # a real way to do forward declarations. To work around, the type of
+ # the "target" property is reset below, after XCTarget is defined.
+ #
+ # At least one of "name" and "target" is required.
+ _schema = XCObject._schema.copy()
+ _schema.update({
+ 'name': [0, str, 0, 0],
+ 'target': [0, None.__class__, 0, 0],
+ 'targetProxy': [0, PBXContainerItemProxy, 1, 1],
+ })
+
+ def __repr__(self):
+ name = self._properties.get('name') or self._properties['target'].Name()
+ return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self))
+
+ def Name(self):
+ # Admittedly not the best name, but it's what Xcode uses.
+ return self.__class__.__name__
+
+ def Hashables(self):
+ # super
+ hashables = XCObject.Hashables(self)
+
+ # Use the hashables of the weak objects that this object refers to.
+ hashables.extend(self._properties['targetProxy'].Hashables()) + return hashables + + +class PBXReferenceProxy(XCFileLikeElement): + _schema = XCFileLikeElement._schema.copy() + _schema.update({ + 'fileType': [0, str, 0, 1], + 'path': [0, str, 0, 1], + 'remoteRef': [0, PBXContainerItemProxy, 1, 1], + }) + + +class XCTarget(XCRemoteObject): + # An XCTarget is really just an XCObject, the XCRemoteObject thing is just + # to allow PBXProject to be used in the remoteGlobalIDString property of + # PBXContainerItemProxy. + # + # Setting a "name" property at instantiation may also affect "productName", + # which may in turn affect the "PRODUCT_NAME" build setting in children of + # "buildConfigurationList". See __init__ below. + _schema = XCRemoteObject._schema.copy() + _schema.update({ + 'buildConfigurationList': [0, XCConfigurationList, 1, 1, + XCConfigurationList()], + 'buildPhases': [1, XCBuildPhase, 1, 1, []], + 'dependencies': [1, PBXTargetDependency, 1, 1, []], + 'name': [0, str, 0, 1], + 'productName': [0, str, 0, 1], + }) + + def __init__(self, properties=None, id=None, parent=None, + force_outdir=None, force_prefix=None, force_extension=None): + # super + XCRemoteObject.__init__(self, properties, id, parent) + + # Set up additional defaults not expressed in the schema. If a "name" + # property was supplied, set "productName" if it is not present. Also set + # the "PRODUCT_NAME" build setting in each configuration, but only if + # the setting is not present in any build configuration. + if 'name' in self._properties: + if not 'productName' in self._properties: + self.SetProperty('productName', self._properties['name']) + + if 'productName' in self._properties: + if 'buildConfigurationList' in self._properties: + configs = self._properties['buildConfigurationList'] + if configs.HasBuildSetting('PRODUCT_NAME') == 0: + configs.SetBuildSetting('PRODUCT_NAME', + self._properties['productName']) + + def AddDependency(self, other): + pbxproject = self.PBXProjectAncestor() + other_pbxproject = other.PBXProjectAncestor() + if pbxproject == other_pbxproject: + # The easy case. Add a dependency to another target in the same + # project file. + container = PBXContainerItemProxy({'containerPortal': pbxproject, + 'proxyType': 1, + 'remoteGlobalIDString': other, + 'remoteInfo': other.Name()}) + dependency = PBXTargetDependency({'target': other, + 'targetProxy': container}) + self.AppendProperty('dependencies', dependency) + else: + # The hard case. Add a dependency to a target in a different project + # file. Actually, this case isn't really so hard. + other_project_ref = \ + pbxproject.AddOrGetProjectReference(other_pbxproject)[1] + container = PBXContainerItemProxy({ + 'containerPortal': other_project_ref, + 'proxyType': 1, + 'remoteGlobalIDString': other, + 'remoteInfo': other.Name(), + }) + dependency = PBXTargetDependency({'name': other.Name(), + 'targetProxy': container}) + self.AppendProperty('dependencies', dependency) + + # Proxy all of these through to the build configuration list. 
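+ # For example (a sketch; the setting name is illustrative):
+ #
+ #   target.SetBuildSetting('GCC_PREPROCESSOR_DEFINITIONS', ['DEBUG=1'])
+ #
+ # is equivalent to calling SetBuildSetting on the target's
+ # buildConfigurationList, which applies the value to every child
+ # XCBuildConfiguration (typically Debug and Release).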
+
+ def ConfigurationNamed(self, name):
+ return self._properties['buildConfigurationList'].ConfigurationNamed(name)
+
+ def DefaultConfiguration(self):
+ return self._properties['buildConfigurationList'].DefaultConfiguration()
+
+ def HasBuildSetting(self, key):
+ return self._properties['buildConfigurationList'].HasBuildSetting(key)
+
+ def GetBuildSetting(self, key):
+ return self._properties['buildConfigurationList'].GetBuildSetting(key)
+
+ def SetBuildSetting(self, key, value):
+ return self._properties['buildConfigurationList'].SetBuildSetting(key, \
+ value)
+
+ def AppendBuildSetting(self, key, value):
+ return self._properties['buildConfigurationList'].AppendBuildSetting(key, \
+ value)
+
+ def DelBuildSetting(self, key):
+ return self._properties['buildConfigurationList'].DelBuildSetting(key)
+
+
+# Redefine the type of the "target" property. See PBXTargetDependency._schema
+# above.
+PBXTargetDependency._schema['target'][1] = XCTarget
+
+
+class PBXNativeTarget(XCTarget):
+ # buildPhases is overridden in the schema to be able to set defaults.
+ #
+ # NOTE: Contrary to most objects, it is advisable to set parent when
+ # constructing PBXNativeTarget. A parent of an XCTarget must be a PBXProject
+ # object. A parent reference is required for a PBXNativeTarget during
+ # construction to be able to set up the target defaults for productReference,
+ # because a PBXBuildFile object must be created for the target and it must
+ # be added to the PBXProject's mainGroup hierarchy.
+ _schema = XCTarget._schema.copy()
+ _schema.update({
+ 'buildPhases': [1, XCBuildPhase, 1, 1,
+ [PBXSourcesBuildPhase(), PBXFrameworksBuildPhase()]],
+ 'buildRules': [1, PBXBuildRule, 1, 1, []],
+ 'productReference': [0, PBXFileReference, 0, 1],
+ 'productType': [0, str, 0, 1],
+ })
+
+ # Mapping from Xcode product-types to settings. The settings are:
+ # filetype : used for explicitFileType in the project file
+ # prefix : the prefix for the file name
+ # suffix : the suffix for the file name
+ _product_filetypes = {
+ 'com.apple.product-type.application': ['wrapper.application',
+ '', '.app'],
+ 'com.apple.product-type.bundle': ['wrapper.cfbundle',
+ '', '.bundle'],
+ 'com.apple.product-type.framework': ['wrapper.framework',
+ '', '.framework'],
+ 'com.apple.product-type.library.dynamic': ['compiled.mach-o.dylib',
+ 'lib', '.dylib'],
+ 'com.apple.product-type.library.static': ['archive.ar',
+ 'lib', '.a'],
+ 'com.apple.product-type.tool': ['compiled.mach-o.executable',
+ '', ''],
+ 'com.googlecode.gyp.xcode.bundle': ['compiled.mach-o.dylib',
+ '', '.so'],
+ }
+
+ def __init__(self, properties=None, id=None, parent=None,
+ force_outdir=None, force_prefix=None, force_extension=None):
+ # super
+ XCTarget.__init__(self, properties, id, parent)
+
+ if 'productName' in self._properties and \
+ 'productType' in self._properties and \
+ not 'productReference' in self._properties and \
+ self._properties['productType'] in self._product_filetypes:
+ products_group = None
+ pbxproject = self.PBXProjectAncestor()
+ if pbxproject != None:
+ products_group = pbxproject.ProductsGroup()
+
+ if products_group != None:
+ (filetype, prefix, suffix) = \
+ self._product_filetypes[self._properties['productType']]
+ # Xcode does not have a distinct type for loadable modules that are
+ # pure BSD targets (not in a bundle wrapper). GYP allows such modules
+ # to be specified by setting a target type to loadable_module without
+ # having mac_bundle set.
These are mapped to the pseudo-product type + # com.googlecode.gyp.xcode.bundle. + # + # By picking up this special type and converting it to a dynamic + # library (com.apple.product-type.library.dynamic) with fix-ups, + # single-file loadable modules can be produced. + # + # MACH_O_TYPE is changed to mh_bundle to produce the proper file type + # (as opposed to mh_dylib). In order for linking to succeed, + # DYLIB_CURRENT_VERSION and DYLIB_COMPATIBILITY_VERSION must be + # cleared. They are meaningless for type mh_bundle. + # + # Finally, the .so extension is forcibly applied over the default + # (.dylib), unless another forced extension is already selected. + # .dylib is plainly wrong, and .bundle is used by loadable_modules in + # bundle wrappers (com.apple.product-type.bundle). .so seems an odd + # choice because it's used as the extension on many other systems that + # don't distinguish between linkable shared libraries and non-linkable + # loadable modules, but there's precedent: Python loadable modules on + # Mac OS X use an .so extension. + if self._properties['productType'] == 'com.googlecode.gyp.xcode.bundle': + self._properties['productType'] = \ + 'com.apple.product-type.library.dynamic' + self.SetBuildSetting('MACH_O_TYPE', 'mh_bundle') + self.SetBuildSetting('DYLIB_CURRENT_VERSION', '') + self.SetBuildSetting('DYLIB_COMPATIBILITY_VERSION', '') + if force_extension == None: + force_extension = suffix[1:] + + if force_extension is not None: + # If it's a wrapper (bundle), set WRAPPER_EXTENSION. + if filetype.startswith('wrapper.'): + self.SetBuildSetting('WRAPPER_EXTENSION', force_extension) + else: + # Extension override. + suffix = '.' + force_extension + self.SetBuildSetting('EXECUTABLE_EXTENSION', force_extension) + + if filetype.startswith('compiled.mach-o.executable'): + product_name = self._properties['productName'] + product_name += suffix + suffix = '' + self.SetProperty('productName', product_name) + self.SetBuildSetting('PRODUCT_NAME', product_name) + + # Xcode handles most prefixes based on the target type, however there + # are exceptions. If a "BSD Dynamic Library" target is added in the + # Xcode UI, Xcode sets EXECUTABLE_PREFIX. This check duplicates that + # behavior. + if force_prefix is not None: + prefix = force_prefix + if filetype.startswith('wrapper.'): + self.SetBuildSetting('WRAPPER_PREFIX', prefix) + else: + self.SetBuildSetting('EXECUTABLE_PREFIX', prefix) + + if force_outdir is not None: + self.SetBuildSetting('TARGET_BUILD_DIR', force_outdir) + + # TODO(tvl): Remove the below hack. + # http://code.google.com/p/gyp/issues/detail?id=122 + + # Some targets include the prefix in the target_name. These targets + # really should just add a product_name setting that doesn't include + # the prefix. For example: + # target_name = 'libevent', product_name = 'event' + # This check cleans up for them. 
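+ # For example (hypothetical target): with product_name 'libevent' and
+ # prefix 'lib', the code below strips the duplicated prefix so that
+ # productName becomes 'event' and the product path comes out as
+ # 'libevent.a' rather than 'liblibevent.a'.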
+ product_name = self._properties['productName'] + prefix_len = len(prefix) + if prefix_len and (product_name[:prefix_len] == prefix): + product_name = product_name[prefix_len:] + self.SetProperty('productName', product_name) + self.SetBuildSetting('PRODUCT_NAME', product_name) + + ref_props = { + 'explicitFileType': filetype, + 'includeInIndex': 0, + 'path': prefix + product_name + suffix, + 'sourceTree': 'BUILT_PRODUCTS_DIR', + } + file_ref = PBXFileReference(ref_props) + products_group.AppendChild(file_ref) + self.SetProperty('productReference', file_ref) + + def GetBuildPhaseByType(self, type): + if not 'buildPhases' in self._properties: + return None + + the_phase = None + for phase in self._properties['buildPhases']: + if isinstance(phase, type): + # Some phases may be present in multiples in a well-formed project file, + # but phases like PBXSourcesBuildPhase may only be present singly, and + # this function is intended as an aid to GetBuildPhaseByType. Loop + # over the entire list of phases and assert if more than one of the + # desired type is found. + assert the_phase == None + the_phase = phase + + return the_phase + + def HeadersPhase(self): + headers_phase = self.GetBuildPhaseByType(PBXHeadersBuildPhase) + if headers_phase == None: + headers_phase = PBXHeadersBuildPhase() + + # The headers phase should come before the resources, sources, and + # frameworks phases, if any. + insert_at = len(self._properties['buildPhases']) + for index in xrange(0, len(self._properties['buildPhases'])): + phase = self._properties['buildPhases'][index] + if isinstance(phase, PBXResourcesBuildPhase) or \ + isinstance(phase, PBXSourcesBuildPhase) or \ + isinstance(phase, PBXFrameworksBuildPhase): + insert_at = index + break + + self._properties['buildPhases'].insert(insert_at, headers_phase) + headers_phase.parent = self + + return headers_phase + + def ResourcesPhase(self): + resources_phase = self.GetBuildPhaseByType(PBXResourcesBuildPhase) + if resources_phase == None: + resources_phase = PBXResourcesBuildPhase() + + # The resources phase should come before the sources and frameworks + # phases, if any. 
+ insert_at = len(self._properties['buildPhases']) + for index in xrange(0, len(self._properties['buildPhases'])): + phase = self._properties['buildPhases'][index] + if isinstance(phase, PBXSourcesBuildPhase) or \ + isinstance(phase, PBXFrameworksBuildPhase): + insert_at = index + break + + self._properties['buildPhases'].insert(insert_at, resources_phase) + resources_phase.parent = self + + return resources_phase + + def SourcesPhase(self): + sources_phase = self.GetBuildPhaseByType(PBXSourcesBuildPhase) + if sources_phase == None: + sources_phase = PBXSourcesBuildPhase() + self.AppendProperty('buildPhases', sources_phase) + + return sources_phase + + def FrameworksPhase(self): + frameworks_phase = self.GetBuildPhaseByType(PBXFrameworksBuildPhase) + if frameworks_phase == None: + frameworks_phase = PBXFrameworksBuildPhase() + self.AppendProperty('buildPhases', frameworks_phase) + + return frameworks_phase + + def AddDependency(self, other): + # super + XCTarget.AddDependency(self, other) + + static_library_type = 'com.apple.product-type.library.static' + shared_library_type = 'com.apple.product-type.library.dynamic' + framework_type = 'com.apple.product-type.framework' + if isinstance(other, PBXNativeTarget) and \ + 'productType' in self._properties and \ + self._properties['productType'] != static_library_type and \ + 'productType' in other._properties and \ + (other._properties['productType'] == static_library_type or \ + ((other._properties['productType'] == shared_library_type or \ + other._properties['productType'] == framework_type) and \ + ((not other.HasBuildSetting('MACH_O_TYPE')) or + other.GetBuildSetting('MACH_O_TYPE') != 'mh_bundle'))): + + file_ref = other.GetProperty('productReference') + + pbxproject = self.PBXProjectAncestor() + other_pbxproject = other.PBXProjectAncestor() + if pbxproject != other_pbxproject: + other_project_product_group = \ + pbxproject.AddOrGetProjectReference(other_pbxproject)[0] + file_ref = other_project_product_group.GetChildByRemoteObject(file_ref) + + self.FrameworksPhase().AppendProperty('files', + PBXBuildFile({'fileRef': file_ref})) + + +class PBXAggregateTarget(XCTarget): + pass + + +class PBXProject(XCContainerPortal): + # A PBXProject is really just an XCObject, the XCContainerPortal thing is + # just to allow PBXProject to be used in the containerPortal property of + # PBXContainerItemProxy. + """ + + Attributes: + path: "sample.xcodeproj". TODO(mark) Document me! + _other_pbxprojects: A dictionary, keyed by other PBXProject objects. Each + value is a reference to the dict in the + projectReferences list associated with the keyed + PBXProject. 
+ """ + + _schema = XCContainerPortal._schema.copy() + _schema.update({ + 'attributes': [0, dict, 0, 0], + 'buildConfigurationList': [0, XCConfigurationList, 1, 1, + XCConfigurationList()], + 'compatibilityVersion': [0, str, 0, 1, 'Xcode 3.2'], + 'hasScannedForEncodings': [0, int, 0, 1, 1], + 'mainGroup': [0, PBXGroup, 1, 1, PBXGroup()], + 'projectDirPath': [0, str, 0, 1, ''], + 'projectReferences': [1, dict, 0, 0], + 'projectRoot': [0, str, 0, 1, ''], + 'targets': [1, XCTarget, 1, 1, []], + }) + + def __init__(self, properties=None, id=None, parent=None, path=None): + self.path = path + self._other_pbxprojects = {} + # super + return XCContainerPortal.__init__(self, properties, id, parent) + + def Name(self): + name = self.path + if name[-10:] == '.xcodeproj': + name = name[:-10] + return posixpath.basename(name) + + def Path(self): + return self.path + + def Comment(self): + return 'Project object' + + def Children(self): + # super + children = XCContainerPortal.Children(self) + + # Add children that the schema doesn't know about. Maybe there's a more + # elegant way around this, but this is the only case where we need to own + # objects in a dictionary (that is itself in a list), and three lines for + # a one-off isn't that big a deal. + if 'projectReferences' in self._properties: + for reference in self._properties['projectReferences']: + children.append(reference['ProductGroup']) + + return children + + def PBXProjectAncestor(self): + return self + + def _GroupByName(self, name): + if not 'mainGroup' in self._properties: + self.SetProperty('mainGroup', PBXGroup()) + + main_group = self._properties['mainGroup'] + group = main_group.GetChildByName(name) + if group == None: + group = PBXGroup({'name': name}) + main_group.AppendChild(group) + + return group + + # SourceGroup and ProductsGroup are created by default in Xcode's own + # templates. + def SourceGroup(self): + return self._GroupByName('Source') + + def ProductsGroup(self): + return self._GroupByName('Products') + + # IntermediatesGroup is used to collect source-like files that are generated + # by rules or script phases and are placed in intermediate directories such + # as DerivedSources. + def IntermediatesGroup(self): + return self._GroupByName('Intermediates') + + # FrameworksGroup and ProjectsGroup are top-level groups used to collect + # frameworks and projects. + def FrameworksGroup(self): + return self._GroupByName('Frameworks') + + def ProjectsGroup(self): + return self._GroupByName('Projects') + + def RootGroupForPath(self, path): + """Returns a PBXGroup child of this object to which path should be added. + + This method is intended to choose between SourceGroup and + IntermediatesGroup on the basis of whether path is present in a source + directory or an intermediates directory. For the purposes of this + determination, any path located within a derived file directory such as + PROJECT_DERIVED_FILE_DIR is treated as being in an intermediates + directory. + + The returned value is a two-element tuple. The first element is the + PBXGroup, and the second element specifies whether that group should be + organized hierarchically (True) or as a single flat list (False). + """ + + # TODO(mark): make this a class variable and bind to self on call? + # Also, this list is nowhere near exhaustive. + # INTERMEDIATE_DIR and SHARED_INTERMEDIATE_DIR are used by + # gyp.generator.xcode. There should probably be some way for that module + # to push the names in, rather than having to hard-code them here. 
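+ # For example (a sketch with hypothetical paths):
+ #
+ #   project.RootGroupForPath('$(INTERMEDIATE_DIR)/generated.cc')
+ #       # -> (IntermediatesGroup, True)
+ #   project.RootGroupForPath('source/main.cc')
+ #       # -> (SourceGroup, True), via the fallback below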
+ source_tree_groups = { + 'DERIVED_FILE_DIR': (self.IntermediatesGroup, True), + 'INTERMEDIATE_DIR': (self.IntermediatesGroup, True), + 'PROJECT_DERIVED_FILE_DIR': (self.IntermediatesGroup, True), + 'SHARED_INTERMEDIATE_DIR': (self.IntermediatesGroup, True), + } + + (source_tree, path) = SourceTreeAndPathFromPath(path) + if source_tree != None and source_tree in source_tree_groups: + (group_func, hierarchical) = source_tree_groups[source_tree] + group = group_func() + return (group, hierarchical) + + # TODO(mark): make additional choices based on file extension. + + return (self.SourceGroup(), True) + + def AddOrGetFileInRootGroup(self, path): + """Returns a PBXFileReference corresponding to path in the correct group + according to RootGroupForPath's heuristics. + + If an existing PBXFileReference for path exists, it will be returned. + Otherwise, one will be created and returned. + """ + + (group, hierarchical) = self.RootGroupForPath(path) + return group.AddOrGetFileByPath(path, hierarchical) + + def RootGroupsTakeOverOnlyChildren(self, recurse=False): + """Calls TakeOverOnlyChild for all groups in the main group.""" + + for group in self._properties['mainGroup']._properties['children']: + if isinstance(group, PBXGroup): + group.TakeOverOnlyChild(recurse) + + def SortGroups(self): + # Sort the children of the mainGroup (like "Source" and "Products") + # according to their defined order. + self._properties['mainGroup']._properties['children'] = \ + sorted(self._properties['mainGroup']._properties['children'], + cmp=lambda x,y: x.CompareRootGroup(y)) + + # Sort everything else by putting group before files, and going + # alphabetically by name within sections of groups and files. SortGroup + # is recursive. + for group in self._properties['mainGroup']._properties['children']: + if not isinstance(group, PBXGroup): + continue + + if group.Name() == 'Products': + # The Products group is a special case. Instead of sorting + # alphabetically, sort things in the order of the targets that + # produce the products. To do this, just build up a new list of + # products based on the targets. + products = [] + for target in self._properties['targets']: + if not isinstance(target, PBXNativeTarget): + continue + product = target._properties['productReference'] + # Make sure that the product is already in the products group. + assert product in group._properties['children'] + products.append(product) + + # Make sure that this process doesn't miss anything that was already + # in the products group. + assert len(products) == len(group._properties['children']) + group._properties['children'] = products + else: + group.SortGroup() + + def AddOrGetProjectReference(self, other_pbxproject): + """Add a reference to another project file (via PBXProject object) to this + one. + + Returns [ProductGroup, ProjectRef]. ProductGroup is a PBXGroup object in + this project file that contains a PBXReferenceProxy object for each + product of each PBXNativeTarget in the other project file. ProjectRef is + a PBXFileReference to the other project file. + + If this project file already references the other project file, the + existing ProductGroup and ProjectRef are returned. The ProductGroup will + still be updated if necessary. + """ + + if not 'projectReferences' in self._properties: + self._properties['projectReferences'] = [] + + product_group = None + project_ref = None + + if not other_pbxproject in self._other_pbxprojects: + # This project file isn't yet linked to the other one. Establish the + # link. 
+ product_group = PBXGroup({'name': 'Products'})
+
+ # ProductGroup is strong.
+ product_group.parent = self
+
+ # There's nothing unique about this PBXGroup, and if left alone, it will
+ # wind up with the same set of hashables as all other PBXGroup objects
+ # owned by the projectReferences list. Add the hashables of the
+ # remote PBXProject that it's related to.
+ product_group._hashables.extend(other_pbxproject.Hashables())
+
+ # The other project reports its path as relative to the same directory
+ # that this project's path is relative to. The other project's path
+ # is not necessarily already relative to this project. Figure out the
+ # pathname that this project needs to use to refer to the other one.
+ this_path = posixpath.dirname(self.Path())
+ projectDirPath = self.GetProperty('projectDirPath')
+ if projectDirPath:
+ if posixpath.isabs(projectDirPath[0]):
+ this_path = projectDirPath
+ else:
+ this_path = posixpath.join(this_path, projectDirPath)
+ other_path = gyp.common.RelativePath(other_pbxproject.Path(), this_path)
+
+ # ProjectRef is weak (it's owned by the mainGroup hierarchy).
+ project_ref = PBXFileReference({
+ 'lastKnownFileType': 'wrapper.pb-project',
+ 'path': other_path,
+ 'sourceTree': 'SOURCE_ROOT',
+ })
+ self.ProjectsGroup().AppendChild(project_ref)
+
+ ref_dict = {'ProductGroup': product_group, 'ProjectRef': project_ref}
+ self._other_pbxprojects[other_pbxproject] = ref_dict
+ self.AppendProperty('projectReferences', ref_dict)
+
+ # Xcode seems to sort this list case-insensitively.
+ self._properties['projectReferences'] = \
+ sorted(self._properties['projectReferences'], cmp=lambda x,y:
+ cmp(x['ProjectRef'].Name().lower(),
+ y['ProjectRef'].Name().lower()))
+ else:
+ # The link already exists. Pull out the relevant data.
+ project_ref_dict = self._other_pbxprojects[other_pbxproject]
+ product_group = project_ref_dict['ProductGroup']
+ project_ref = project_ref_dict['ProjectRef']
+
+ self._SetUpProductReferences(other_pbxproject, product_group, project_ref)
+
+ return [product_group, project_ref]
+
+ def _SetUpProductReferences(self, other_pbxproject, product_group,
+ project_ref):
+ # TODO(mark): This only adds references to products in other_pbxproject
+ # when they don't exist in this pbxproject. Perhaps it should also
+ # remove references from this pbxproject that are no longer present in
+ # other_pbxproject. Perhaps it should update various properties if they
+ # change.
+ for target in other_pbxproject._properties['targets']:
+ if not isinstance(target, PBXNativeTarget):
+ continue
+
+ other_fileref = target._properties['productReference']
+ if product_group.GetChildByRemoteObject(other_fileref) == None:
+ # Xcode sets remoteInfo to the name of the target and not the name
+ # of its product, despite this proxy being a reference to the product.
+ container_item = PBXContainerItemProxy({
+ 'containerPortal': project_ref,
+ 'proxyType': 2,
+ 'remoteGlobalIDString': other_fileref,
+ 'remoteInfo': target.Name()
+ })
+ # TODO(mark): Does sourceTree get copied straight over from the other
+ # project? Can the other project ever have lastKnownFileType here
+ # instead of explicitFileType? (Use it if so?) Can path ever be
+ # unset? (I don't think so.) Can other_fileref have name set, and
+ # does it impact the PBXReferenceProxy if so? These are the questions
+ # that perhaps will be answered one day.
+ reference_proxy = PBXReferenceProxy({
+ 'fileType': other_fileref._properties['explicitFileType'],
+ 'path': other_fileref._properties['path'],
+ 'sourceTree': other_fileref._properties['sourceTree'],
+ 'remoteRef': container_item,
+ })
+
+ product_group.AppendChild(reference_proxy)
+
+ def SortRemoteProductReferences(self):
+ # For each remote project file, sort the associated ProductGroup in the
+ # same order that the targets are sorted in the remote project file. This
+ # is the sort order used by Xcode.
+
+ def CompareProducts(x, y, remote_products):
+ # x and y are PBXReferenceProxy objects. Go through their associated
+ # PBXContainerItem to get the remote PBXFileReference, which will be
+ # present in the remote_products list.
+ x_remote = x._properties['remoteRef']._properties['remoteGlobalIDString']
+ y_remote = y._properties['remoteRef']._properties['remoteGlobalIDString']
+ x_index = remote_products.index(x_remote)
+ y_index = remote_products.index(y_remote)
+
+ # Use the order of each remote PBXFileReference in remote_products to
+ # determine the sort order.
+ return cmp(x_index, y_index)
+
+ for other_pbxproject, ref_dict in self._other_pbxprojects.iteritems():
+ # Build up a list of products in the remote project file, ordered the
+ # same as the targets that produce them.
+ remote_products = []
+ for target in other_pbxproject._properties['targets']:
+ if not isinstance(target, PBXNativeTarget):
+ continue
+ remote_products.append(target._properties['productReference'])
+
+ # Sort the PBXReferenceProxy children according to the list of remote
+ # products.
+ product_group = ref_dict['ProductGroup']
+ product_group._properties['children'] = sorted(
+ product_group._properties['children'],
+ cmp=lambda x, y: CompareProducts(x, y, remote_products))
+
+
+class XCProjectFile(XCObject):
+ _schema = XCObject._schema.copy()
+ _schema.update({
+ 'archiveVersion': [0, int, 0, 1, 1],
+ 'classes': [0, dict, 0, 1, {}],
+ 'objectVersion': [0, int, 0, 1, 45],
+ 'rootObject': [0, PBXProject, 1, 1],
+ })
+
+ def SetXcodeVersion(self, version):
+ version_to_object_version = {
+ '2.4': 45,
+ '3.0': 45,
+ '3.1': 45,
+ '3.2': 46,
+ }
+ if not version in version_to_object_version:
+ supported_str = ', '.join(sorted(version_to_object_version.keys()))
+ raise Exception(
+ 'Unsupported Xcode version %s (supported: %s)' %
+ ( version, supported_str ) )
+ compatibility_version = 'Xcode %s' % version
+ self._properties['rootObject'].SetProperty('compatibilityVersion',
+ compatibility_version)
+ self.SetProperty('objectVersion', version_to_object_version[version])
+
+ def ComputeIDs(self, recursive=True, overwrite=True, hash=None):
+ # Although XCProjectFile is implemented here as an XCObject, it's not a
+ # proper object in the Xcode sense, and it certainly doesn't have its own
+ # ID. Pass through an attempt to update IDs to the real root object.
+ if recursive:
+ self._properties['rootObject'].ComputeIDs(recursive, overwrite, hash)
+
+ def Print(self, file=sys.stdout):
+ self.VerifyHasRequiredProperties()
+
+ # Add the special "objects" property, which will be caught and handled
+ # separately during printing. This structure allows a fairly standard
+ # loop to do the normal printing.
+ self._properties['objects'] = {}
+ self._XCPrint(file, 0, '// !$*UTF8*$!\n')
+ if self._should_print_single_line:
+ self._XCPrint(file, 0, '{ ')
+ else:
+ self._XCPrint(file, 0, '{\n')
+ for property, value in sorted(self._properties.iteritems(),
+ cmp=lambda x, y: cmp(x, y)):
+ if property == 'objects':
+ self._PrintObjects(file)
+ else:
+ self._XCKVPrint(file, 1, property, value)
+ self._XCPrint(file, 0, '}\n')
+ del self._properties['objects']
+
+ def _PrintObjects(self, file):
+ if self._should_print_single_line:
+ self._XCPrint(file, 0, 'objects = {')
+ else:
+ self._XCPrint(file, 1, 'objects = {\n')
+
+ objects_by_class = {}
+ for object in self.Descendants():
+ if object == self:
+ continue
+ class_name = object.__class__.__name__
+ if not class_name in objects_by_class:
+ objects_by_class[class_name] = []
+ objects_by_class[class_name].append(object)
+
+ for class_name in sorted(objects_by_class):
+ self._XCPrint(file, 0, '\n')
+ self._XCPrint(file, 0, '/* Begin ' + class_name + ' section */\n')
+ for object in sorted(objects_by_class[class_name],
+ cmp=lambda x, y: cmp(x.id, y.id)):
+ object.Print(file)
+ self._XCPrint(file, 0, '/* End ' + class_name + ' section */\n')
+
+ if self._should_print_single_line:
+ self._XCPrint(file, 0, '}; ')
+ else:
+ self._XCPrint(file, 1, '};\n')
diff --git a/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/xml_fix.py b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/xml_fix.py
new file mode 100644
index 0000000000..5de848158d
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/xml_fix.py
@@ -0,0 +1,69 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Applies a fix to CR LF TAB handling in xml.dom.
+
+Fixes this: http://code.google.com/p/chromium/issues/detail?id=76293
+Working around this: http://bugs.python.org/issue5752
+TODO(bradnelson): Consider dropping this when we drop XP support.
+"""
+
+
+import xml.dom.minidom
+
+
+def _Replacement_write_data(writer, data, is_attrib=False):
+ """Writes datachars to writer."""
+ data = data.replace("&", "&amp;").replace("<", "&lt;")
+ data = data.replace("\"", "&quot;").replace(">", "&gt;")
+ if is_attrib:
+ data = data.replace(
+ "\r", "&#xD;").replace(
+ "\n", "&#xA;").replace(
+ "\t", "&#x9;")
+ writer.write(data)
+
+
+def _Replacement_writexml(self, writer, indent="", addindent="", newl=""):
+ # indent = current indentation
+ # addindent = indentation to add to higher levels
+ # newl = newline string
+ writer.write(indent+"<" + self.tagName)
+
+ attrs = self._get_attributes()
+ a_names = attrs.keys()
+ a_names.sort()
+
+ for a_name in a_names:
+ writer.write(" %s=\"" % a_name)
+ _Replacement_write_data(writer, attrs[a_name].value, is_attrib=True)
+ writer.write("\"")
+ if self.childNodes:
+ writer.write(">%s" % newl)
+ for node in self.childNodes:
+ node.writexml(writer, indent + addindent, addindent, newl)
+ writer.write("%s</%s>%s" % (indent, self.tagName, newl))
+ else:
+ writer.write("/>%s" % newl)
+
+
+class XmlFix(object):
+ """Object to manage temporary patching of xml.dom.minidom."""
+
+ def __init__(self):
+ # Preserve current xml.dom.minidom functions.
+ self.write_data = xml.dom.minidom._write_data
+ self.writexml = xml.dom.minidom.Element.writexml
+ # Inject replacement versions of a function and a method.
+ xml.dom.minidom._write_data = _Replacement_write_data + xml.dom.minidom.Element.writexml = _Replacement_writexml + + def Cleanup(self): + if self.write_data: + xml.dom.minidom._write_data = self.write_data + xml.dom.minidom.Element.writexml = self.writexml + self.write_data = None + + def __del__(self): + self.Cleanup() diff --git a/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylintrc b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylintrc new file mode 100644 index 0000000000..d7c23d2a21 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/pylintrc @@ -0,0 +1,307 @@ +[MASTER] + +# Specify a configuration file. +#rcfile= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Profiled execution. +profile=no + +# Add files or directories to the blacklist. They should be base names, not +# paths. +ignore=CVS + +# Pickle collected data for later comparisons. +persistent=yes + +# List of plugins (as comma separated values of python modules names) to load, +# usually to register additional checkers. +load-plugins= + + +[MESSAGES CONTROL] + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time. +#enable= + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). +# C0103: Invalid name "NN" (should match [a-z_][a-z0-9_]{2,30}$) +# C0111: Missing docstring +# C0302: Too many lines in module (NN) +# R0902: Too many instance attributes (N/7) +# R0903: Too few public methods (N/2) +# R0904: Too many public methods (NN/20) +# R0912: Too many branches (NN/12) +# R0913: Too many arguments (N/5) +# R0914: Too many local variables (NN/15) +# R0915: Too many statements (NN/50) +# W0141: Used builtin function 'map' +# W0142: Used * or ** magic +# W0232: Class has no __init__ method +# W0511: TODO +# W0603: Using the global statement +# +# These should be enabled eventually: +# C0112: Empty docstring +# C0301: Line too long (NN/80) +# C0321: More than one statement on single line +# C0322: Operator not preceded by a space +# C0323: Operator not followed by a space +# C0324: Comma not followed by a space +# E0101: Explicit return in __init__ +# E0102: function already defined line NN +# E1002: Use of super on an old style class +# E1101: Instance of 'XX' has no 'YY' member +# E1103: Instance of 'XX' has no 'XX' member (but some types could not be inferred) +# E0602: Undefined variable 'XX' +# F0401: Unable to import 'XX' +# R0201: Method could be a function +# R0801: Similar lines in N files +# W0102: Dangerous default value {} as argument +# W0104: Statement seems to have no effect +# W0105: String statement has no effect +# W0108: Lambda may not be necessary +# W0201: Attribute 'XX' defined outside __init__ +# W0212: Access to a protected member XX of a client class +# W0221: Arguments number differs from overridden method +# W0223: Method 'XX' is abstract in class 'YY' but is not overridden +# W0231: __init__ method from base class 'XX' is not called +# W0301: Unnecessary semicolon +# W0311: Bad indentation. 
Found NN spaces, expected NN +# W0401: Wildcard import XX +# W0402: Uses of a deprecated module 'string' +# W0403: Relative import 'XX', should be 'YY.XX' +# W0404: Reimport 'XX' (imported line NN) +# W0601: Global variable 'XX' undefined at the module level +# W0602: Using global for 'XX' but no assignment is done +# W0611: Unused import pprint +# W0612: Unused variable 'XX' +# W0613: Unused argument 'XX' +# W0614: Unused import XX from wildcard import +# W0621: Redefining name 'XX' from outer scope (line NN) +# W0622: Redefining built-in 'NN' +# W0631: Using possibly undefined loop variable 'XX' +# W0701: Raising a string exception +# W0702: No exception type(s) specified +disable=C0103,C0111,C0302,R0902,R0903,R0904,R0912,R0913,R0914,R0915,W0141,W0142,W0232,W0511,W0603,C0112,C0301,C0321,C0322,C0323,C0324,E0101,E0102,E1002,E1101,E1103,E0602,F0401,R0201,R0801,W0102,W0104,W0105,W0108,W0201,W0212,W0221,W0223,W0231,W0301,W0311,W0401,W0402,W0403,W0404,W0601,W0602,W0611,W0612,W0613,W0614,W0621,W0622,W0631,W0701,W0702 + + +[REPORTS] + +# Set the output format. Available formats are text, parseable, colorized, msvs +# (visual studio) and html +output-format=text + +# Include message's id in output +include-ids=yes + +# Put messages in a separate file for each module / package specified on the +# command line instead of printing them on stdout. Reports (if any) will be +# written in a file name "pylint_global.[txt|html]". +files-output=no + +# Tells whether to display a full report or only the messages +reports=no + +# Python expression which should return a note less than 10 (10 is the highest +# note). You have access to the variables errors warning, statement which +# respectively contain the number of errors / warnings messages and the total +# number of statements analyzed. This is used by the global evaluation report +# (RP0004). +evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Add a comment according to your evaluation note. This is used by the global +# evaluation report (RP0004). +comment=no + + +[VARIABLES] + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# A regular expression matching the beginning of the name of dummy variables +# (i.e. not used). +dummy-variables-rgx=_|dummy + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid to define new builtins when possible. +additional-builtins= + + +[TYPECHECK] + +# Tells whether missing members accessed in mixin class should be ignored. A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# List of classes names for which member attributes should not be checked +# (useful for classes with attributes dynamically set). +ignored-classes=SQLObject + +# When zope mode is activated, add a predefined set of Zope acquired attributes +# to generated-members. +zope=no + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E0201 when accessed. Python regular +# expressions are accepted. +generated-members=REQUEST,acl_users,aq_parent + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME,XXX,TODO + + +[SIMILARITIES] + +# Minimum lines number of a similarity. +min-similarity-lines=4 + +# Ignore comments when computing similarities. +ignore-comments=yes + +# Ignore docstrings when computing similarities. 
+ignore-docstrings=yes + + +[FORMAT] + +# Maximum number of characters on a single line. +max-line-length=80 + +# Maximum number of lines in a module +max-module-lines=1000 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + + +[BASIC] + +# Required attributes for module, separated by a comma +required-attributes= + +# List of builtins function names that should not be used, separated by a comma +bad-functions=map,filter,apply,input + +# Regular expression which should only match correct module names +module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ + +# Regular expression which should only match correct module level names +const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ + +# Regular expression which should only match correct class names +class-rgx=[A-Z_][a-zA-Z0-9]+$ + +# Regular expression which should only match correct function names +function-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct method names +method-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct instance attribute names +attr-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct argument names +argument-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct variable names +variable-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct list comprehension / +# generator expression variable names +inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ + +# Good variable names which should always be accepted, separated by a comma +good-names=i,j,k,ex,Run,_ + +# Bad variable names which should always be refused, separated by a comma +bad-names=foo,bar,baz,toto,tutu,tata + +# Regular expression which should only match functions or classes name which do +# not require a docstring +no-docstring-rgx=__.*__ + + +[DESIGN] + +# Maximum number of arguments for function / method +max-args=5 + +# Argument names that match this expression will be ignored. Default to name +# with leading underscore +ignored-argument-names=_.* + +# Maximum number of locals for function / method body +max-locals=15 + +# Maximum number of return / yield for function / method body +max-returns=6 + +# Maximum number of branch for function / method body +max-branchs=12 + +# Maximum number of statements in function / method body +max-statements=50 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of attributes for a class (see R0902). +max-attributes=7 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + + +[CLASSES] + +# List of interface methods to ignore, separated by a comma. This is used for +# instance to not check methods defines in Zope's Interface base class. +ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__,__new__,setUp + +# List of valid names for the first argument in a class method. 
+valid-classmethod-first-arg=cls + + +[IMPORTS] + +# Deprecated modules which should not be used, separated by a comma +deprecated-modules=regsub,string,TERMIOS,Bastion,rexec + +# Create a graph of every (i.e. internal and external) dependencies in the +# given file (report RP0402 must not be disabled) +import-graph= + +# Create a graph of external dependencies in the given file (report RP0402 must +# not be disabled) +ext-import-graph= + +# Create a graph of internal dependencies in the given file (report RP0402 must +# not be disabled) +int-import-graph= + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. Defaults to +# "Exception" +overgeneral-exceptions=Exception diff --git a/deps/npm/node_modules/node-gyp/legacy/tools/gyp/samples/samples b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/samples/samples new file mode 100755 index 0000000000..804b618998 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/samples/samples @@ -0,0 +1,81 @@ +#!/usr/bin/python + +# Copyright (c) 2009 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import os.path +import shutil +import sys + + +gyps = [ + 'app/app.gyp', + 'base/base.gyp', + 'build/temp_gyp/googleurl.gyp', + 'build/all.gyp', + 'build/common.gypi', + 'build/external_code.gypi', + 'chrome/test/security_tests/security_tests.gyp', + 'chrome/third_party/hunspell/hunspell.gyp', + 'chrome/chrome.gyp', + 'media/media.gyp', + 'net/net.gyp', + 'printing/printing.gyp', + 'sdch/sdch.gyp', + 'skia/skia.gyp', + 'testing/gmock.gyp', + 'testing/gtest.gyp', + 'third_party/bzip2/bzip2.gyp', + 'third_party/icu38/icu38.gyp', + 'third_party/libevent/libevent.gyp', + 'third_party/libjpeg/libjpeg.gyp', + 'third_party/libpng/libpng.gyp', + 'third_party/libxml/libxml.gyp', + 'third_party/libxslt/libxslt.gyp', + 'third_party/lzma_sdk/lzma_sdk.gyp', + 'third_party/modp_b64/modp_b64.gyp', + 'third_party/npapi/npapi.gyp', + 'third_party/sqlite/sqlite.gyp', + 'third_party/zlib/zlib.gyp', + 'v8/tools/gyp/v8.gyp', + 'webkit/activex_shim/activex_shim.gyp', + 'webkit/activex_shim_dll/activex_shim_dll.gyp', + 'webkit/build/action_csspropertynames.py', + 'webkit/build/action_cssvaluekeywords.py', + 'webkit/build/action_jsconfig.py', + 'webkit/build/action_makenames.py', + 'webkit/build/action_maketokenizer.py', + 'webkit/build/action_useragentstylesheets.py', + 'webkit/build/rule_binding.py', + 'webkit/build/rule_bison.py', + 'webkit/build/rule_gperf.py', + 'webkit/tools/test_shell/test_shell.gyp', + 'webkit/webkit.gyp', +] + + +def Main(argv): + if len(argv) != 3 or argv[1] not in ['push', 'pull']: + print 'Usage: %s push/pull PATH_TO_CHROME' % argv[0] + return 1 + + path_to_chrome = argv[2] + + for g in gyps: + chrome_file = os.path.join(path_to_chrome, g) + local_file = os.path.join(os.path.dirname(argv[0]), os.path.split(g)[1]) + if argv[1] == 'push': + print 'Copying %s to %s' % (local_file, chrome_file) + shutil.copyfile(local_file, chrome_file) + elif argv[1] == 'pull': + print 'Copying %s to %s' % (chrome_file, local_file) + shutil.copyfile(chrome_file, local_file) + else: + assert False + + return 0 + + +if __name__ == '__main__': + sys.exit(Main(sys.argv)) diff --git a/deps/npm/node_modules/node-gyp/legacy/tools/gyp/samples/samples.bat b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/samples/samples.bat new file mode 100644 index 0000000000..778d9c90f0 --- /dev/null +++ 
b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/samples/samples.bat
@@ -0,0 +1,5 @@
+@rem Copyright (c) 2009 Google Inc. All rights reserved.
+@rem Use of this source code is governed by a BSD-style license that can be
+@rem found in the LICENSE file.
+
+@python %~dp0/samples %*
diff --git a/deps/npm/node_modules/node-gyp/legacy/tools/gyp/setup.py b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/setup.py
new file mode 100755
index 0000000000..ed2b41a3c3
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/setup.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from distutils.core import setup
+from distutils.command.install import install
+from distutils.command.install_lib import install_lib
+from distutils.command.install_scripts import install_scripts
+
+setup(
+  name='gyp',
+  version='0.1',
+  description='Generate Your Projects',
+  author='Chromium Authors',
+  author_email='chromium-dev@googlegroups.com',
+  url='http://code.google.com/p/gyp',
+  package_dir = {'': 'pylib'},
+  packages=['gyp', 'gyp.generator'],
+
+  scripts = ['gyp'],
+  cmdclass = {'install': install,
+              'install_lib': install_lib,
+              'install_scripts': install_scripts},
+)
diff --git a/deps/npm/node_modules/node-gyp/legacy/tools/gyp/tools/README b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/tools/README
new file mode 100644
index 0000000000..712e4efbb7
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/tools/README
@@ -0,0 +1,15 @@
+pretty_vcproj:
+  Usage: pretty_vcproj.py "c:\path\to\vcproj.vcproj" [key1=value1] [key2=value2]
+
+  The key/value pairs are used to resolve vsprops names.
+
+  For example, if I want to diff the base.vcproj project:
+
+  pretty_vcproj.py z:\dev\src-chrome\src\base\build\base.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > original.txt
+  pretty_vcproj.py z:\dev\src-chrome\src\base\base_gyp.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > gyp.txt
+
+  And you can use your favorite diff tool to see the changes.
+
+  Note: In the case of base.vcproj, the original vcproj is one level up from the generated one.
+  I suggest you do a search and replace for '"..\' and replace it with '"' in original.txt
+  before you perform the diff.
\ No newline at end of file
diff --git a/deps/npm/node_modules/node-gyp/legacy/tools/gyp/tools/Xcode/README b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/tools/Xcode/README
new file mode 100644
index 0000000000..2492a2c2f8
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/tools/Xcode/README
@@ -0,0 +1,5 @@
+Specifications contains syntax formatters for Xcode 3. These do not appear to be supported yet on Xcode 4. To use these with Xcode 3, please install both the gyp.pbfilespec and gyp.xclangspec files in
+
+~/Library/Application Support/Developer/Shared/Xcode/Specifications/
+
+and restart Xcode.
\ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/legacy/tools/gyp/tools/Xcode/Specifications/gyp.pbfilespec b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/tools/Xcode/Specifications/gyp.pbfilespec new file mode 100644 index 0000000000..85e2e268a5 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/tools/Xcode/Specifications/gyp.pbfilespec @@ -0,0 +1,27 @@ +/* + gyp.pbfilespec + GYP source file spec for Xcode 3 + + There is not much documentation available regarding the format + of .pbfilespec files. As a starting point, see for instance the + outdated documentation at: + http://maxao.free.fr/xcode-plugin-interface/specifications.html + and the files in: + /Developer/Library/PrivateFrameworks/XcodeEdit.framework/Versions/A/Resources/ + + Place this file in directory: + ~/Library/Application Support/Developer/Shared/Xcode/Specifications/ +*/ + +( + { + Identifier = sourcecode.gyp; + BasedOn = sourcecode; + Name = "GYP Files"; + Extensions = ("gyp", "gypi"); + MIMETypes = ("text/gyp"); + Language = "xcode.lang.gyp"; + IsTextFile = YES; + IsSourceFile = YES; + } +) diff --git a/deps/npm/node_modules/node-gyp/legacy/tools/gyp/tools/Xcode/Specifications/gyp.xclangspec b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/tools/Xcode/Specifications/gyp.xclangspec new file mode 100644 index 0000000000..3b3506d319 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/tools/Xcode/Specifications/gyp.xclangspec @@ -0,0 +1,226 @@ +/* + Copyright (c) 2011 Google Inc. All rights reserved. + Use of this source code is governed by a BSD-style license that can be + found in the LICENSE file. + + gyp.xclangspec + GYP language specification for Xcode 3 + + There is not much documentation available regarding the format + of .xclangspec files. As a starting point, see for instance the + outdated documentation at: + http://maxao.free.fr/xcode-plugin-interface/specifications.html + and the files in: + /Developer/Library/PrivateFrameworks/XcodeEdit.framework/Versions/A/Resources/ + + Place this file in directory: + ~/Library/Application Support/Developer/Shared/Xcode/Specifications/ +*/ + +( + + { + Identifier = "xcode.lang.gyp.keyword"; + Syntax = { + Words = ( + "and", + "or", + " "%s"' % (src, dst) + + print '}' + + +def main(): + if len(sys.argv) < 2: + print >>sys.stderr, __doc__ + print >>sys.stderr + print >>sys.stderr, 'usage: %s target1 target2...' % (sys.argv[0]) + return 1 + + edges = LoadEdges('dump.json', sys.argv[1:]) + + WriteGraph(edges) + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/deps/npm/node_modules/node-gyp/legacy/tools/gyp/tools/pretty_gyp.py b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/tools/pretty_gyp.py new file mode 100755 index 0000000000..3749792aac --- /dev/null +++ b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/tools/pretty_gyp.py @@ -0,0 +1,154 @@ +#!/usr/bin/env python +# Copyright (c) 2011 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Pretty-prints the contents of a GYP file.""" + +import sys +import re + + +# Regex to remove comments when we're counting braces. +COMMENT_RE = re.compile(r'\s*#.*') + +# Regex to remove quoted strings when we're counting braces. +# It takes into account quoted quotes, and makes sure that the quotes match. +# NOTE: It does not handle quotes that span more than one line, or +# cases where an escaped quote is preceeded by an escaped backslash. 
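+# For example, in a line like  'defines': ['BRACE="{"'],  the quoted "{" is
+# stripped out before counting, so it cannot unbalance the indentation.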
+quote_re_str = r'(?P[\'"])(.*?)(? 0: + after = True + + # This catches the special case of a closing brace having something + # other than just whitespace ahead of it -- we don't want to + # unindent that until after this line is printed so it stays with + # the previous indentation level. + if cnt < 0 and closing_prefix_re.match(stripline): + after = True + return (cnt, after) + + +def prettyprint_input(lines): + """Does the main work of indenting the input based on the brace counts.""" + indent = 0 + basic_offset = 2 + last_line = "" + for line in lines: + if COMMENT_RE.match(line): + print line + else: + line = line.strip('\r\n\t ') # Otherwise doesn't strip \r on Unix. + if len(line) > 0: + (brace_diff, after) = count_braces(line) + if brace_diff != 0: + if after: + print " " * (basic_offset * indent) + line + indent += brace_diff + else: + indent += brace_diff + print " " * (basic_offset * indent) + line + else: + print " " * (basic_offset * indent) + line + else: + print "" + last_line = line + + +def main(): + if len(sys.argv) > 1: + data = open(sys.argv[1]).read().splitlines() + else: + data = sys.stdin.read().splitlines() + # Split up the double braces. + lines = split_double_braces(data) + + # Indent and print the output. + prettyprint_input(lines) + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/deps/npm/node_modules/node-gyp/legacy/tools/gyp/tools/pretty_sln.py b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/tools/pretty_sln.py new file mode 100755 index 0000000000..7013f2b660 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/tools/pretty_sln.py @@ -0,0 +1,168 @@ +#!/usr/bin/env python + +# Copyright (c) 2009 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Prints the information in a sln file in a diffable way. + + It first outputs each projects in alphabetical order with their + dependencies. + + Then it outputs a possible build order. +""" + +__author__ = 'nsylvain (Nicolas Sylvain)' + +import os +import re +import sys +import pretty_vcproj + +def BuildProject(project, built, projects, deps): + # if all dependencies are done, we can build it, otherwise we try to build the + # dependency. + # This is not infinite-recursion proof. + for dep in deps[project]: + if dep not in built: + BuildProject(dep, built, projects, deps) + print project + built.append(project) + +def ParseSolution(solution_file): + # All projects, their clsid and paths. + projects = dict() + + # A list of dependencies associated with a project. + dependencies = dict() + + # Regular expressions that matches the SLN format. + # The first line of a project definition. + begin_project = re.compile(('^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942' + '}"\) = "(.*)", "(.*)", "(.*)"$')) + # The last line of a project definition. + end_project = re.compile('^EndProject$') + # The first line of a dependency list. + begin_dep = re.compile('ProjectSection\(ProjectDependencies\) = postProject$') + # The last line of a dependency list. + end_dep = re.compile('EndProjectSection$') + # A line describing a dependency. + dep_line = re.compile(' *({.*}) = ({.*})$') + + in_deps = False + solution = open(solution_file) + for line in solution: + results = begin_project.search(line) + if results: + # Hack to remove icu because the diff is too different. + if results.group(1).find('icu') != -1: + continue + # We remove "_gyp" from the names because it helps to diff them. 
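+      # (e.g. "base_gyp.vcproj" then diffs cleanly against plain "base.vcproj").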
+      current_project = results.group(1).replace('_gyp', '')
+      projects[current_project] = [results.group(2).replace('_gyp', ''),
+                                   results.group(3),
+                                   results.group(2)]
+      dependencies[current_project] = []
+      continue
+
+    results = end_project.search(line)
+    if results:
+      current_project = None
+      continue
+
+    results = begin_dep.search(line)
+    if results:
+      in_deps = True
+      continue
+
+    results = end_dep.search(line)
+    if results:
+      in_deps = False
+      continue
+
+    results = dep_line.search(line)
+    if results and in_deps and current_project:
+      dependencies[current_project].append(results.group(1))
+      continue
+
+  # Change all dependencies from clsid to project name instead.
+  for project in dependencies:
+    # For each dependency in this project
+    new_dep_array = []
+    for dep in dependencies[project]:
+      # Look for the project name matching this clsid
+      for project_info in projects:
+        if projects[project_info][1] == dep:
+          new_dep_array.append(project_info)
+    dependencies[project] = sorted(new_dep_array)
+
+  return (projects, dependencies)
+
+def PrintDependencies(projects, deps):
+  print "---------------------------------------"
+  print "Dependencies for all projects"
+  print "---------------------------------------"
+  print "-- --"
+
+  for (project, dep_list) in sorted(deps.items()):
+    print "Project : %s" % project
+    print "Path : %s" % projects[project][0]
+    if dep_list:
+      for dep in dep_list:
+        print "  - %s" % dep
+    print ""
+
+  print "-- --"
+
+def PrintBuildOrder(projects, deps):
+  print "---------------------------------------"
+  print "Build order "
+  print "---------------------------------------"
+  print "-- --"
+
+  built = []
+  for (project, dep_list) in sorted(deps.items()):
+    if project not in built:
+      BuildProject(project, built, projects, deps)
+
+  print "-- --"
+
+def PrintVCProj(projects):
+
+  for project in projects:
+    print "-------------------------------------"
+    print "-------------------------------------"
+    print project
+    print project
+    print project
+    print "-------------------------------------"
+    print "-------------------------------------"
+
+    project_path = os.path.abspath(os.path.join(os.path.dirname(sys.argv[1]),
+                                                projects[project][2]))
+
+    pretty = pretty_vcproj
+    argv = [ '',
+             project_path,
+             '$(SolutionDir)=%s\\' % os.path.dirname(sys.argv[1]),
+           ]
+    argv.extend(sys.argv[3:])
+    pretty.main(argv)
+
+def main():
+  # Check that we have at least 1 parameter.
+  if len(sys.argv) < 2:
+    print 'Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0]
+    return 1
+
+  (projects, deps) = ParseSolution(sys.argv[1])
+  PrintDependencies(projects, deps)
+  PrintBuildOrder(projects, deps)
+
+  if '--recursive' in sys.argv:
+    PrintVCProj(projects)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/deps/npm/node_modules/node-gyp/legacy/tools/gyp/tools/pretty_vcproj.py b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/tools/pretty_vcproj.py
new file mode 100755
index 0000000000..ba65bf2386
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/legacy/tools/gyp/tools/pretty_vcproj.py
@@ -0,0 +1,330 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Make the format of a vcproj really pretty.
+
+   This script normalizes and sorts an xml. It also fetches all the properties
+   inside linked vsprops and includes them explicitly in the vcproj.
+
+   It outputs the resulting xml to stdout.
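+
+   Usage: pretty_vcproj.py "c:\\path\\to\\vcproj.vcproj" [key1=value1] [key2=value2]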
+""" + +__author__ = 'nsylvain (Nicolas Sylvain)' + +import os +import sys + +from xml.dom.minidom import parse +from xml.dom.minidom import Node + +REPLACEMENTS = dict() +ARGUMENTS = None + + +class CmpTuple(object): + """Compare function between 2 tuple.""" + def __call__(self, x, y): + (key1, value1) = x + (key2, value2) = y + return cmp(key1, key2) + + +class CmpNode(object): + """Compare function between 2 xml nodes.""" + + def get_string(self, node): + node_string = "node" + node_string += node.nodeName + if node.nodeValue: + node_string += node.nodeValue + + if node.attributes: + # We first sort by name, if present. + node_string += node.getAttribute("Name") + + all_nodes = [] + for (name, value) in node.attributes.items(): + all_nodes.append((name, value)) + + all_nodes.sort(CmpTuple()) + for (name, value) in all_nodes: + node_string += name + node_string += value + + return node_string + + def __call__(self, x, y): + return cmp(self.get_string(x), self.get_string(y)) + + +def PrettyPrintNode(node, indent=0): + if node.nodeType == Node.TEXT_NODE: + if node.data.strip(): + print '%s%s' % (' '*indent, node.data.strip()) + return + + if node.childNodes: + node.normalize() + # Get the number of attributes + attr_count = 0 + if node.attributes: + attr_count = node.attributes.length + + # Print the main tag + if attr_count == 0: + print '%s<%s>' % (' '*indent, node.nodeName) + else: + print '%s<%s' % (' '*indent, node.nodeName) + + all_attributes = [] + for (name, value) in node.attributes.items(): + all_attributes.append((name, value)) + all_attributes.sort(CmpTuple()) + for (name, value) in all_attributes: + print '%s %s="%s"' % (' '*indent, name, value) + print '%s>' % (' '*indent) + if node.nodeValue: + print '%s %s' % (' '*indent, node.nodeValue) + + for sub_node in node.childNodes: + PrettyPrintNode(sub_node, indent=indent+2) + print '%s' % (' '*indent, node.nodeName) + + +def FlattenFilter(node): + """Returns a list of all the node and sub nodes.""" + node_list = [] + + if (node.attributes and + node.getAttribute('Name') == '_excluded_files'): + # We don't add the "_excluded_files" filter. + return [] + + for current in node.childNodes: + if current.nodeName == 'Filter': + node_list.extend(FlattenFilter(current)) + else: + node_list.append(current) + + return node_list + + +def FixFilenames(filenames, current_directory): + new_list = [] + for filename in filenames: + if filename: + for key in REPLACEMENTS: + filename = filename.replace(key, REPLACEMENTS[key]) + os.chdir(current_directory) + filename = filename.strip('"\' ') + if filename.startswith('$'): + new_list.append(filename) + else: + new_list.append(os.path.abspath(filename)) + return new_list + + +def AbsoluteNode(node): + """Makes all the properties we know about in this node absolute.""" + if node.attributes: + for (name, value) in node.attributes.items(): + if name in ['InheritedPropertySheets', 'RelativePath', + 'AdditionalIncludeDirectories', + 'IntermediateDirectory', 'OutputDirectory', + 'AdditionalLibraryDirectories']: + # We want to fix up these paths + path_list = value.split(';') + new_list = FixFilenames(path_list, os.path.dirname(ARGUMENTS[1])) + node.setAttribute(name, ';'.join(new_list)) + if not value: + node.removeAttribute(name) + + +def CleanupVcproj(node): + """For each sub node, we call recursively this function.""" + for sub_node in node.childNodes: + AbsoluteNode(sub_node) + CleanupVcproj(sub_node) + + # Normalize the node, and remove all extranous whitespaces. 
+  for sub_node in node.childNodes:
+    if sub_node.nodeType == Node.TEXT_NODE:
+      sub_node.data = sub_node.data.replace("\r", "")
+      sub_node.data = sub_node.data.replace("\n", "")
+      sub_node.data = sub_node.data.rstrip()
+
+  # Sort all the semicolon-separated attributes, and also
+  # remove the duplicates.
+  if node.attributes:
+    for (name, value) in node.attributes.items():
+      sorted_list = sorted(value.split(';'))
+      unique_list = []
+      [unique_list.append(i) for i in sorted_list if not unique_list.count(i)]
+      node.setAttribute(name, ';'.join(unique_list))
+      if not value:
+        node.removeAttribute(name)
+
+  if node.childNodes:
+    node.normalize()
+
+  # For each node, take a copy, and remove it from the list.
+  node_array = []
+  while node.childNodes and node.childNodes[0]:
+    # Take a copy of the node and remove it from the list.
+    current = node.childNodes[0]
+    node.removeChild(current)
+
+    # If the child is a filter, we want to append all its children
+    # to this same list.
+    if current.nodeName == 'Filter':
+      node_array.extend(FlattenFilter(current))
+    else:
+      node_array.append(current)
+
+
+  # Sort the list.
+  node_array.sort(CmpNode())
+
+  # Insert the nodes in the correct order.
+  for new_node in node_array:
+    # But don't append empty tool nodes.
+    if new_node.nodeName == 'Tool':
+      if new_node.attributes and new_node.attributes.length == 1:
+        # This one was empty.
+        continue
+    if new_node.nodeName == 'UserMacro':
+      continue
+    node.appendChild(new_node)
+
+
+def GetConfiguationNodes(vcproj):
+  #TODO(nsylvain): Find a better way to navigate the xml.
+  nodes = []
+  for node in vcproj.childNodes:
+    if node.nodeName == "Configurations":
+      for sub_node in node.childNodes:
+        if sub_node.nodeName == "Configuration":
+          nodes.append(sub_node)
+
+  return nodes
+
+
+def GetChildrenVsprops(filename):
+  dom = parse(filename)
+  if dom.documentElement.attributes:
+    vsprops = dom.documentElement.getAttribute('InheritedPropertySheets')
+    return FixFilenames(vsprops.split(';'), os.path.dirname(filename))
+  return []
+
+def SeekToNode(node1, child2):
+  # A text node does not have properties.
+  if child2.nodeType == Node.TEXT_NODE:
+    return None
+
+  # Get the name of the current node.
+  current_name = child2.getAttribute("Name")
+  if not current_name:
+    # There is no name. We don't know how to merge.
+    return None
+
+  # Look through all the nodes to find a match.
+  for sub_node in node1.childNodes:
+    if sub_node.nodeName == child2.nodeName:
+      name = sub_node.getAttribute("Name")
+      if name == current_name:
+        return sub_node
+
+  # No match. We give up.
+  return None
+
+
+def MergeAttributes(node1, node2):
+  # No attributes to merge?
+  if not node2.attributes:
+    return
+
+  for (name, value2) in node2.attributes.items():
+    # Don't merge the 'Name' attribute.
+    if name == 'Name':
+      continue
+    value1 = node1.getAttribute(name)
+    if value1:
+      # The attribute exists in the main node. If it's equal, we leave it
+      # untouched; otherwise we concatenate it.
+      if value1 != value2:
+        node1.setAttribute(name, ';'.join([value1, value2]))
+    else:
+      # The attribute does not exist in the main node. We append this one.
+      node1.setAttribute(name, value2)
+
+    # If the attribute was a property sheet attribute, we remove it, since
+    # it is useless at this point.
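+    # (the vsprops contents it points to have already been merged into node1.)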
+    if name == 'InheritedPropertySheets':
+      node1.removeAttribute(name)
+
+
+def MergeProperties(node1, node2):
+  MergeAttributes(node1, node2)
+  for child2 in node2.childNodes:
+    child1 = SeekToNode(node1, child2)
+    if child1:
+      MergeProperties(child1, child2)
+    else:
+      node1.appendChild(child2.cloneNode(True))
+
+
+def main(argv):
+  """Main function of this vcproj prettifier."""
+  global REPLACEMENTS
+  global ARGUMENTS
+  ARGUMENTS = argv
+
+  # Check that we have at least 1 parameter.
+  if len(argv) < 2:
+    print ('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] '
+           '[key2=value2]' % argv[0])
+    return 1
+
+  # Parse the keys
+  for i in range(2, len(argv)):
+    (key, value) = argv[i].split('=')
+    REPLACEMENTS[key] = value
+
+  # Open the vcproj and parse the xml.
+  dom = parse(argv[1])
+
+  # First thing we need to do is find the Configuration nodes and merge them
+  # with the vsprops they include.
+  for configuration_node in GetConfiguationNodes(dom.documentElement):
+    # Get the property sheets associated with this configuration.
+    vsprops = configuration_node.getAttribute('InheritedPropertySheets')
+
+    # Fix the filenames to be absolute.
+    vsprops_list = FixFilenames(vsprops.strip().split(';'),
+                                os.path.dirname(argv[1]))
+
+    # Extend the list of vsprops with all vsprops contained in the current
+    # vsprops.
+    for current_vsprops in vsprops_list:
+      vsprops_list.extend(GetChildrenVsprops(current_vsprops))
+
+    # Now that we have all the vsprops, we need to merge them.
+    for current_vsprops in vsprops_list:
+      MergeProperties(configuration_node,
+                      parse(current_vsprops).documentElement)
+
+  # Now that everything is merged, we need to clean up the xml.
+  CleanupVcproj(dom.documentElement)
+
+  # Finally, we use the pretty xml function to print the vcproj back to the
+  # user.
+ #print dom.toprettyxml(newl="\n") + PrettyPrintNode(dom.documentElement) + return 0 + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) diff --git a/deps/npm/node_modules/node-gyp/legacy/tools/gyp_addon b/deps/npm/node_modules/node-gyp/legacy/tools/gyp_addon new file mode 100755 index 0000000000..b6f2d643b2 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/legacy/tools/gyp_addon @@ -0,0 +1,42 @@ +#!/usr/bin/env python +import os +import sys + +script_dir = os.path.dirname(__file__) +node_root = os.path.abspath(os.path.join(script_dir, os.pardir)) +module_root = os.getcwd() +if sys.platform == 'win32': + output_dir = os.path.join(module_root, 'build') +else: + output_dir = 'build' + +sys.path.insert(0, os.path.join(node_root, 'tools', 'gyp', 'pylib')) +import gyp + +if __name__ == '__main__': + args = sys.argv[1:] + addon_gypi = os.path.join(node_root, 'tools', 'addon.gypi') + common_gypi = os.path.join(node_root, 'common.gypi') + config_gypi = os.path.join(node_root, 'config.gypi') + args.extend(['-I', addon_gypi]) + args.extend(['-I', common_gypi]) + if os.path.exists(config_gypi): + args.extend(['-I', config_gypi]) + args.extend(['-Dlibrary=shared_library']) + args.extend(['-Dvisibility=default']) + args.extend(['-Dnode_root_dir=%s' % node_root]) + args.extend(['-Dmodule_root_dir=%s' % module_root]) + args.extend(['--depth=.']); + + # Tell gyp to write the Makefile/Solution files into output_dir + args.extend(['--generator-output', output_dir]) + + # Tell make to write its output into the same dir + args.extend(['-Goutput_dir=.']) + + gyp_args = list(args) + rc = gyp.main(gyp_args) + if rc != 0: + print 'Error running GYP' + sys.exit(rc) + diff --git a/deps/npm/node_modules/node-gyp/lib/build.js b/deps/npm/node_modules/node-gyp/lib/build.js new file mode 100644 index 0000000000..f6c63a6f7b --- /dev/null +++ b/deps/npm/node_modules/node-gyp/lib/build.js @@ -0,0 +1,178 @@ + +module.exports = exports = build + +/** + * Module dependencies. + */ + +var fs = require('fs') + , path = require('path') + , glob = require('glob') + , which = require('which') + , asyncEmit = require('./util/asyncEmit') + , createHook = require('./util/hook') + , win = process.platform == 'win32' + +exports.usage = 'Invokes `' + (win ? 'msbuild' : 'make') + '` and builds the module' + +function build (gyp, argv, callback) { + + gyp.verbose('build args', argv) + var command = win ? 'msbuild' : 'make' + , buildDir = path.resolve('build') + , configPath = path.resolve(buildDir, 'config.gypi') + , config + , emitter + + createHook('gyp-build.js', function (err, _e) { + if (err) return callback(err) + emitter = _e + loadConfigGypi() + }) + + /** + * Load the "config.gypi" file that was generated during "configure". + */ + + function loadConfigGypi () { + fs.readFile(configPath, 'utf8', function (err, data) { + if (err) return callback(err) + config = JSON.parse(data.replace(/\#.+\n/, '')) + if (win) { + findSolutionFile() + } else { + doWhich() + } + }) + } + + /** + * On Windows, find first build/*.sln file. + */ + + function findSolutionFile () { + glob('build/*.sln', function (err, files) { + if (err) return callback(err) + if (files.length === 0) { + return callback(new Error('Could not find *.sln file. 
Did you run "configure"?')) + } + guessedSolution = files[0] + gyp.verbose('found first Solution file', guessedSolution) + doWhich() + }) + } + + function doWhich () { + // First make sure we have the build command in the PATH + which(command, function (err, execPath) { + if (err) { + if (win && /not found/.test(err.message)) { + // On windows and no 'msbuild' found. Let's guess where it is + guessMsbuild() + } else { + // Some other error or 'make' not found on Unix, report that to the user + callback(err) + } + return + } + gyp.verbose('`which` succeeded for `' + command + '`', execPath) + build() + }) + } + + /** + * Guess the location of the "msbuild.exe" file on Windows. + */ + + function guessMsbuild () { + gyp.verbose('could not find "msbuild.exe". guessing location') + // This is basically just hard-coded. If this causes problems + // then we'll think of something more clever. + var windir = process.env.WINDIR || process.env.SYSTEMROOT || 'C:\\WINDOWS' + , frameworkDir = path.resolve(windir, 'Microsoft.NET', 'Framework') + , versionDir = path.resolve(frameworkDir, 'v4.0.30319') // This is probably the most brittle part... + , msbuild = path.resolve(versionDir, 'msbuild.exe') + fs.stat(msbuild, function (err, stat) { + if (err) { + if (err.code == 'ENOENT') { + callback(new Error('Can\'t find "msbuild.exe". Do you have Microsoft Visual Studio C++ 2010 installed?')) + } else { + callback(err) + } + return + } + command = msbuild + build() + }) + } + + /** + * Actually spawn the process and compile the module. + */ + + function build () { + var buildType = config.target_defaults.default_configuration + , platform = config.target_arch == 'x64' ? '64' : '32' + + if (gyp.opts.debug) { + buildType = 'Debug' + } + + // Enable Verbose build + if (!win && gyp.opts.verbose) { + argv.push('V=1') + } + if (win && !gyp.opts.verbose) { + argv.push('/clp:Verbosity=minimal') + } + + // Turn off the Microsoft logo on Windows + if (win) { + argv.push('/nologo') + } + + // Specify the build type, Release by default + if (win) { + argv.push('/p:Configuration=' + buildType + ';Platform=Win' + platform) + } else { + argv.push('BUILDTYPE=' + buildType) + // Invoke the Makefile in the 'build' dir. + argv.push('-C') + argv.push('build') + } + + if (win) { + // did the user specify their own .sln file? + var hasSln = argv.some(function (arg) { + return path.extname(arg) == '.sln' + }) + if (!hasSln) { + argv.unshift(gyp.opts.solution || guessedSolution) + } + } + + asyncEmit(emitter, 'before', function (err) { + if (err) return callback(err) + var proc = gyp.spawn(command, argv) + proc.on('exit', onExit) + }) + } + + /** + * Invoked after the make/msbuild command exits. + */ + + function onExit (code, signal) { + asyncEmit(emitter, 'after', function (err) { + if (err) return callback(err) + if (code !== 0) { + return callback(new Error('`' + command + '` failed with exit code: ' + code)) + } + if (signal) { + return callback(new Error('`' + command + '` got signal: ' + signal)) + } + callback() + }) + } + +} diff --git a/deps/npm/node_modules/node-gyp/lib/clean.js b/deps/npm/node_modules/node-gyp/lib/clean.js new file mode 100644 index 0000000000..a8efc1bd75 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/lib/clean.js @@ -0,0 +1,42 @@ + +module.exports = exports = clean + +exports.usage = 'Removes any generated build files and the "out" dir' + +/** + * Module dependencies. 
+ */ + +var rm = require('rimraf') + , asyncEmit = require('./util/asyncEmit') + , createHook = require('./util/hook') + + +function clean (gyp, argv, callback) { + + // Remove the 'build' dir + var buildDir = 'build' + , emitter + + createHook('gyp-clean.js', function (err, _e) { + if (err) return callback(err) + emitter = _e + asyncEmit(emitter, 'before', function (err) { + if (err) return callback(err) + doClean() + }) + }) + + function doClean () { + gyp.verbose('removing "build" directory') + rm(buildDir, after) + } + + function after () { + asyncEmit(emitter, 'after', function (err) { + if (err) return callback(err) + callback() + }) + } + +} diff --git a/deps/npm/node_modules/node-gyp/lib/configure.js b/deps/npm/node_modules/node-gyp/lib/configure.js new file mode 100644 index 0000000000..ac204b15e8 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/lib/configure.js @@ -0,0 +1,201 @@ + +module.exports = exports = configure + +/** + * Module dependencies. + */ + +var fs = require('fs') + , path = require('path') + , glob = require('glob') + , which = require('which') + , semver = require('semver') + , mkdirp = require('./util/mkdirp') + , createHook = require('./util/hook') + , asyncEmit = require('./util/asyncEmit') + , win = process.platform == 'win32' + +exports.usage = 'Generates ' + (win ? 'MSVC project files' : 'a Makefile') + ' for the current module' + +function configure (gyp, argv, callback) { + + var python = gyp.opts.python || 'python' + , buildDir = path.resolve('build') + , configPath + , emitter + , versionStr + , version + + // Very first step is to load up the user-defined 'gyp-configure.js' file if it + // exists. We relay filecycle events using the eventemitter returned from this + createHook('gyp-configure.js', function (err, _e) { + if (err) return callback(err) + emitter = _e + checkPython() + }) + + // Make sure that Python is in the $PATH + function checkPython () { + which(python, function (err, execPath) { + if (err) { + if (win) { + guessPython() + } else { + failNoPython() + } + return + } + gyp.verbose('`which` succeeded for `' + python + '`', execPath) + getVersion() + }) + } + + // Called on Windows when "python" isn't available in the current $PATH. + // We're gonna glob C:\python2* + function guessPython () { + gyp.verbose('could not find "' + python + '". 
guessing location')
+    var rootDir = process.env.HOMEDIR || process.env.SystemDrive || 'C:\\'
+    if (rootDir[rootDir.length - 1] !== '\\') {
+      rootDir += '\\'
+    }
+    var pythonPath = path.resolve(rootDir, 'Python27', 'python.exe')
+    gyp.verbose('ensuring that file exists:', pythonPath)
+    fs.stat(pythonPath, function (err, stat) {
+      if (err) {
+        if (err.code == 'ENOENT') {
+          failNoPython()
+        } else {
+          callback(err)
+        }
+        return
+      }
+      python = pythonPath
+      getVersion()
+    })
+  }
+
+  function failNoPython () {
+    callback(new Error('Python does not seem to be installed'))
+  }
+
+  function getVersion () {
+    if (gyp.opts.target) {
+      // if --target was given, then ensure that version is installed
+      versionStr = gyp.opts.target
+      gyp.verbose('compiling against --target node version', versionStr)
+    } else {
+      // if no --target was specified then use the current host node version
+      versionStr = process.version
+      gyp.verbose('no --target version specified, falling back to host node version', versionStr)
+    }
+    version = semver.parse(versionStr)
+    if (!version) {
+      return callback(new Error('Invalid version number: ' + versionStr))
+    }
+    version = version.slice(1, 4).join('.')
+    gyp.opts.ensure = true
+    gyp.commands.install([ version ], createBuildDir)
+  }
+
+  function createBuildDir (err) {
+    if (err) return callback(err)
+    gyp.verbose('attempting to create "build" dir', buildDir)
+    mkdirp(buildDir, function (err, isNew) {
+      if (err) return callback(err)
+      gyp.verbose('"build" dir needed to be created?', isNew)
+      createConfigFile()
+    })
+  }
+
+  function createConfigFile (err) {
+    if (err) return callback(err)
+    gyp.verbose('creating build/config.gypi file')
+
+    var config = {}
+    configPath = path.resolve(buildDir, 'config.gypi')
+
+    config.target_defaults = {
+        cflags: []
+      , default_configuration: gyp.opts.debug ? 'Debug' : 'Release'
+      , defines: []
+      , include_dirs: []
+      , libraries: []
+    }
+
+    config.variables = {
+      target_arch: gyp.opts.arch || process.arch || 'ia32'
+    }
+
+    var prefix = '# Do not edit. File was generated by node-gyp\'s "configure" step'
+      , json = JSON.stringify(config, null, 2)
+    gyp.verbose('writing out config file', configPath)
+    fs.writeFile(configPath, [prefix, json, ''].join('\n'), runGypAddon)
+  }
+
+  function runGypAddon (err) {
+    if (err) return callback(err)
+
+    var devDir = path.resolve(process.env.HOME, '.node-gyp', version)
+      , gyp_addon = path.resolve(devDir, 'tools', 'gyp_addon')
+
+    if (!~argv.indexOf('-f') && !~argv.indexOf('--format')) {
+      if (win) {
+        gyp.verbose('gyp format was not specified; forcing "msvs"')
+        // force the 'msvs' generator for Windows
+        argv.push('-f', 'msvs')
+      } else {
+        gyp.verbose('gyp format was not specified; forcing "make"')
+        // force the 'make' generator for non-Windows
+        argv.push('-f', 'make')
+      }
+    }
+
+    function hasMsvsVersion () {
+      return argv.some(function (arg) {
+        return arg.indexOf('msvs_version') === 0
+      })
+    }
+
+    if (win && !hasMsvsVersion()) {
+      if ('msvs_version' in gyp.opts) {
+        argv.push('-G', 'msvs_version=' + gyp.opts.msvs_version)
+      } else {
+        argv.push('-G', 'msvs_version=2010')
+      }
+    }
+
+    // include the "config.gypi" file that was generated
+    argv.unshift('-I' + configPath)
+
+    // enforce use of the "binding.gyp" file
+    argv.unshift('binding.gyp')
+
+    // execute `gyp_addon` from the current target node version
+    argv.unshift(gyp_addon)
+
+    asyncEmit(emitter, 'before', function (err) {
+      if (err) return callback(err)
+
+      var cp = gyp.spawn(python, argv)
+      cp.on('exit', onCpExit)
+    })
+  }
+
+  /**
+   * Called when the `gyp_addon` child process exits.
+   */
+
+  function onCpExit (code, signal) {
+    asyncEmit(emitter, 'after', function (err) {
+      if (err) {
+        callback(err)
+      } else if (code !== 0) {
+        callback(new Error('`gyp_addon` failed with exit code: ' + code))
+      } else {
+        // we're done
+        callback()
+      }
+    })
+  }
+
+}
diff --git a/deps/npm/node_modules/node-gyp/lib/install.js b/deps/npm/node_modules/node-gyp/lib/install.js
new file mode 100644
index 0000000000..e064fbc14f
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/lib/install.js
@@ -0,0 +1,322 @@
+
+module.exports = exports = install
+
+exports.usage = 'Install node development files for the specified node version.'
+
+/**
+ * Module dependencies.
+ */ + +var fs = require('fs') + , tar = require('tar') + , rm = require('rimraf') + , path = require('path') + , zlib = require('zlib') + , mkdir = require('mkdirp') + , semver = require('semver') + , fstream = require('fstream') + , request = require('request') + , minimatch = require('minimatch') + , distUrl = 'http://nodejs.org/dist' + , win = process.platform == 'win32' + +function install (gyp, argv, callback) { + + // ensure no double-callbacks happen + function cb (err) { + if (cb.done) return + cb.done = true + if (err) { + gyp.verbose('got an error, rolling back install') + // roll-back the install if anything went wrong + gyp.commands.remove([ version ], function (err2) { + callback(err) + }) + } else { + callback.apply(null, arguments) + } + } + + + // Determine which node dev files version we are installing + var versionStr = argv[0] || gyp.opts.target || process.version + gyp.verbose('input version string', versionStr) + + // parse the version to normalize and ensure it's valid + var version = semver.parse(versionStr) + if (!version) { + return callback(new Error('Invalid version number: ' + versionStr)) + } + + // "legacy" versions are 0.7 and 0.6 + var isLegacy = semver.lt(versionStr, '0.8.0') + gyp.verbose('installing legacy version?', isLegacy) + + if (semver.lt(versionStr, '0.6.0')) { + return callback(new Error('Minimum target version is `0.6` or greater. Got: ' + versionStr)) + } + + // 0.x.y-pre versions are not published yet. Use previous release. + if (version[5] === '-pre') { + version[3] = +version[3] - 1 + version[5] = null + } + + // flatten version into String + version = version.slice(1, 4).join('.') + gyp.verbose('installing version', version) + + + // TODO: Make ~/.node-gyp configurable + var devDir = path.resolve(process.env.HOME, '.node-gyp', version) + + // If '--ensure' was passed, then don't *always* install the version, + // check if it is already installed, and only install when needed + if (gyp.opts.ensure) { + gyp.verbose('--ensure was passed, so won\'t reinstall if already installed') + fs.stat(devDir, function (err, stat) { + if (err) { + if (err.code == 'ENOENT') { + gyp.verbose('version not already installed, continuing with install', version) + go() + } else { + callback(err) + } + return + } + gyp.verbose('version is already installed, need to check "installVersion"') + var installVersionFile = path.resolve(devDir, 'installVersion') + fs.readFile(installVersionFile, 'ascii', function (err, ver) { + if (err && err.code != 'ENOENT') { + return callback(err) + } + var installVersion = parseInt(ver, 10) || 0 + gyp.verbose('got "installVersion":', installVersion) + gyp.verbose('needs "installVersion":', gyp.package.installVersion) + if (installVersion < gyp.package.installVersion) { + gyp.verbose('version is no good; reinstalling') + go() + } else { + gyp.verbose('version is good') + callback() + } + }) + }) + } else { + go() + } + + + function go () { + + // first create the dir for the node dev files + mkdir(devDir, function (err) { + if (err) return cb(err) + + // TODO: Detect if it was actually created or if it already existed + gyp.verbose('created:', devDir) + + // now download the node tarball + var tarballUrl = distUrl + '/v' + version + '/node-v' + version + '.tar.gz' + , badDownload = false + , extractCount = 0 + , parser = tar.Parse() + + gyp.info('downloading:', tarballUrl) + + var requestOpts = { uri: tarballUrl } + var proxyUrl = gyp.opts.proxy || process.env.http_proxy || process.env.HTTP_PROXY + if (proxyUrl) { + gyp.verbose('using 
proxy:', proxyUrl) + requestOpts.proxy = proxyUrl + } + + request(requestOpts, downloadError) + .pipe(zlib.createGunzip()) + .pipe(parser) + parser.on('entry', onEntry) + parser.on('end', afterTarball) + + // something went wrong downloading the tarball? + function downloadError (err, res) { + if (err || res.statusCode != 200) { + badDownload = true + cb(err || new Error(res.statusCode + ' status code downloading tarball')) + } + } + + // handle a file from the tarball + function onEntry (entry) { + extractCount++ + + var filename = entry.props.path + , trimmed = install.trim(filename) + + if (!valid(trimmed)) { + // skip + return + } + + var dir = path.dirname(trimmed) + , devFileDir = path.resolve(devDir, dir) + , devFile = path.resolve(devDir, trimmed) + + if (dir !== '.') { + // TODO: async + // TODO: keep track of the dirs that have been created/checked so far + //console.error(devFileDir) + mkdir.sync(devFileDir) + } + // TODO: better "File" detection or use `fstream` + if (entry.props.type !== '0') { + return + } + //console.error(trimmed, entry.props) + + // Finally save the file to the filesystem + // TODO: Figure out why pipe() hangs here or use `fstream` + var ws = fs.createWriteStream(devFile, { + mode: entry.props.mode + }) + entry.on('data', function (b) { + ws.write(b) + }) + entry.on('end', function () { + ws.end() + gyp.verbose('saved file', devFile) + }) + + } + + function afterTarball () { + if (badDownload) return + if (extractCount === 0) { + return cb(new Error('There was a fatal problem while downloading the tarball')) + } + gyp.verbose('done parsing tarball') + var async = 0 + + if (isLegacy) { + // copy over the files from the `legacy` dir + async++ + copyLegacy(deref) + } + + if (win) { + // need to download node.lib + async++ + downloadNodeLib(deref) + } + + // write the "installVersion" file + async++ + var installVersionPath = path.resolve(devDir, 'installVersion') + fs.writeFile(installVersionPath, gyp.package.installVersion + '\n', deref) + + if (async === 0) { + // no async tasks required + cb() + } + + function deref (err) { + if (err) return cb(err) + --async || cb() + } + } + + function copyLegacy (done) { + // legacy versions of node (< 0.8) require the legacy files to be copied + // over since they contain many bugfixes from the current node build system + gyp.verbose('copying "legacy" gyp configuration files for version', version) + + var legacyDir = path.resolve(__dirname, '..', 'legacy') + gyp.verbose('using "legacy" dir', legacyDir) + gyp.verbose('copying to "dev" dir', devDir) + + var reader = fstream.Reader({ path: legacyDir, type: 'Directory' }) + , writer = fstream.Writer({ path: devDir, type: 'Directory' }) + + reader.on('entry', function onEntry (entry) { + gyp.verbose('reading entry', entry.path) + entry.on('entry', onEntry) + }) + + reader.on('error', done) + writer.on('error', done) + + // Like `cp -rpf` + reader.pipe(writer) + + reader.on('end', done) + } + + function downloadNodeLib (done) { + gyp.verbose('on Windows; need to download `node.lib`...') + // TODO: windows 64-bit support + var releaseDir = path.resolve(devDir, 'Release') + , debugDir = path.resolve(devDir, 'Debug') + , nodeLibUrl = distUrl + '/v' + version + '/node.lib' + + gyp.verbose('Release dir', releaseDir) + gyp.verbose('Debug dir', debugDir) + gyp.verbose('`node.lib` url', nodeLibUrl) + // TODO: parallelize mkdirs + mkdir(releaseDir, function (err) { + if (err) return done(err) + mkdir(debugDir, function (err) { + if (err) return done(err) + gyp.info('downloading 
`node.lib`', nodeLibUrl) + // TODO: clean this mess up, written in a hastemode-9000 + var badDownload = false + var res = request(nodeLibUrl, function (err, res) { + if (err || res.statusCode != 200) { + badDownload = true + done(err || new Error(res.statusCode + ' status code downloading node.lib')) + } + }) + var releaseDirNodeLib = path.resolve(releaseDir, 'node.lib') + , debugDirNodeLib = path.resolve(debugDir, 'node.lib') + , rws = fs.createWriteStream(releaseDirNodeLib) + , dws = fs.createWriteStream(debugDirNodeLib) + gyp.verbose('streaming to', releaseDirNodeLib) + gyp.verbose('streaming to', debugDirNodeLib) + res.pipe(rws) + res.pipe(dws) + res.on('end', function () { + if (badDownload) return + done() + }) + }) + }) + } + + + }) + + } + + /** + * Checks if a given filename is "valid" for this installation. + */ + + function valid (file) { + // header files + return minimatch(file, 'src/*.h') + || minimatch(file, 'deps/**/*.h') + // non-legacy versions of node also extract the gyp build files + || (!isLegacy && + (minimatch(file, '*.gypi') + || minimatch(file, 'tools/*.gypi') + || minimatch(file, 'tools/gyp_addon') + || (minimatch(file, 'tools/gyp/**') && !minimatch(file, 'tools/gyp/test/**')) + ) + ) + } + +} + + +install.trim = function trim (file) { + var firstSlash = file.indexOf('/') + return file.substring(firstSlash + 1) +} diff --git a/deps/npm/node_modules/node-gyp/lib/list.js b/deps/npm/node_modules/node-gyp/lib/list.js new file mode 100644 index 0000000000..6ecf1ba860 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/lib/list.js @@ -0,0 +1,34 @@ + +module.exports = exports = list + +exports.usage = 'Prints a listing of the currently installed node development files' + +/** + * Module dependencies. + */ + +var fs = require('fs') + , path = require('path') + +function list (gyp, args, callback) { + + // TODO: Make ~/.node-gyp configurable + var nodeGypDir = path.resolve(process.env.HOME, '.node-gyp') + + gyp.verbose('using node-gyp dir', nodeGypDir) + + // readdir the node-gyp dir + fs.readdir(nodeGypDir, onreaddir) + + function onreaddir (err, versions) { + if (err && err.code != 'ENOENT') { + return callback(err) + } + if (versions) { + versions = versions.filter(function (v) { return v != 'current' }) + } else { + versions = [] + } + callback(null, versions) + } +} diff --git a/deps/npm/node_modules/node-gyp/lib/node-gyp.js b/deps/npm/node_modules/node-gyp/lib/node-gyp.js new file mode 100644 index 0000000000..efa53435eb --- /dev/null +++ b/deps/npm/node_modules/node-gyp/lib/node-gyp.js @@ -0,0 +1,140 @@ + +module.exports = exports = gyp + +/** + * Module dependencies. + */ + +var fs = require('fs') + , path = require('path') + , nopt = require('nopt') + , child_process = require('child_process') + , EE = require('events').EventEmitter + , inherits = require('util').inherits + , commands = [ + // Module build commands + 'build' + , 'clean' + , 'configure' + , 'rebuild' + // Development Header File management commands + , 'install' + , 'list' + , 'remove' + ] + , aliases = { + 'ls': 'list' + , 'rm': 'remove' + } + +/** + * The `gyp` function. + */ + +function gyp () { + return new Gyp +} + +function Gyp () { + var me = this + + this.commands = {} + commands.forEach(function (command) { + me.commands[command] = function (argv, callback) { + me.verbose('command', command, argv) + return require('./' + command)(me, argv, callback) + } + }) +} +inherits(Gyp, EE) +exports.Gyp = Gyp +var proto = Gyp.prototype + +/** + * Export the contents of the package.json. 
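+ * The "installVersion" field in there is what the "install" command checks
+ * against the "installVersion" file it writes into each dev files directory.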
+ */
+
+proto.package = require('../package')
+
+proto.configDefs = {
+    help: Boolean // everywhere
+  , arch: String // 'configure'
+  , msvs_version: String // 'configure'
+  , debug: Boolean // 'build'
+  , ensure: Boolean // 'install'
+  , verbose: Boolean // everywhere
+  , solution: String // 'build' (windows only)
+  , proxy: String // 'install'
+}
+
+proto.shorthands = {}
+
+proto.parseArgv = function parseOpts (argv) {
+  this.opts = nopt(this.configDefs, this.shorthands, argv)
+  this.argv = this.opts.argv.remain.slice()
+
+  var command = this.argv.shift()
+  this.command = aliases[command] || command
+}
+
+/**
+ * Spawns a child process and emits a 'spawn' event.
+ */
+
+proto.spawn = function spawn (command, args, opts) {
+  opts || (opts = {})
+  if (!opts.silent && !opts.customFds) {
+    opts.customFds = [ 0, 1, 2 ]
+  }
+  var cp = child_process.spawn(command, args, opts)
+  this.emit('spawn', command, args, cp)
+  return cp
+}
+
+/**
+ * Logging mechanisms.
+ */
+
+proto.info = function info () {
+  var args = Array.prototype.slice.call(arguments)
+  args.unshift('info')
+  this.emit.apply(this, args)
+}
+
+proto.verbose = function verbose () {
+  var args = Array.prototype.slice.call(arguments)
+  args.unshift('verbose')
+  this.emit.apply(this, args)
+}
+
+proto.usageAndExit = function usageAndExit () {
+  var usage = [
+      ''
+    , '  Usage: node-gyp <command> [options]'
+    , ''
+    , '  where <command> is one of:'
+    , commands.map(function (c) {
+        return '    - ' + c + ' - ' + require('./' + c).usage
+      }).join('\n')
+    , ''
+    , '  for specific command usage and options try:'
+    , '    $ node-gyp <command> --help'
+    , ''
+    , 'node-gyp@' + this.version + ' ' + path.resolve(__dirname, '..')
+  ].join('\n')
+
+  console.log(usage)
+  process.exit(4)
+}
+
+/**
+ * Version number proxy.
+ */
+
+Object.defineProperty(proto, 'version', {
+    get: function () {
+      return this.package.version
+    }
+  , enumerable: true
+})
+
diff --git a/deps/npm/node_modules/node-gyp/lib/rebuild.js b/deps/npm/node_modules/node-gyp/lib/rebuild.js
new file mode 100644
index 0000000000..cb21f32c22
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/lib/rebuild.js
@@ -0,0 +1,20 @@
+
+module.exports = exports = rebuild
+
+exports.usage = 'Runs "clean", "configure" and "build" all at once'
+
+function rebuild (gyp, argv, callback) {
+
+  // first "clean"
+  gyp.commands.clean([], function (err) {
+    if (err) {
+      // don't bail
+      gyp.info(err.stack);
+    }
+
+    gyp.commands.configure([], function (err) {
+      if (err) return callback(err);
+      gyp.commands.build([], callback);
+    });
+  });
+}
diff --git a/deps/npm/node_modules/node-gyp/lib/remove.js b/deps/npm/node_modules/node-gyp/lib/remove.js
new file mode 100644
index 0000000000..e17279b591
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/lib/remove.js
@@ -0,0 +1,57 @@
+
+module.exports = exports = remove
+
+exports.usage = 'Removes the node development files for the specified version'
+
+/**
+ * Module dependencies.
+ */
+
+var fs = require('fs')
+  , rm = require('rimraf')
+  , path = require('path')
+  , semver = require('semver')
+
+function remove (gyp, argv, callback) {
+
+  // TODO: Make ~/.node-gyp configurable
+  var nodeGypDir = path.resolve(process.env.HOME, '.node-gyp')
+
+  gyp.verbose('using node-gyp dir', nodeGypDir)
+
+  // get the user-specified version to remove
+  var v = argv[0] || gyp.opts.target
+
+  if (!v) {
+    return callback(new Error('You must specify a version number to remove.
Ex: "0.6.12"')) + } + + // parse the version to normalize and make sure it's valid + var version = semver.parse(v) + + if (!version) { + return callback(new Error('Invalid version number: ' + v)) + } + + // flatten the version Array into a String + version = version.slice(1, 4).join('.') + + var versionPath = path.resolve(nodeGypDir, version) + gyp.verbose('removing development files for version', version) + + // first check if its even installed + fs.stat(versionPath, function (err, stat) { + if (err) { + if (err.code == 'ENOENT') { + gyp.info('version was already not installed', version) + callback() + } else { + callback(err) + } + return + } + // Go ahead and delete the dir + rm(versionPath, callback) + }) + +} diff --git a/deps/npm/node_modules/node-gyp/lib/util/asyncEmit.js b/deps/npm/node_modules/node-gyp/lib/util/asyncEmit.js new file mode 100644 index 0000000000..eb3d29ffc6 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/lib/util/asyncEmit.js @@ -0,0 +1,61 @@ + +/** + * An `asyncEmit()` function that accepts an EventEmitter, an Array of args, and + * a callback function. If the emitter listener function has an arity + * > args.length then there is an assumed callback function on the emitter, which + * means that it is doing some async work. We have to wait for the callbacks for + * any async listener functions. + * + * It works like this: + * + * var emitter = new EventEmitter + * + * // this is an async listener + * emitter.on('something', function (val, done) { + * // val may be any number of input arguments + * setTimeout(function () { + * done() + * }, 1000) + * }) + * + * // this is a sync listener, no callback function + * emitter.on('something', function (val) { + * + * }) + * + * asyncEmit(emitter, 'something', [ 5 ], function (err) { + * if (err) throw err + * console.log('DONE!') + * }) + */ + +module.exports = asyncEmit +function asyncEmit (emitter, eventName, args, callback) { + + if (typeof args == 'function') { + callback = args + args = [] + } + + var async = emitter.listeners(eventName).filter(function (func) { + return func.length > args.length + }).length + + var argv = [ eventName ].concat(args) + + // callback function + argv.push(function (err) { + if (err && !callback.called) { + callback.called = true + callback(err) + } + --async || callback() + }) + + // no async listeners + if (async === 0) { + process.nextTick(callback) + } + + return emitter.emit.apply(emitter, argv) +} diff --git a/deps/npm/node_modules/node-gyp/lib/util/hook.js b/deps/npm/node_modules/node-gyp/lib/util/hook.js new file mode 100644 index 0000000000..d31cc2e8aa --- /dev/null +++ b/deps/npm/node_modules/node-gyp/lib/util/hook.js @@ -0,0 +1,48 @@ + +/** + * This is our "Hook" class that allows a script to hook into the lifecyle of the + * "configure", "build" and "clean" commands. It's basically a hack into the + * module.js file to allow custom hooks into the module-space, specifically to + * make the global scope act as an EventEmitter. + */ + +var fs = require('fs') + , path = require('path') + , Module = require('module') + , EventEmitter = require('events').EventEmitter + , functions = Object.keys(EventEmitter.prototype).filter(function (k) { + return typeof EventEmitter.prototype[k] == 'function' + }) + , boilerplate = functions.map(function (k) { + return 'var ' + k + ' = module.emitter.' 
+ k + '.bind(module.emitter);' + }).join('') + +module.exports = createHook +function createHook (filename, callback) { + + var emitter = new EventEmitter + + // first read the file contents + fs.readFile(filename, 'utf8', function (err, code) { + if (err) { + if (err.code == 'ENOENT') { + // hook file doesn't exist, oh well + callback(null, emitter) + } else { + callback(err) + } + return + } + // got a hook file, now execute it + var mod = new Module(filename) + mod.filename = filename + mod.paths = Module._nodeModulePaths(filename) + mod.emitter = emitter + try { + mod._compile(boilerplate + code, filename) + } catch (e) { + return callback(e) + } + callback(null, emitter) + }) +} diff --git a/deps/npm/node_modules/node-gyp/lib/util/mkdirp.js b/deps/npm/node_modules/node-gyp/lib/util/mkdirp.js new file mode 100644 index 0000000000..62095c8e20 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/lib/util/mkdirp.js @@ -0,0 +1,29 @@ + +/** + * Tiny wrapper around substack's mkdirp module, to add a return value + * to it. `true` if any directories were created in the process, `false` + * if the target directory already existed. The `err` is still the first + * argument in case anything actually wrong happens. + */ + +var fs = require('fs') + , mkdirp_ = require('mkdirp') + +module.exports = function mkdirp (path, cb) { + // first stat() to see if the target exists + fs.stat(path, function (err) { + if (err) { + if (err.code == 'ENOENT') { + // doesn't exist, mkdirp it + mkdirp_(path, function (err) { + if (err) return cb(err) + cb(err, true) + }) + } else { + cb(err) + } + return + } + cb(err, false) + }) +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/ansi/.npmignore b/deps/npm/node_modules/node-gyp/node_modules/ansi/.npmignore new file mode 100644 index 0000000000..3c3629e647 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/ansi/.npmignore @@ -0,0 +1 @@ +node_modules diff --git a/deps/npm/node_modules/node-gyp/node_modules/ansi/README.md b/deps/npm/node_modules/node-gyp/node_modules/ansi/README.md new file mode 100644 index 0000000000..e8d328ff3e --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/ansi/README.md @@ -0,0 +1,89 @@ +ansi.js +========= +### Advanced ANSI formatting tool for Node.js + +![](http://f.cl.ly/items/0D3w3d1W443f2z3X361G/Screen%20Shot%202012-01-26%20at%202.18.31%20AM.png) + +`ansi.js` is a module for Node.js that provides an easy-to-use API for +writing ANSI escape codes to `Stream` instances. ANSI escape codes are used to do +fancy things in a terminal window, like render text in colors, delete characters, +lines, the entire window, or hide and show the cursor, and lots more! + +The code for the example in the screenshot above can be found in the `examples` +directory. + +#### Features: + + * 256 color support for the terminal! + * Works with *any* writable `Stream` instance. + * Allows you to move the cursor anywhere on the terminal window. + * Allows you to delete existing contents from the terminal window. + * Allows you to hide and show the cursor. + * Converts CSS color codes and RGB values into ANSI escape codes. + * Low-level; you are in control of when escape codes are used, it's not abstracted. + * Optional automatic cleanup of stream by before closing (still TODO). 
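+
+The 256 color support comes from the `rgb()` methods that `ansi.js` puts on
+the cursor and on its `fg`/`bg` proxies; the closest match out of the 216
+ANSI colors is picked for you. A minimal sketch:
+
+``` js
+var ansi = require('ansi')
+  , cursor = ansi(process.stdout)
+
+cursor.rgb(255, 128, 0)  // foreground: closest ANSI match for orange
+  .write('256 colors!')
+  .fg.reset()            // restore the default foreground color
+  .write('\n')
+```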
diff --git a/deps/npm/node_modules/node-gyp/lib/util/mkdirp.js b/deps/npm/node_modules/node-gyp/lib/util/mkdirp.js
new file mode 100644
index 0000000000..62095c8e20
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/lib/util/mkdirp.js
@@ -0,0 +1,29 @@
+
+/**
+ * Tiny wrapper around substack's mkdirp module, to add a second callback
+ * argument: `true` if any directories were created in the process, `false`
+ * if the target directory already existed. The `err` is still the first
+ * argument in case anything actually goes wrong.
+ */
+
+var fs = require('fs')
+  , mkdirp_ = require('mkdirp')
+
+module.exports = function mkdirp (path, cb) {
+  // first stat() to see if the target exists
+  fs.stat(path, function (err) {
+    if (err) {
+      if (err.code == 'ENOENT') {
+        // doesn't exist, mkdirp it
+        mkdirp_(path, function (err) {
+          if (err) return cb(err)
+          cb(err, true)
+        })
+      } else {
+        cb(err)
+      }
+      return
+    }
+    cb(err, false)
+  })
+}
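Usage of the wrapper then looks like this (the target path is only an example):

``` js
var mkdirp = require('./lib/util/mkdirp') // path assumed

mkdirp('/tmp/some/nested/dir', function (err, created) {
  if (err) throw err
  // `created` is true only if at least one directory was just made
  console.log(created ? 'created it' : 'was already there')
})
```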
+ */ + +function Cursor (stream, options) { + this.stream = stream; + this.fg = this.foreground = new Foreground(this); + this.bg = this.background = new Background(this); +} +exports.Cursor = Cursor; + +/** + * The `Foreground` class. + */ + +function Foreground (cursor) { + this.cursor = cursor; +} +exports.Foreground = Foreground; + +/** + * The `Background` class. + */ + +function Background (cursor) { + this.cursor = cursor; +} +exports.Background = Background; + +/** + * Helper function that calls `write()` on the underlying Stream. + */ + +Cursor.prototype.write = function () { + this.stream.write.apply(this.stream, arguments); + return this; +} + +/** + * Set up the positional ANSI codes. + */ + +Object.keys(codes).forEach(function (name) { + var code = String(codes[name]); + Cursor.prototype[name] = function () { + var c = code; + if (arguments.length > 0) { + c = Math.round(arguments[0]) + code; + } + this.write(prefix + c); + return this; + } +}); + +/** + * Set up the functions for the rendering ANSI codes. + */ + +Object.keys(styles).forEach(function (style) { + var name = style[0].toUpperCase() + style.substring(1); + + Cursor.prototype[style] = function () { + this.write(prefix + styles[style] + suffix); + return this; + } + + Cursor.prototype['reset'+name] = function () { + this.write(prefix + reset[style] + suffix); + return this; + } +}); + +/** + * Setup the functions for the standard colors. + */ + +Object.keys(colors).forEach(function (color) { + Foreground.prototype[color] = function () { + this.cursor.write(prefix + colors[color] + suffix); + return this.cursor; + } + + var bgCode = colors[color] + 10; + Background.prototype[color] = function () { + this.cursor.write(prefix + bgCode + suffix); + return this.cursor; + } + + Cursor.prototype[color] = function () { + return this.foreground[color](); + } +}); + +/** + * Makes a beep sound! + */ + +Cursor.prototype.beep = function () { + this.write('\007'); + return this; +} + +/** + * Moves cursor to specific position + */ + +Cursor.prototype.goto = function (x, y) { + x = ~~x; + y = ~~y; + this.write(prefix + y + ';' + x + 'H'); + return this; +} + +/** + * Reset the foreground color. + */ + +Foreground.prototype.reset = function () { + this.cursor.write(prefix + reset.foreground + suffix); + return this.cursor; +} + +/** + * Reset the background color. + */ + +Background.prototype.reset = function () { + this.cursor.write(prefix + reset.background + suffix); + return this.cursor; +} + +/** + * Resets all ANSI formatting on the stream. + */ + +Cursor.prototype.reset = function () { + this.write(prefix + '0' + suffix); + return this; +} + +/** + * Sets the foreground color with the given RGB values. + * The closest match out of the 216 colors is picked. + */ + +Foreground.prototype.rgb = function (r, g, b) { + this.cursor.write(prefix + '38;5;' + rgb(r, g, b) + suffix); + return this.cursor; +} + +/** + * Sets the background color with the given RGB values. + * The closest match out of the 216 colors is picked. + */ + +Background.prototype.rgb = function (r, g, b) { + this.cursor.write(prefix + '48;5;' + rgb(r, g, b) + suffix); + return this.cursor; +} + +/** + * Same as `cursor.fg.rgb()`. + */ + +Cursor.prototype.rgb = function (r, g, b) { + return this.foreground.rgb(r, g, b); +} + +/** + * Accepts CSS color codes for use with ANSI escape codes. + * For example: `#FF000` would be bright red. 
+ */ + +Foreground.prototype.hex = Background.prototype.hex = function (color) { + var rgb = hex(color); + return this.rgb(rgb[0], rgb[1], rgb[2]); +} + +/** + * Same as `cursor.fg.hex()`. + */ + +Cursor.prototype.hex = function (color) { + return this.foreground.hex(color); +} + +function rgb (r, g, b) { + var red = r / 255 * 5 + , green = g / 255 * 5 + , blue = b / 255 * 5; + return rgb5(red, green, blue); +} + +function rgb5 (r, g, b) { + var red = Math.round(r) + , green = Math.round(g) + , blue = Math.round(b); + return 16 + (red*36) + (green*6) + blue; +} + +function hex (color) { + var c = color[0] === '#' ? color.substring(1) : color + , r = c.substring(0, 2) + , g = c.substring(2, 4) + , b = c.substring(4, 6); + return [parseInt(r, 16), parseInt(g, 16), parseInt(b, 16)]; +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/ansi/nodejs.png b/deps/npm/node_modules/node-gyp/node_modules/ansi/nodejs.png new file mode 100644 index 0000000000000000000000000000000000000000..61f2116e11990c8e5a3eb22d850a56ba5a578e0a GIT binary patch literal 30997 zcmaI7b9g7g(=YmsZQHh;d}G_TZQI${b~d&)b~f5L8{4+C$<6OQ=RN2Cao;=hJTu+% zRDHU-yQZhAt0r1WK@tHL7Zv~jAV^DzsQ>^VW#8p=XwdKLEe$~7`-bBtuH~lcXyN8* z>|zcOHghyFCziH1wlr5UH#YNj9yjL)0KiDC)wJBS9 zc{v%I+M2r&o0wZ#I|z_oclD7HTbl`xYI4Xk%R7mhTUkr_xR|T@D5#nG*qZW~kqQYC z^Lz1r1K69p854Wi+c~)MdI^yJ7cTF2`9F`DNQwW8#LZTK^nVGZC9gy*>gZxl%)!Xa zV9LzNOw7r`$il|X#l=NW%*xEd%*4#f#KOwJ!o|zR%FE15{J#&<@77$*EO=GKB>s0> z-!%bJD>pYMUM40_PftcqHbzGmOC}Z`9-jYTu(C3IQ!u!CJGdEpF*vxA{WpS`xvQy* zwUe8*qXY4O5RFY7-Q5I8za{-&U9flhKd=t2|9hCe2aL(f*oldSk@-Je`foyc`Tu`X zd;9-G?dqmt{(t5D{{(hb^L8?4QZaXRbayfRo;VA#|1{;qE9zoy?B?jA=IChm-=nBx z<>=<d!^GZ9ox*0o|noElbkbWaDT3eg(3iF7uOR#W= zursrAv#^M>@NkN-3X8Cav$1h}pR7Fp%`4_;>TYlD;P&6VX8)I0^nc|2k1E(ZeYY%T z?qcm>ZYJU4XixlKUFNm^pJn0sAMyS-ui5`Bi}3%*%k(V_(|;!R|25J7{peeJ{!{!v zYWrRJKZMbR4`J8!V;+T`bK%^!miZPD| zA>+s~8-uJH!LJyxg4zY*ZC&}kUk1#)U$&q7=k%N9z{h98w}m$vgW$nu#x?;#NQz?T z!w?@kEj!=yWsbMQvZbM9+|x+`RI8vtZv`5SinbpUGMKMB7bk(ZSgH@7$ou-3ah<6KZ<+mv z%GxtACpp%~{Ys+x4JJMY38_%sd8$2;FXpkCLs9OP`R=RS^r53S22<0F?Ka29E zOH}$ovIGc-HhxpSq*j`Ja2{Rx~Q0!}1!zYqHjqiP=o6o^m-iOS=-`*mYG z+Gmp027ziJV58-MKjS%W;1b}yC~OCd?P(CGxM|;1D{iO!U^=-CIlnNpLS?d5;9`aM z9u;MVv!JcfXxR9Q45$0vs>G5x2qHYA#(NrC%Dh@g;PTL$=VSaSF5HFtrV{z+3-*N& zPp66e3P2&OZ9H!^72?$(~AUC zOcJd+3(pc#p-uWj+G&R%JBp>~K40Zg(>~Zk8nl+qD67{`d(K78e|11w+udJ9M?P9I z%yE6eYa~#rlZ9tWT}Gu4gQg)t15rKJQmu_`f9`uh<*F@g9K;(#n_#1Uo*h?>E_hD; zFxDM`00rZMjShmS82DKZeWntzh^|W#eSN|L7ax<#&PnKht+EPv)=0W+B835uOUM_8 z%gm{)rAbtgg>W21ccP_T2tQAP+`a5CF?rAsq^xgkK?TBcvhOMxy`6+%zTgXfxgK8Q z>KDu-DkWE6v?>h3Vq;u%OjcqUPXYoOYSummZKCv6IxfsrI?QACQ1$u=fJro!Qegi= zk*}5z3Tca+CM9W-PsF^dI_KMaxT_VJ4aW#ogo9zG%`60&bL~V))rxeh7K#U2*@DOs7+A5yoZ7xTO?gx2?3w`BET{+RBamNFvyOUM zrk99SXJw+G3L#8(Y`K~5Xbz;K&C7f;cwRs!zzGzUjJb`3shWQY?O*mPWj_^RIh?Xc+>{8K& z58W*lLo!56aPs6Gd4$_ik^l{J7hq@oQj3H%Vjh*BEKD!rECfo9fX$N|5nd_dfOs_H z0jlsA|s zDJteU;p{Byu%!wdX$~?R$ApYs(3yAQ=UC3zAc?xl-F8rJCUXh;EDvqDT?x>C0qAsw z?V_l}k`#4jR(U32L1`lmRO$Uc7-o=PsImttZ3NSj(cL*fmHW)Ia#aX|g>aID}F4WTUDG3I(5!os)8t{`Vd>*x_8mJ<(o7nkiZZxS$x= zVhqM5l`*4V_3VS<_T|V6cELJ8vL46yV)ki%6&1dixIExn(; zus482UY$LrRO&mC{B~h=R&SrsV9Z8C$m}mT;%+u*$g#hU^7IL`dP=TZ!jWVsocsOp zD#_AQs5VJVxH!xvI{b9u8h(^2wh#FJbLtZ=fcabx094_sK0{#}tBwsTa&(+zvrCTm-NJZ!_ICdIJS_&FX*R$u~>NQmBccpMIgLZdUWSP#hOIwk}#CbN8GF9vK6+HeRS@!MLBJjV~9P8ho_9wjg!rEa-PO<2!E7*DGFG`7Db|SwW!I~Vhjg{nA`geG*QTmql&qG^be6iMUDn)-G2uk4&k4y(LHn4-#i#SFj3kMDpsoW#f%Ae&G z7UD(ae5%S57bhnJ;8fc0koeZ5;mRHJK+0Ny&4TVeU(?s3#+Z35U#BlM#}c!LUQ$FI zU7-b$4yX(pT?MT%V$USyLiIh<`W;&tQt|r~K1&7HB_#|GJ{e*rqB1X>I1zbvo|t&n 
zLCatQsjaL3PF9HnGQV5(G0|z6fUx6&8qUL=4L-XRb*XcD5jBh7dH*a5*o8?b+9)hX zL&Q75JH~!TM{Pp@L=TuIF^!^MG{P233&$-Vpc*;|E}wi4XccoHO+_&4j|3R?i%ylM z*QE5BQS6d5!c)1R9_;vSn@T^N3)GkS#s*Xz$cu&jR(j95LA8F^2tRowVsXRoXZm4F0txKj#FNDt_!EX68tE4s z$DU0WDhM8|rt32i8pTUM1t0g2dJi*>Q7l4O3ZF9Y5g&TaMB2{>AsZuE&?`Ybz2f4&EQ#%s3D*&=6zUMcV=@YnNIdwf?|YG9)v< zP5H>YMmr%;$sL_wK1_>j#`a@NEYoPrv>~{ zMp7e^GiLIQ2%Xlh=#>hB%O2_LyjfkbagGfak8T^wsW8*p!fX~f6b&+Uh(zyx+DBU{ z7<=|Pw$%>_N@ic*P6&1$0fT`Of(gvz>W}X1V*3XDqQ+O%+=m!LVHf(Wuyz|u2c?jn z|Di8$@jV~Ik&k;hDN~Kx`R9xgCK&-tJ)+VJwzLZ!1|?${YJa8H3AIy9YZ+v)ykmZs z8AkCwth%r;qRvn#v_(Q=18PF zb9V##z5>l3AInkO!rRe}%&hhSRaLlyrorN=N0v{+0Q=lfi2=10gAdZz1KBQL%-eT{_IkcZIwYV(p01R6WU1-jR?YxQ@Y%m_l5 zZ6Hj+j!EpHmpXd7a4gMO8c)ESxgnCoG<)$*`C1FS)rMYQL2; zowvmlW|FkG!{}ia90_eGB%bF5*EXbP1INA51+93FsUhif3yI)K4%yrU429j}cEHa&E{21a2U@1#4%c^jQ;WPjqT7;rrBmXd~NoNaT$n!EV`GyAFIWn zVE})A=~$DrIpgb3DZH^9t7`2V5#dNFbKJbH&xBGAkV3-vlZafRdl;G7JB;I9lpfOs zrvTNo-Q`1GY0YI2dT;wMxjearD-7mV^BzW=ZXCDhs=|icq);w#Rv>R7m-e)%h1UnXgTrr+#7mh`ea$$ zNDv+40qb!daN!JfTX z90>D%xf)A6Jc@VTc)dqGx`K{e?!^r8i|d#)O~0aTY^-4v#6DuAx{uRN6QGnz&Zd;q zCdMk$e2i3PVX{~)whtW2&4!m_;Yy@KJY+VlXpDoP$fT=v+)LAuONKePraUyNT+s)f zOzndPQO#7R^Ak_>v3o~;Fpc5CB7Tt~FDx6*FCYYqda767XBW)? zY<48x!O(4j!By<*=iV$#wny(FEmQb7yCr+CK2`y_i|g!7gB*-tQYiNC-jn%``;b>} ztB*#UHQTLIVS;FXQqYOzq0eSIWq&@H{mhNJzMBK1ww7o|Db^3<>lp~-N*0Inz+QjR zj4&vb^(c>1d^b8!vhE;me}n1{$${jrxZ27q;fmG{>T|vc$6&%oCATS@AwJ#!NM1&O q#EPV>1xIuiLke{>{QKjN00RJb(RtA=4#STC0000= 0.4.0 && < 0.9.0" + }, + "_npmUser": { + "name": "isaacs", + "email": "i@izs.me" + }, + "_id": "ansi@0.0.4", + "dependencies": {}, + "optionalDependencies": {}, + "_engineSupported": true, + "_npmVersion": "1.1.8", + "_nodeVersion": "v0.7.7-pre", + "_defaultsLoaded": true, + "_from": "ansi@0.0.x" +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/ansi/server.js b/deps/npm/node_modules/node-gyp/node_modules/ansi/server.js new file mode 100644 index 0000000000..452bc521a6 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/ansi/server.js @@ -0,0 +1,51 @@ +var http = require('http') + , ansi = require('./') + , Canvas = require('canvas') + , Image = Canvas.Image + , imageFile = process.argv[2] || __dirname + '/examples/yoshi.png' + , image = require('fs').readFileSync(imageFile) + +var img = new Image() +img.src = image + +var server = http.createServer(function (req, res) { + draw(res); +}) + +server.listen(8080, function () { + console.error('HTTP server listening on:', this.address()) +}) + +function draw (stream) { + var cursor = ansi(stream) + , pixel = ' ' + , width = img.width + , scaleW = img.width > width ? 
diff --git a/deps/npm/node_modules/node-gyp/node_modules/ansi/server.js b/deps/npm/node_modules/node-gyp/node_modules/ansi/server.js
new file mode 100644
index 0000000000..452bc521a6
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/ansi/server.js
@@ -0,0 +1,51 @@
+var http = require('http')
+  , ansi = require('./')
+  , Canvas = require('canvas')
+  , Image = Canvas.Image
+  , imageFile = process.argv[2] || __dirname + '/examples/yoshi.png'
+  , image = require('fs').readFileSync(imageFile)
+
+var img = new Image()
+img.src = image
+
+var server = http.createServer(function (req, res) {
+  draw(res);
+})
+
+server.listen(8080, function () {
+  console.error('HTTP server listening on:', this.address())
+})
+
+function draw (stream) {
+  var cursor = ansi(stream)
+    , pixel = '  '
+    , width = img.width
+    , scaleW = img.width > width ? width / img.width : 1
+    , w = Math.floor(img.width * scaleW)
+    , h = Math.floor(img.height * scaleW);
+
+  var canvas = new Canvas(w, h)
+    , ctx = canvas.getContext('2d');
+
+  ctx.drawImage(img, 0, 0, w, h);
+
+  var data = ctx.getImageData(0, 0, w, h).data;
+
+  for (var i=0, l=data.length; i<l; i+=4) {
+    var r = data[i]
+      , g = data[i+1]
+      , b = data[i+2]
+      , alpha = data[i+3];
+    if (alpha > 0) {
+      cursor.bg.rgb(r, g, b);
+    } else {
+      cursor.bg.reset();
+    }
+    stream.write(pixel);
+    if ((i/4|0) % w === (w-1)) {
+      cursor.bg.reset();
+      stream.write('\n');
+    }
+  }
+  stream.end();
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/glob/.npmignore b/deps/npm/node_modules/node-gyp/node_modules/glob/.npmignore
new file mode 100644
index 0000000000..2af4b71c93
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/glob/.npmignore
@@ -0,0 +1,2 @@
+.*.swp
+test/a/
diff --git a/deps/npm/node_modules/node-gyp/node_modules/glob/.travis.yml b/deps/npm/node_modules/node-gyp/node_modules/glob/.travis.yml
new file mode 100644
index 0000000000..94cd7f6ba2
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/glob/.travis.yml
@@ -0,0 +1,4 @@
+language: node_js
+node_js:
+  - 0.6
+  - 0.7
diff --git a/deps/npm/node_modules/node-gyp/node_modules/glob/README.md b/deps/npm/node_modules/node-gyp/node_modules/glob/README.md
new file mode 100644
index 0000000000..6e27df6206
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/glob/README.md
@@ -0,0 +1,233 @@
+# Glob
+
+This is a glob implementation in JavaScript.  It uses the `minimatch`
+library to do its matching.
+
+## Attention: node-glob users!
+
+The API has changed dramatically between 2.x and 3.x. This library is
+now 100% JavaScript, and the integer flags have been replaced with an
+options object.
+
+Also, there's an event emitter class, proper tests, and all the other
+things you've come to expect from node modules.
+
+And best of all, no compilation!
+
+## Usage
+
+```javascript
+var glob = require("glob")
+
+// options is optional
+glob("**/*.js", options, function (er, files) {
+  // files is an array of filenames.
+  // If the `nonull` option is set, and nothing
+  // was found, then files is ["**/*.js"]
+  // er is an error object or null.
+})
+```
+
+## Features
+
+Please see the [minimatch
+documentation](https://github.com/isaacs/minimatch) for more details.
+
+Supports these glob features:
+
+* Brace Expansion
+* Extended glob matching
+* "Globstar" `**` matching
+
+See:
+
+* `man sh`
+* `man bash`
+* `man 3 fnmatch`
+* `man 5 gitignore`
+* [minimatch documentation](https://github.com/isaacs/minimatch)
+
+## glob(pattern, [options], cb)
+
+* `pattern` {String} Pattern to be matched
+* `options` {Object}
+* `cb` {Function}
+  * `err` {Error | null}
+  * `matches` {Array} filenames found matching the pattern
+
+Perform an asynchronous glob search.
+
+## glob.sync(pattern, [options])
+
+* `pattern` {String} Pattern to be matched
+* `options` {Object}
+* return: {Array} filenames found matching the pattern
+
+Perform a synchronous glob search.
+
+## Class: glob.Glob
+
+Create a Glob object by instantiating the `glob.Glob` class.
+
+```javascript
+var Glob = require("glob").Glob
+var mg = new Glob(pattern, options, cb)
+```
+
+It's an EventEmitter, and starts walking the filesystem to find matches
+immediately.
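Driving the `Glob` class through its events, rather than the completion
callback, looks roughly like this (the pattern and options are illustrative):

```javascript
var Glob = require("glob").Glob

var g = new Glob("src/**/*.js", { nosort: true })

g.on("match", function (file) {
  // fires once per filename, as each match is found
  console.log("found:", file)
})

g.on("end", function (files) {
  console.log("done, %d matches", files.length)
})

g.on("error", function (er) {
  console.error("glob failed:", er)
})
```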
+
+### new glob.Glob(pattern, [options], [cb])
+
+* `pattern` {String} pattern to search for
+* `options` {Object}
+* `cb` {Function} Called when an error occurs, or matches are found
+  * `err` {Error | null}
+  * `matches` {Array} filenames found matching the pattern
+
+Note that if the `sync` flag is set in the options, then matches will
+be immediately available on the `g.found` member.
+
+### Properties
+
+* `minimatch` The minimatch object that the glob uses.
+* `options` The options object passed in.
+* `error` The error encountered.  When an error is encountered, the
+  glob object is in an undefined state, and should be discarded.
+* `aborted` Boolean which is set to true when calling `abort()`.  There
+  is no way at this time to continue a glob search after aborting, but
+  you can re-use the statCache to avoid having to duplicate syscalls.
+
+### Events
+
+* `end` When the matching is finished, this is emitted with all the
+  matches found.  If the `nonull` option is set, and no match was found,
+  then the `matches` list contains the original pattern.  The matches
+  are sorted, unless the `nosort` flag is set.
+* `match` Every time a match is found, this is emitted with the matched
+  filename.
+* `error` Emitted when an unexpected error is encountered, or whenever
+  any fs error occurs if `options.strict` is set.
+* `abort` When `abort()` is called, this event is raised.
+
+### Methods
+
+* `abort` Stop the search.
+
+### Options
+
+All the options that can be passed to Minimatch can also be passed to
+Glob to change pattern matching behavior.  Also, some have been added,
+or have glob-specific ramifications.
+
+All options are false by default, unless otherwise noted.
+
+All options are added to the glob object, as well.
+
+* `cwd` The current working directory in which to search.  Defaults
+  to `process.cwd()`.
+* `root` The place where patterns starting with `/` will be mounted
+  onto.  Defaults to `path.resolve(options.cwd, "/")` (`/` on Unix
+  systems, and `C:\` or some such on Windows.)
+* `nomount` By default, a pattern starting with a forward-slash will be
+  "mounted" onto the root setting, so that a valid filesystem path is
+  returned.  Set this flag to disable that behavior.
+* `mark` Add a `/` character to directory matches.  Note that this
+  requires additional stat calls.
+* `nosort` Don't sort the results.
+* `stat` Set to true to stat *all* results.  This reduces performance
+  somewhat, and is completely unnecessary, unless `readdir` is presumed
+  to be an untrustworthy indicator of file existence.  It will cause
+  ELOOP to be triggered one level sooner in the case of cyclical
+  symbolic links.
+* `silent` When an unusual error is encountered
+  when attempting to read a directory, a warning will be printed to
+  stderr.  Set the `silent` option to true to suppress these warnings.
+* `strict` When an unusual error is encountered
+  when attempting to read a directory, the process will just continue on
+  in search of other matches.  Set the `strict` option to raise an error
+  in these cases.
+* `statCache` A cache of results of filesystem information, to prevent
+  unnecessary stat calls.  While it should not normally be necessary to
+  set this, you may pass the statCache from one glob() call to the
+  options object of another, if you know that the filesystem will not
+  change between calls.  (See "Race Conditions" below.)
+* `sync` Perform a synchronous glob search.
+* `nounique` In some cases, brace-expanded patterns can result in the
+  same file showing up multiple times in the result set.
By default, + this implementation prevents duplicates in the result set. + Set this flag to disable that behavior. +* `nonull` Set to never return an empty set, instead returning a set + containing the pattern itself. This is the default in glob(3). +* `nocase` Perform a case-insensitive match. Note that case-insensitive + filesystems will sometimes result in glob returning results that are + case-insensitively matched anyway, since readdir and stat will not + raise an error. +* `debug` Set to enable debug logging in minimatch and glob. +* `globDebug` Set to enable debug logging in glob, but not minimatch. + +## Comparisons to other fnmatch/glob implementations + +While strict compliance with the existing standards is a worthwhile +goal, some discrepancies exist between node-glob and other +implementations, and are intentional. + +If the pattern starts with a `!` character, then it is negated. Set the +`nonegate` flag to suppress this behavior, and treat leading `!` +characters normally. This is perhaps relevant if you wish to start the +pattern with a negative extglob pattern like `!(a|B)`. Multiple `!` +characters at the start of a pattern will negate the pattern multiple +times. + +If a pattern starts with `#`, then it is treated as a comment, and +will not match anything. Use `\#` to match a literal `#` at the +start of a line, or set the `nocomment` flag to suppress this behavior. + +The double-star character `**` is supported by default, unless the +`noglobstar` flag is set. This is supported in the manner of bsdglob +and bash 4.1, where `**` only has special significance if it is the only +thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but +`a/**b` will not. **Note that this is different from the way that `**` is +handled by ruby's `Dir` class.** + +If an escaped pattern has no matches, and the `nonull` flag is set, +then glob returns the pattern as-provided, rather than +interpreting the character escapes. For example, +`glob.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than +`"*a?"`. This is akin to setting the `nullglob` option in bash, except +that it does not resolve escaped pattern characters. + +If brace expansion is not disabled, then it is performed before any +other interpretation of the glob pattern. Thus, a pattern like +`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded +**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are +checked for validity. Since those two are valid, matching proceeds. + +## Windows + +**Please only use forward-slashes in glob expressions.** + +Though windows uses either `/` or `\` as its path separator, only `/` +characters are used by this glob implementation. You must use +forward-slashes **only** in glob expressions. Back-slashes will always +be interpreted as escape characters, not path separators. + +Results from absolute patterns such as `/foo/*` are mounted onto the +root setting using `path.join`. On windows, this will by default result +in `/foo/*` matching `C:\foo\bar.txt`. + +## Race Conditions + +Glob searching, by its very nature, is susceptible to race conditions, +since it relies on directory walking and such. + +As a result, it is possible that a file that exists when glob looks for +it may have been deleted or modified by the time it returns the result. + +As part of its internal implementation, this program caches all stat +and readdir calls that it makes, in order to cut down on system +overhead. 
However, this also makes it even more susceptible to races, +especially if the statCache object is reused between glob calls. + +Users are thus advised not to use a glob result as a +guarantee of filesystem state in the face of rapid changes. +For the vast majority of operations, this is never a problem. diff --git a/deps/npm/node_modules/node-gyp/node_modules/glob/glob.js b/deps/npm/node_modules/node-gyp/node_modules/glob/glob.js new file mode 100644 index 0000000000..48262b59c3 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/glob/glob.js @@ -0,0 +1,542 @@ +// Approach: +// +// 1. Get the minimatch set +// 2. For each pattern in the set, PROCESS(pattern) +// 3. Store matches per-set, then uniq them +// +// PROCESS(pattern) +// Get the first [n] items from pattern that are all strings +// Join these together. This is PREFIX. +// If there is no more remaining, then stat(PREFIX) and +// add to matches if it succeeds. END. +// readdir(PREFIX) as ENTRIES +// If fails, END +// If pattern[n] is GLOBSTAR +// // handle the case where the globstar match is empty +// // by pruning it out, and testing the resulting pattern +// PROCESS(pattern[0..n] + pattern[n+1 .. $]) +// // handle other cases. +// for ENTRY in ENTRIES (not dotfiles) +// // attach globstar + tail onto the entry +// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $]) +// +// else // not globstar +// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot) +// Test ENTRY against pattern[n+1] +// If fails, continue +// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $]) +// +// Caveat: +// Cache all stats and readdirs results to minimize syscall. Since all +// we ever care about is existence and directory-ness, we can just keep +// `true` for files, and [children,...] for directories, or `false` for +// things that don't exist. + + + +module.exports = glob + +var fs = require("graceful-fs") +, minimatch = require("minimatch") +, Minimatch = minimatch.Minimatch +, inherits = require("inherits") +, EE = require("events").EventEmitter +, path = require("path") +, isDir = {} +, assert = require("assert").ok + +function glob (pattern, options, cb) { + if (typeof options === "function") cb = options, options = {} + if (!options) options = {} + + if (typeof options === "number") { + deprecated() + return + } + + var g = new Glob(pattern, options, cb) + return g.sync ? g.found : g +} + +glob.fnmatch = deprecated + +function deprecated () { + throw new Error("glob's interface has changed. 
Please see the docs.")
+}
+
+glob.sync = globSync
+function globSync (pattern, options) {
+  if (typeof options === "number") {
+    deprecated()
+    return
+  }
+
+  options = options || {}
+  options.sync = true
+  return glob(pattern, options)
+}
+
+
+glob.Glob = Glob
+inherits(Glob, EE)
+function Glob (pattern, options, cb) {
+  if (!(this instanceof Glob)) {
+    return new Glob(pattern, options, cb)
+  }
+
+  if (typeof cb === "function") {
+    this.on("error", cb)
+    this.on("end", function (matches) {
+      // console.error("cb with matches", matches)
+      cb(null, matches)
+    })
+  }
+
+  options = options || {}
+
+  this.maxDepth = options.maxDepth || 1000
+  this.maxLength = options.maxLength || Infinity
+  this.statCache = options.statCache || {}
+
+  this.changedCwd = false
+  var cwd = process.cwd()
+  if (!options.hasOwnProperty("cwd")) this.cwd = cwd
+  else {
+    this.cwd = options.cwd
+    this.changedCwd = path.resolve(options.cwd) !== cwd
+  }
+
+  this.root = options.root || path.resolve(this.cwd, "/")
+  this.root = path.resolve(this.root)
+
+  this.nomount = !!options.nomount
+
+  if (!pattern) {
+    throw new Error("must provide pattern")
+  }
+
+  // base-matching: just use globstar for that.
+  if (options.matchBase && -1 === pattern.indexOf("/")) {
+    if (options.noglobstar) {
+      throw new Error("base matching requires globstar")
+    }
+    pattern = "**/" + pattern
+  }
+
+  this.dot = !!options.dot
+  this.mark = !!options.mark
+  this.sync = !!options.sync
+  this.nounique = !!options.nounique
+  this.nonull = !!options.nonull
+  this.nosort = !!options.nosort
+  this.nocase = !!options.nocase
+  this.stat = !!options.stat
+  this.debug = !!options.debug || !!options.globDebug
+  this.silent = !!options.silent
+
+  var mm = this.minimatch = new Minimatch(pattern, options)
+  this.options = mm.options
+  pattern = this.pattern = mm.pattern
+
+  this.error = null
+  this.aborted = false
+
+  EE.call(this)
+
+  // process each pattern in the minimatch set
+  var n = this.minimatch.set.length
+
+  // The matches are stored as {<filename>: true,...} so that
+  // duplicates are automagically pruned.
+  // Later, we do an Object.keys() on these.
+  // Keep them as a list so we can fill in when nonull is set.
+  this.matches = new Array(n)
+
+  this.minimatch.set.forEach(iterator.bind(this))
+  function iterator (pattern, i, set) {
+    this._process(pattern, 0, i, function (er) {
+      if (er) this.emit("error", er)
+      if (-- n <= 0) this._finish()
+    }.bind(this))
+  }
+}
+
+Glob.prototype._finish = function () {
+  assert(this instanceof Glob)
+
+  var nou = this.nounique
+    , all = nou ? [] : {}
+
+  for (var i = 0, l = this.matches.length; i < l; i ++) {
+    var matches = this.matches[i]
+    if (this.debug) console.error("matches[%d] =", i, matches)
+    // do like the shell, and spit out the literal glob
+    if (!matches) {
+      if (this.nonull) {
+        var literal = this.minimatch.globSet[i]
+        if (nou) all.push(literal)
+        else all[literal] = true
+      }
+    } else {
+      // had matches
+      var m = Object.keys(matches)
+      if (nou) all.push.apply(all, m)
+      else m.forEach(function (m) {
+        all[m] = true
+      })
+    }
+  }
+
+  if (!nou) all = Object.keys(all)
+
+  if (!this.nosort) {
+    all = all.sort(this.nocase ?
alphasorti : alphasort) + } + + if (this.mark) { + // at *some* point we statted all of these + all = all.map(function (m) { + var sc = this.statCache[m] + if (!sc) return m + if (m.slice(-1) !== "/" && (Array.isArray(sc) || sc === 2)) { + return m + "/" + } + if (m.slice(-1) === "/") { + return m.replace(/\/$/, "") + } + return m + }) + } + + if (this.debug) console.error("emitting end", all) + + this.found = all + this.emit("end", all) +} + +function alphasorti (a, b) { + a = a.toLowerCase() + b = b.toLowerCase() + return alphasort(a, b) +} + +function alphasort (a, b) { + return a > b ? 1 : a < b ? -1 : 0 +} + +Glob.prototype.abort = function () { + this.aborted = true + this.emit("abort") +} + + +Glob.prototype._process = function (pattern, depth, index, cb) { + assert(this instanceof Glob) + cb = cb.bind(this) + if (this.aborted) return cb() + + if (depth > this.maxDepth) return cb() + + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === "string") { + n ++ + } + // now n is the index of the first one that is *not* a string. + + // see if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + prefix = pattern.join("/") + this._stat(prefix, function (exists, isDir) { + // either it's there, or it isn't. + // nothing more to do, either way. + if (exists) { + if (prefix.charAt(0) === "/" && !this.nomount) { + prefix = path.join(this.root, prefix) + } + this.matches[index] = this.matches[index] || {} + this.matches[index][prefix] = true + this.emit("match", prefix) + } + return cb() + }) + return + + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's "absolute" like /foo/bar, + // or "relative" like "../baz" + prefix = pattern.slice(0, n) + prefix = prefix.join("/") + break + } + + // get the list of entries. + var read + if (prefix === null) read = "." + else if (isAbsolute(prefix)) { + read = prefix = path.join("/", prefix) + if (this.debug) console.error('absolute: ', prefix, this.root, pattern) + } else read = prefix + + if (this.debug) console.error('readdir(%j)', read, this.cwd, this.root) + return this._readdir(read, function (er, entries) { + if (er) { + // not a directory! + // this means that, whatever else comes after this, it can never match + return cb() + } + + // globstar is special + if (pattern[n] === minimatch.GLOBSTAR) { + // test without the globstar, and with every child both below + // and replacing the globstar. + var s = [ pattern.slice(0, n).concat(pattern.slice(n + 1)) ] + entries.forEach(function (e) { + if (e.charAt(0) === "." && !this.dot) return + // instead of the globstar + s.push(pattern.slice(0, n).concat(e).concat(pattern.slice(n + 1))) + // below the globstar + s.push(pattern.slice(0, n).concat(e).concat(pattern.slice(n))) + }, this) + + // now asyncForEach over this + var l = s.length + , errState = null + s.forEach(function (gsPattern) { + this._process(gsPattern, depth + 1, index, function (er) { + if (errState) return + if (er) return cb(errState = er) + if (--l <= 0) return cb() + }) + }, this) + + return + } + + // not a globstar + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. 
+    var pn = pattern[n]
+    if (typeof pn === "string") {
+      var found = entries.indexOf(pn) !== -1
+      entries = found ? [pn] : []
+    } else {
+      var rawGlob = pattern[n]._glob
+        , dotOk = this.dot || rawGlob.charAt(0) === "."
+
+      entries = entries.filter(function (e) {
+        return (e.charAt(0) !== "." || dotOk) &&
+               (typeof pattern[n] === "string" && e === pattern[n] ||
+                e.match(pattern[n]))
+      })
+    }
+
+    // If n === pattern.length - 1, then there's no need for the extra stat
+    // *unless* the user has specified "mark" or "stat" explicitly.
+    // We know that they exist, since the readdir returned them.
+    if (n === pattern.length - 1 &&
+        !this.mark &&
+        !this.stat) {
+      entries.forEach(function (e) {
+        if (prefix) {
+          if (prefix !== "/") e = prefix + "/" + e
+          else e = prefix + e
+        }
+        if (e.charAt(0) === "/" && !this.nomount) {
+          e = path.join(this.root, e)
+        }
+
+        this.matches[index] = this.matches[index] || {}
+        this.matches[index][e] = true
+        this.emit("match", e)
+      }, this)
+      return cb.call(this)
+    }
+
+
+    // now test all the remaining entries as stand-ins for that part
+    // of the pattern.
+    var l = entries.length
+      , errState = null
+    if (l === 0) return cb() // no matches possible
+    entries.forEach(function (e) {
+      var p = pattern.slice(0, n).concat(e).concat(pattern.slice(n + 1))
+      this._process(p, depth + 1, index, function (er) {
+        if (errState) return
+        if (er) return cb(errState = er)
+        if (--l === 0) return cb.call(this)
+      }.bind(this))
+    }, this)
+  })
+
+}
+
+Glob.prototype._stat = function (f, cb) {
+  assert(this instanceof Glob)
+  var abs = f
+  if (f.charAt(0) === "/") {
+    abs = path.join(this.root, f)
+  } else if (this.changedCwd) {
+    abs = path.resolve(this.cwd, f)
+  }
+  if (this.debug) console.error('stat', [this.cwd, f, '=', abs])
+  if (f.length > this.maxLength) {
+    var er = new Error("Path name too long")
+    er.code = "ENAMETOOLONG"
+    er.path = f
+    return this._afterStat(f, abs, cb, er)
+  }
+
+  if (this.statCache.hasOwnProperty(f)) {
+    var exists = this.statCache[f]
+      , isDir = exists && (Array.isArray(exists) || exists === 2)
+    if (this.sync) return cb.call(this, !!exists, isDir)
+    return process.nextTick(cb.bind(this, !!exists, isDir))
+  }
+
+  if (this.sync) {
+    var er, stat
+    try {
+      stat = fs.statSync(abs)
+    } catch (e) {
+      er = e
+    }
+    this._afterStat(f, abs, cb, er, stat)
+  } else {
+    fs.stat(abs, this._afterStat.bind(this, f, abs, cb))
+  }
+}
+
+Glob.prototype._afterStat = function (f, abs, cb, er, stat) {
+  assert(this instanceof Glob)
+  var exists
+  if (er || !stat) {
+    exists = false
+  } else {
+    exists = stat.isDirectory() ? 2 : 1
+  }
+  this.statCache[f] = this.statCache[f] || exists
+  cb.call(this, !!exists, exists === 2)
+}
+
+Glob.prototype._readdir = function (f, cb) {
+  assert(this instanceof Glob)
+  var abs = f
+  if (f.charAt(0) === "/") {
+    abs = path.join(this.root, f)
+  } else if (isAbsolute(f)) {
+    abs = f
+  } else if (this.changedCwd) {
+    abs = path.resolve(this.cwd, f)
+  }
+
+  if (this.debug) console.error('readdir', [this.cwd, f, abs])
+  if (f.length > this.maxLength) {
+    var er = new Error("Path name too long")
+    er.code = "ENAMETOOLONG"
+    er.path = f
+    return this._afterReaddir(f, abs, cb, er)
+  }
+
+  if (this.statCache.hasOwnProperty(f)) {
+    var c = this.statCache[f]
+    if (Array.isArray(c)) {
+      if (this.sync) return cb.call(this, null, c)
+      return process.nextTick(cb.bind(this, null, c))
+    }
+
+    if (!c || c === 1) {
+      // either ENOENT or ENOTDIR
+      var code = c ? "ENOTDIR" : "ENOENT"
+        , er = new Error((c ?
"Not a directory" : "Not found") + ": " + f) + er.path = f + er.code = code + if (this.debug) console.error(f, er) + if (this.sync) return cb.call(this, er) + return process.nextTick(cb.bind(this, er)) + } + + // at this point, c === 2, meaning it's a dir, but we haven't + // had to read it yet, or c === true, meaning it's *something* + // but we don't have any idea what. Need to read it, either way. + } + + if (this.sync) { + var er, entries + try { + entries = fs.readdirSync(abs) + } catch (e) { + er = e + } + return this._afterReaddir(f, abs, cb, er, entries) + } + + fs.readdir(abs, this._afterReaddir.bind(this, f, abs, cb)) +} + +Glob.prototype._afterReaddir = function (f, abs, cb, er, entries) { + assert(this instanceof Glob) + if (entries && !er) { + this.statCache[f] = entries + // if we haven't asked to stat everything for suresies, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. This also gets us one step + // further into ELOOP territory. + if (!this.mark && !this.stat) { + entries.forEach(function (e) { + if (f === "/") e = f + e + else e = f + "/" + e + this.statCache[e] = true + }, this) + } + + return cb.call(this, er, entries) + } + + // now handle errors, and cache the information + if (er) switch (er.code) { + case "ENOTDIR": // totally normal. means it *does* exist. + this.statCache[f] = 1 + return cb.call(this, er) + case "ENOENT": // not terribly unusual + case "ELOOP": + case "ENAMETOOLONG": + case "UNKNOWN": + this.statCache[f] = false + return cb.call(this, er) + default: // some unusual error. Treat as failure. + this.statCache[f] = false + if (this.strict) this.emit("error", er) + if (!this.silent) console.error("glob error", er) + return cb.call(this, er) + } +} + +var isAbsolute = process.platform === "win32" ? absWin : absUnix + +function absWin (p) { + if (absUnix(p)) return true + // pull off the device/UNC bit from a windows path. + // from node's lib/path.js + var splitDeviceRe = + /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?([\\\/])?/ + , result = splitDeviceRe.exec(p) + , device = result[1] || '' + , isUnc = device && device.charAt(1) !== ':' + , isAbsolute = !!result[2] || isUnc // UNC paths are always absolute + + return isAbsolute +} + +function absUnix (p) { + return p.charAt(0) === "/" || p === "" +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/.travis.yml b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/.travis.yml new file mode 100644 index 0000000000..f1d0f13c8a --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - 0.4 + - 0.6 diff --git a/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/LICENSE new file mode 100644 index 0000000000..05a4010949 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/LICENSE @@ -0,0 +1,23 @@ +Copyright 2009, 2010, 2011 Isaac Z. Schlueter. +All rights reserved. 
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/README.md b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/README.md
new file mode 100644
index 0000000000..d5f97234cd
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/README.md
@@ -0,0 +1,212 @@
+# minimatch
+
+A minimal matching utility.
+
+[![Build Status](https://secure.travis-ci.org/isaacs/minimatch.png)](http://travis-ci.org/isaacs/minimatch)
+
+
+This is the matching library used internally by npm.
+
+Eventually, it will replace the C binding in node-glob.
+
+It works by converting glob expressions into JavaScript `RegExp`
+objects.
+
+## Usage
+
+```javascript
+var minimatch = require("minimatch")
+
+minimatch("bar.foo", "*.foo") // true!
+minimatch("bar.foo", "*.bar") // false!
+```
+
+## Features
+
+Supports these glob features:
+
+* Brace Expansion
+* Extended glob matching
+* "Globstar" `**` matching
+
+See:
+
+* `man sh`
+* `man bash`
+* `man 3 fnmatch`
+* `man 5 gitignore`
+
+### Comparisons to other fnmatch/glob implementations
+
+While strict compliance with the existing standards is a worthwhile
+goal, some discrepancies exist between minimatch and other
+implementations, and are intentional.
+
+If the pattern starts with a `!` character, then it is negated. Set the
+`nonegate` flag to suppress this behavior, and treat leading `!`
+characters normally. This is perhaps relevant if you wish to start the
+pattern with a negative extglob pattern like `!(a|B)`. Multiple `!`
+characters at the start of a pattern will negate the pattern multiple
+times.
+
+If a pattern starts with `#`, then it is treated as a comment, and
+will not match anything. Use `\#` to match a literal `#` at the
+start of a line, or set the `nocomment` flag to suppress this behavior.
+
+The double-star character `**` is supported by default, unless the
+`noglobstar` flag is set. This is supported in the manner of bsdglob
+and bash 4.1, where `**` only has special significance if it is the only
+thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but
+`a/**b` will not. **Note that this is different from the way that `**` is
+handled by ruby's `Dir` class.**
+
+If an escaped pattern has no matches, and the `nonull` flag is set,
+then minimatch.match returns the pattern as-provided, rather than
+interpreting the character escapes. For example,
+`minimatch.match([], "\\*a\\?", { nonull: true })` will return
+`"\\*a\\?"` rather than `"*a?"`.
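+
+A sketch of that behavior, using the `minimatch.match` function
+documented below:
+
+```javascript
+// nothing matches, and nonull is set, so the pattern comes back
+// exactly as provided, escapes and all:
+minimatch.match([], "\\*a\\?", { nonull: true })  // => ["\\*a\\?"]
+
+// without nonull, an unmatched pattern just yields an empty list:
+minimatch.match([], "\\*a\\?")                    // => []
+```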
+
+If brace expansion is not disabled, then it is performed before any
+other interpretation of the glob pattern. Thus, a pattern like
+`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded
+**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are
+checked for validity. Since those two are valid, matching proceeds.
+
+
+## Minimatch Class
+
+Create a minimatch object by instantiating the `minimatch.Minimatch` class.
+
+```javascript
+var Minimatch = require("minimatch").Minimatch
+var mm = new Minimatch(pattern, options)
+```
+
+### Properties
+
+* `pattern` The original pattern the minimatch object represents.
+* `options` The options supplied to the constructor.
+* `set` A 2-dimensional array of regexp or string expressions.
+  Each row in the
+  array corresponds to a brace-expanded pattern. Each item in the row
+  corresponds to a single path-part. For example, the pattern
+  `{a,b/c}/d` would expand to a set of patterns like:
+
+    [ [ a, d ]
+    , [ b, c, d ] ]
+
+  If a portion of the pattern doesn't have any "magic" in it
+  (that is, it's something like `"foo"` rather than `fo*o?`), then it
+  will be left as a string rather than converted to a regular
+  expression.
+
+* `regexp` Created by the `makeRe` method. A single regular expression
+  expressing the entire pattern. This is useful in cases where you wish
+  to use the pattern somewhat like `fnmatch(3)` with `FNM_PATHNAME` enabled.
+* `negate` True if the pattern is negated.
+* `comment` True if the pattern is a comment.
+* `empty` True if the pattern is `""`.
+
+### Methods
+
+* `makeRe` Generate the `regexp` member if necessary, and return it.
+  Will return `false` if the pattern is invalid.
+* `match(fname)` Return true if the filename matches the pattern, or
+  false otherwise.
+* `matchOne(fileArray, patternArray, partial)` Take a `/`-split
+  filename, and match it against a single row in the `regExpSet`. This
+  method is mainly for internal use, but is exposed so that it can be
+  used by a glob-walker that needs to avoid excessive filesystem calls.
+
+All other methods are internal, and will be called as necessary.
+
+## Functions
+
+The top-level exported function has a `cache` property, which is an LRU
+cache set to store 100 items. So, calling these methods repeatedly
+with the same pattern and options will use the same Minimatch object,
+saving the cost of parsing it multiple times.
+
+### minimatch(path, pattern, options)
+
+Main export. Tests a path against the pattern using the options.
+
+```javascript
+var isJS = minimatch(file, "*.js", { matchBase: true })
+```
+
+### minimatch.filter(pattern, options)
+
+Returns a function that tests its
+supplied argument, suitable for use with `Array.filter`. Example:
+
+```javascript
+var javascripts = fileList.filter(minimatch.filter("*.js", {matchBase: true}))
+```
+
+### minimatch.match(list, pattern, options)
+
+Match against the list of
+files, in the style of fnmatch or glob. If nothing is matched, an
+empty list is returned, unless `{ nonull: true }` is in the options,
+in which case a list containing the pattern itself is returned.
+
+```javascript
+var javascripts = minimatch.match(fileList, "*.js", {matchBase: true})
+```
+
+### minimatch.makeRe(pattern, options)
+
+Make a regular expression object from the pattern.
+
+## Options
+
+All options are `false` by default.
+
+### debug
+
+Dump a ton of stuff to stderr.
+
+### nobrace
+
+Do not expand `{a,b}` and `{1..3}` brace sets.
+
+### noglobstar
+
+Disable `**` matching against multiple folder names.
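+
+For example (a sketch; the paths here are made up):
+
+```javascript
+minimatch("a/x/y/b", "a/**/b")                       // true: ** spans directories
+minimatch("a/x/y/b", "a/**/b", { noglobstar: true }) // false: ** acts like a plain *
+minimatch("a/x/b", "a/**/b", { noglobstar: true })   // true: one path part is fine
+```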
+
+### dot
+
+Allow patterns to match filenames starting with a period, even if
+the pattern does not explicitly have a period in that spot.
+
+Note that by default, `a/**/b` will **not** match `a/.d/b`, unless `dot`
+is set.
+
+### noext
+
+Disable "extglob" style patterns like `+(a|b)`.
+
+### nocase
+
+Perform a case-insensitive match.
+
+### nonull
+
+When set and a match is not found by `minimatch.match`, return a list
+containing the pattern itself. When unset, an empty list is returned
+if there are no matches.
+
+### matchBase
+
+If set, then patterns without slashes will be matched
+against the basename of the path if it contains slashes. For example,
+`a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`.
+
+### nocomment
+
+Suppress the behavior of treating `#` at the start of a pattern as a
+comment.
+
+### nonegate
+
+Suppress the behavior of treating a leading `!` character as negation.
diff --git a/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/minimatch.js b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/minimatch.js
new file mode 100644
index 0000000000..737c82e719
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/minimatch.js
@@ -0,0 +1,980 @@
+module.exports = minimatch
+minimatch.Minimatch = Minimatch
+
+var LRU = require("lru-cache")
+  , cache = minimatch.cache = new LRU(100)
+  , GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
+  , pathSplit = process.platform === "win32" ? /\\|\// : "/"
+
+var path = require("path")
+  // any single thing other than /
+  // don't need to escape / when using new RegExp()
+  , qmark = "[^/]"
+
+  // * => any number of characters
+  , star = qmark + "*?"
+
+  // ** when dots are allowed. Anything goes, except .. and .
+  // not (^ or / followed by one or two dots followed by $ or /),
+  // followed by anything, any number of times.
+  , twoStarDot = "(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?"
+
+  // not a ^ or / followed by a dot,
+  // followed by anything, any number of times.
+  , twoStarNoDot = "(?:(?!(?:\\\/|^)\\.).)*?"
+
+  // characters that need to be escaped in RegExp.
+  , reSpecials = charSet("().*{}+?[]^$\\!")
+
+// "abc" -> { a:true, b:true, c:true }
+function charSet (s) {
+  return s.split("").reduce(function (set, c) {
+    set[c] = true
+    return set
+  }, {})
+}
+
+// normalizes slashes.
+var slashSplit = /\/+/
+
+minimatch.monkeyPatch = monkeyPatch
+function monkeyPatch () {
+  var desc = Object.getOwnPropertyDescriptor(String.prototype, "match")
+  var orig = desc.value
+  desc.value = function (p) {
+    if (p instanceof Minimatch) return p.match(this)
+    return orig.call(this, p)
+  }
+  Object.defineProperty(String.prototype, "match", desc)
+}
+
+minimatch.filter = filter
+function filter (pattern, options) {
+  options = options || {}
+  return function (p, i, list) {
+    return minimatch(p, pattern, options)
+  }
+}
+
+function minimatch (p, pattern, options) {
+  if (typeof pattern !== "string") {
+    throw new TypeError("glob pattern string required")
+  }
+
+  if (!options) options = {}
+
+  // shortcut: comments match nothing.
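+  // e.g. minimatch("#foo", "#foo") is false, because the pattern is
+  // parsed as a comment (set nocomment to get a literal match).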
+  if (!options.nocomment && pattern.charAt(0) === "#") {
+    return false
+  }
+
+  // "" only matches ""
+  if (pattern.trim() === "") return p === ""
+
+  return new Minimatch(pattern, options).match(p)
+}
+
+function Minimatch (pattern, options) {
+  if (!(this instanceof Minimatch)) {
+    return new Minimatch(pattern, options, cache)
+  }
+
+  if (typeof pattern !== "string") {
+    throw new TypeError("glob pattern string required")
+  }
+
+  if (!options) options = {}
+  pattern = pattern.trim()
+
+  // lru storage.
+  // these things aren't particularly big, but walking down the string
+  // and turning it into a regexp can get pretty costly.
+  var cacheKey = pattern + "\n" + Object.keys(options).filter(function (k) {
+    return options[k]
+  }).join(":")
+  var cached = minimatch.cache.get(cacheKey)
+  if (cached) return cached
+  minimatch.cache.set(cacheKey, this)
+
+  this.options = options
+  this.set = []
+  this.pattern = pattern
+  this.regexp = null
+  this.negate = false
+  this.comment = false
+  this.empty = false
+
+  // make the set of regexps etc.
+  this.make()
+}
+
+Minimatch.prototype.make = make
+function make () {
+  // don't do it more than once.
+  if (this._made) return
+
+  var pattern = this.pattern
+  var options = this.options
+
+  // empty patterns and comments match nothing.
+  if (!options.nocomment && pattern.charAt(0) === "#") {
+    this.comment = true
+    return
+  }
+  if (!pattern) {
+    this.empty = true
+    return
+  }
+
+  // step 1: figure out negation, etc.
+  this.parseNegate()
+
+  // step 2: expand braces
+  var set = this.globSet = this.braceExpand()
+
+  if (options.debug) console.error(this.pattern, set)
+
+  // step 3: now we have a set, so turn each one into a series of path-portion
+  // matching patterns.
+  // These will be regexps, except in the case of "**", which is
+  // set to the GLOBSTAR object for globstar behavior,
+  // and will not contain any / characters
+  set = this.globParts = set.map(function (s) {
+    return s.split(slashSplit)
+  })
+
+  if (options.debug) console.error(this.pattern, set)
+
+  // glob --> regexps
+  set = set.map(function (s, si, set) {
+    return s.map(this.parse, this)
+  }, this)
+
+  if (options.debug) console.error(this.pattern, set)
+
+  // filter out everything that didn't compile properly.
+  set = set.filter(function (s) {
+    return -1 === s.indexOf(false)
+  })
+
+  if (options.debug) console.error(this.pattern, set)
+
+  this.set = set
+}
+
+Minimatch.prototype.parseNegate = parseNegate
+function parseNegate () {
+  var pattern = this.pattern
+    , negate = false
+    , options = this.options
+    , negateOffset = 0
+
+  if (options.nonegate) return
+
+  for ( var i = 0, l = pattern.length
+      ; i < l && pattern.charAt(i) === "!"
+      ; i ++) {
+    negate = !negate
+    negateOffset ++
+  }
+
+  if (negateOffset) this.pattern = pattern.substr(negateOffset)
+  this.negate = negate
+}
+
+// Brace expansion:
+// a{b,c}d -> abd acd
+// a{b,}c -> abc ac
+// a{0..3}d -> a0d a1d a2d a3d
+// a{b,c{d,e}f}g -> abg acdfg acefg
+// a{b,c}d{e,f}g -> abdeg abdfg acdeg acdfg
+//
+// Invalid sets are not expanded.
+// a{2..}b -> a{2..}b
+// a{b}c -> a{b}c
+minimatch.braceExpand = function (pattern, options) {
+  return new Minimatch(pattern, options).braceExpand()
+}
+
+Minimatch.prototype.braceExpand = braceExpand
+function braceExpand (pattern, options) {
+  options = options || this.options
+  pattern = typeof pattern === "undefined"
+    ?
this.pattern : pattern
+
+  if (typeof pattern === "undefined") {
+    throw new Error("undefined pattern")
+  }
+
+  if (options.nobrace ||
+      !pattern.match(/\{.*\}/)) {
+    // shortcut. no need to expand.
+    return [pattern]
+  }
+
+  var escaping = false
+
+  // examples and comments refer to this crazy pattern:
+  // a{b,c{d,e},{f,g}h}x{y,z}
+  // expected:
+  // abxy
+  // abxz
+  // acdxy
+  // acdxz
+  // acexy
+  // acexz
+  // afhxy
+  // afhxz
+  // aghxy
+  // aghxz
+
+  // everything before the first \{ is just a prefix.
+  // So, we pluck that off, and work with the rest,
+  // and then prepend it to everything we find.
+  if (pattern.charAt(0) !== "{") {
+    // console.error(pattern)
+    var prefix = null
+    for (var i = 0, l = pattern.length; i < l; i ++) {
+      var c = pattern.charAt(i)
+      // console.error(i, c)
+      if (c === "\\") {
+        escaping = !escaping
+      } else if (c === "{" && !escaping) {
+        prefix = pattern.substr(0, i)
+        break
+      }
+    }
+
+    // actually no sets, all { were escaped.
+    if (prefix === null) {
+      // console.error("no sets")
+      return [pattern]
+    }
+
+    var tail = braceExpand(pattern.substr(i), options)
+    return tail.map(function (t) {
+      return prefix + t
+    })
+  }
+
+  // now we have something like:
+  // {b,c{d,e},{f,g}h}x{y,z}
+  // walk through the set, expanding each part, until
+  // the set ends. then, we'll expand the suffix.
+  // If the set only has a single member, then we'll put the {} back
+
+  // first, handle numeric sets, since they're easier
+  var numset = pattern.match(/^\{(-?[0-9]+)\.\.(-?[0-9]+)\}/)
+  if (numset) {
+    // console.error("numset", numset[1], numset[2])
+    var suf = braceExpand(pattern.substr(numset[0].length), options)
+      , start = +numset[1]
+      , end = +numset[2]
+      , inc = start > end ? -1 : 1
+      , set = []
+    for (var i = start; i != (end + inc); i += inc) {
+      // append all the suffixes
+      for (var ii = 0, ll = suf.length; ii < ll; ii ++) {
+        set.push(i + suf[ii])
+      }
+    }
+    return set
+  }
+
+  // ok, walk through the set
+  // We hope, somewhat optimistically, that there
+  // will be a } at the end.
+  // If the closing brace isn't found, then the pattern is
+  // interpreted as braceExpand("\\" + pattern) so that
+  // the leading \{ will be interpreted literally.
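+  // e.g. for "{b,c{d,e}}x" the walk below collects the members
+  // ["b", "c{d,e}"], expands each member recursively, and attaches
+  // the expanded suffix "x"; an unclosed set like "{a,b" is retried
+  // as braceExpand("\\{a,b") so the brace matches literally.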
+ var i = 1 // skip the \{ + , depth = 1 + , set = [] + , member = "" + , sawEnd = false + , escaping = false + + function addMember () { + set.push(member) + member = "" + } + + // console.error("Entering for") + FOR: for (i = 1, l = pattern.length; i < l; i ++) { + var c = pattern.charAt(i) + // console.error("", i, c) + + if (escaping) { + escaping = false + member += "\\" + c + } else { + switch (c) { + case "\\": + escaping = true + continue + + case "{": + depth ++ + member += "{" + continue + + case "}": + depth -- + // if this closes the actual set, then we're done + if (depth === 0) { + addMember() + // pluck off the close-brace + i ++ + break FOR + } else { + member += c + continue + } + + case ",": + if (depth === 1) { + addMember() + } else { + member += c + } + continue + + default: + member += c + continue + } // switch + } // else + } // for + + // now we've either finished the set, and the suffix is + // pattern.substr(i), or we have *not* closed the set, + // and need to escape the leading brace + if (depth !== 0) { + // console.error("didn't close", pattern) + return braceExpand("\\" + pattern, options) + } + + // x{y,z} -> ["xy", "xz"] + // console.error("set", set) + // console.error("suffix", pattern.substr(i)) + var suf = braceExpand(pattern.substr(i), options) + // ["b", "c{d,e}","{f,g}h"] -> + // [["b"], ["cd", "ce"], ["fh", "gh"]] + var addBraces = set.length === 1 + // console.error("set pre-expanded", set) + set = set.map(function (p) { + return braceExpand(p, options) + }) + // console.error("set expanded", set) + + + // [["b"], ["cd", "ce"], ["fh", "gh"]] -> + // ["b", "cd", "ce", "fh", "gh"] + set = set.reduce(function (l, r) { + return l.concat(r) + }) + + if (addBraces) { + set = set.map(function (s) { + return "{" + s + "}" + }) + } + + // now attach the suffixes. + var ret = [] + for (var i = 0, l = set.length; i < l; i ++) { + for (var ii = 0, ll = suf.length; ii < ll; ii ++) { + ret.push(set[i] + suf[ii]) + } + } + return ret +} + +// parse a component of the expanded set. +// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. +Minimatch.prototype.parse = parse +var SUBPARSE = {} +function parse (pattern, isSub) { + var options = this.options + + // shortcuts + if (!options.noglobstar && pattern === "**") return GLOBSTAR + if (pattern === "") return "" + + var re = "" + , hasMagic = false + , escaping = false + // ? => one single character + , patternListStack = [] + , plType + , stateChar + , inClass = false + , reClassStart = -1 + , classStart = -1 + // . and .. never match anything that doesn't start with ., + // even when options.dot is set. + , patternStart = pattern.charAt(0) === "." ? "" // anything + // not (start or / followed by . or .. followed by / or end) + : options.dot ? "(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))" + : "(?!\\.)" + + function clearStateChar () { + if (stateChar) { + // we had some state-tracking character + // that wasn't consumed by this pass. 
+ switch (stateChar) { + case "*": + re += star + hasMagic = true + break + case "?": + re += qmark + hasMagic = true + break + default: + re += "\\"+stateChar + break + } + stateChar = false + } + } + + for ( var i = 0, len = pattern.length, c + ; (i < len) && (c = pattern.charAt(i)) + ; i ++ ) { + + if (options.debug) { + console.error("%s\t%s %s %j", pattern, i, re, c) + } + + // skip over any that are escaped. + if (escaping && reSpecials[c]) { + re += "\\" + c + escaping = false + continue + } + + SWITCH: switch (c) { + case "/": + // completely not allowed, even escaped. + // Should already be path-split by now. + return false + + case "\\": + clearStateChar() + escaping = true + continue + + // the various stateChar values + // for the "extglob" stuff. + case "?": + case "*": + case "+": + case "@": + case "!": + if (options.debug) { + console.error("%s\t%s %s %j <-- stateChar", pattern, i, re, c) + } + + // all of those are literals inside a class, except that + // the glob [!a] means [^a] in regexp + if (inClass) { + if (c === "!" && i === classStart + 1) c = "^" + re += c + continue + } + + // if we already have a stateChar, then it means + // that there was something like ** or +? in there. + // Handle the stateChar, then proceed with this one. + clearStateChar() + stateChar = c + // if extglob is disabled, then +(asdf|foo) isn't a thing. + // just clear the statechar *now*, rather than even diving into + // the patternList stuff. + if (options.noext) clearStateChar() + continue + + case "(": + if (inClass) { + re += "(" + continue + } + + if (!stateChar) { + re += "\\(" + continue + } + + plType = stateChar + patternListStack.push({ type: plType + , start: i - 1 + , reStart: re.length }) + re += stateChar === "!" ? "(?!" : "(?:" + stateChar = false + continue + + case ")": + if (inClass || !patternListStack.length) { + re += "\\)" + continue + } + + hasMagic = true + re += ")" + plType = patternListStack.pop().type + switch (plType) { + case "?": + case "+": + case "*": re += plType + case "!": // already handled by the start + case "@": break // the default anyway + } + continue + + case "|": + if (inClass || !patternListStack.length || escaping) { + re += "\\|" + escaping = false + continue + } + + re += "|" + continue + + // these are mostly the same in regexp and glob + case "[": + // swallow any state-tracking char before the [ + clearStateChar() + + if (inClass) { + re += "\\" + c + continue + } + + inClass = true + classStart = i + reClassStart = re.length + re += c + continue + + case "]": + // a right bracket shall lose its special + // meaning and represent itself in + // a bracket expression if it occurs + // first in the list. -- POSIX.2 2.8.3.2 + if (i === classStart + 1 || !inClass) { + re += "\\" + c + escaping = false + continue + } + + // finish up the class. + hasMagic = true + inClass = false + re += c + continue + + default: + // swallow any state char that wasn't consumed + clearStateChar() + + if (escaping) { + // no need + escaping = false + } else if (reSpecials[c] + && !(c === "^" && inClass)) { + re += "\\" + } + + re += c + + } // switch + } // for + + + // handle the case where we left a class open. + // "[abc" is valid, equivalent to "\[abc" + if (inClass) { + // split where the last [ was, and escape it + // this is a huge pita. 
We now have to re-walk + // the contents of the would-be class to re-translate + // any characters that were passed through as-is + var cs = pattern.substr(classStart + 1) + , sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + "\\[" + sp[0] + hasMagic = hasMagic || sp[1] + } + + // handle the case where we had a +( thing at the *end* + // of the pattern. + // each pattern list stack adds 3 chars, and we need to go through + // and escape any | chars that were passed through as-is for the regexp. + // Go through and escape them, taking care not to double-escape any + // | chars that were already escaped. + var pl + while (pl = patternListStack.pop()) { + var tail = re.slice(pl.reStart + 3) + // maybe some even number of \, then maybe 1 \, followed by a | + tail = tail.replace(/((?:\\{2})*)(\\?)\|/g, function (_, $1, $2) { + if (!$2) { + // the | isn't already escaped, so escape it. + $2 = "\\" + } + + // need to escape all those slashes *again*, without escaping the + // one that we need for escaping the | character. As it works out, + // escaping an even number of slashes can be done by simply repeating + // it exactly after itself. That's why this trick works. + // + // I am sorry that you have to see this. + return $1 + $1 + $2 + "|" + }) + + // console.error("tail=%j\n %s", tail, tail) + var t = pl.type === "*" ? star + : pl.type === "?" ? qmark + : "\\" + pl.type + + hasMagic = true + re = re.slice(0, pl.reStart) + + t + "\\(" + + tail + } + + // handle trailing things that only matter at the very end. + clearStateChar() + if (escaping) { + // trailing \\ + re += "\\\\" + } + + // only need to apply the nodot start if the re starts with + // something that could conceivably capture a dot + var addPatternStart = false + switch (re.charAt(0)) { + case ".": + case "[": + case "(": addPatternStart = true + } + + // if the re is not "" at this point, then we need to make sure + // it doesn't match against an empty path part. + // Otherwise a/* will match a/, which it should not. + if (re !== "" && hasMagic) re = "(?=.)" + re + + if (addPatternStart) re = patternStart + re + + // parsing just a piece of a larger pattern. + if (isSub === SUBPARSE) { + return [ re, hasMagic ] + } + + // skip the regexp for non-magical patterns + // unescape anything in it, though, so that it'll be + // an exact match against a file etc. + if (!hasMagic) { + return globUnescape(pattern) + } + + var flags = options.nocase ? "i" : "" + , regExp = new RegExp("^" + re + "$", flags) + + regExp._glob = pattern + regExp._src = re + + return regExp +} + +minimatch.makeRe = function (pattern, options) { + return new Minimatch(pattern, options || {}).makeRe() +} + +Minimatch.prototype.makeRe = makeRe +function makeRe () { + if (this.regexp || this.regexp === false) return this.regexp + + // at this point, this.set is a 2d array of partial + // pattern strings, or "**". + // + // It's better to use .match(). This function shouldn't + // be used, really, but it's pretty convenient sometimes, + // when you just want to work with a regex. + var set = this.set + + if (!set.length) return this.regexp = false + var options = this.options + + var twoStar = options.noglobstar ? star + : options.dot ? twoStarDot + : twoStarNoDot + , flags = options.nocase ? "i" : "" + + var re = set.map(function (pattern) { + return pattern.map(function (p) { + return (p === GLOBSTAR) ? twoStar + : (typeof p === "string") ? 
regExpEscape(p) + : p._src + }).join("\\\/") + }).join("|") + + // must match entire pattern + // ending in a * or ** will make it less strict. + re = "^" + re + "$" + + // can match anything, as long as it's not this. + if (this.negate) re = "^(?!" + re + ").*$" + + try { + return this.regexp = new RegExp(re, flags) + } catch (ex) { + return this.regexp = false + } +} + +minimatch.match = function (list, pattern, options) { + var mm = new Minimatch(pattern, options) + list = list.filter(function (f) { + return mm.match(f) + }) + if (options.nonull && !list.length) { + list.push(pattern) + } + return list +} + +Minimatch.prototype.match = match +function match (f, partial) { + // console.error("match", f, this.pattern) + // short-circuit in the case of busted things. + // comments, etc. + if (this.comment) return false + if (this.empty) return f === "" + + if (f === "/" && partial) return true + + var options = this.options + + // first, normalize any slash-separated path parts. + // f = path.normalize(f) + + // windows: need to use /, not \ + // On other platforms, \ is a valid (albeit bad) filename char. + if (process.platform === "win32") { + f = f.split("\\").join("/") + } + + // treat the test path as a set of pathparts. + f = f.split(slashSplit) + if (options.debug) { + console.error(this.pattern, "split", f) + } + + // just ONE of the pattern sets in this.set needs to match + // in order for it to be valid. If negating, then just one + // match means that we have failed. + // Either way, return on the first hit. + + var set = this.set + // console.error(this.pattern, "set", set) + + for (var i = 0, l = set.length; i < l; i ++) { + var pattern = set[i] + var hit = this.matchOne(f, pattern, partial) + if (hit) { + return !this.negate + } + } + + // didn't get any hits. this is success if it's a negative + // pattern, failure otherwise. + return this.negate +} + +// set partial to true to test if, for example, +// "/a/b" matches the start of "/*/b/*/d" +// Partial means, if you run out of file before you run +// out of pattern, then that's fine, as long as all +// the parts match. +Minimatch.prototype.matchOne = function (file, pattern, partial) { + var options = this.options + + if (options.debug) { + console.error("matchOne", + { "this": this + , file: file + , pattern: pattern }) + } + + if (options.matchBase && pattern.length === 1) { + file = path.basename(file.join("/")).split("/") + } + + if (options.debug) { + console.error("matchOne", file.length, pattern.length) + } + + for ( var fi = 0 + , pi = 0 + , fl = file.length + , pl = pattern.length + ; (fi < fl) && (pi < pl) + ; fi ++, pi ++ ) { + + if (options.debug) { + console.error("matchOne loop") + } + var p = pattern[pi] + , f = file[fi] + + if (options.debug) { + console.error(pattern, p, f) + } + + // should be impossible. + // some invalid regexp stuff in the set. + if (p === false) return false + + if (p === GLOBSTAR) { + // "**" + // a/**/b/**/c would match the following: + // a/b/x/y/z/c + // a/x/y/z/b/c + // a/b/x/b/x/c + // a/b/c + // To do this, take the rest of the pattern after + // the **, and see if it would match the file remainder. + // If so, return success. + // If not, the ** "swallows" a segment, and try again. + // This is recursively awful. 
+ // a/b/x/y/z/c + // - a matches a + // - doublestar + // - matchOne(b/x/y/z/c, b/**/c) + // - b matches b + // - doublestar + // - matchOne(x/y/z/c, c) -> no + // - matchOne(y/z/c, c) -> no + // - matchOne(z/c, c) -> no + // - matchOne(c, c) yes, hit + var fr = fi + , pr = pi + 1 + if (pr === pl) { + // a ** at the end will just swallow the rest. + // We have found a match. + // however, it will not swallow /.x, unless + // options.dot is set. + // . and .. are *never* matched by **, for explosively + // exponential reasons. + for ( ; fi < fl; fi ++) { + if (file[fi] === "." || file[fi] === ".." || + (!options.dot && file[fi].charAt(0) === ".")) return false + } + return true + } + + // ok, let's see if we can swallow whatever we can. + WHILE: while (fr < fl) { + var swallowee = file[fr] + if (swallowee === "." || swallowee === ".." || + (!options.dot && swallowee.charAt(0) === ".")) { + // console.error("dot detected!") + break WHILE + } + + // XXX remove this slice. Just pass the start index. + if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { + // found a match. + return true + } else { + // ** swallows a segment, and continue. + fr ++ + } + } + // no match was found. + // However, in partial mode, we can't say this is necessarily over. + // If there's more *pattern* left, then + if (partial) { + // ran out of file + // console.error("\n>>> no match, partial?", file, fr, pattern, pr) + if (fr === fl) return true + } + return false + } + + // something other than ** + // non-magic patterns just have to match exactly + // patterns with magic have been turned into regexps. + var hit + if (typeof p === "string") { + if (options.nocase) { + hit = f.toLowerCase() === p.toLowerCase() + } else { + hit = f === p + } + if (options.debug) { + console.error("string match", p, f, hit) + } + } else { + hit = f.match(p) + if (options.debug) { + console.error("pattern match", p, f, hit) + } + } + + if (!hit) return false + } + + // Note: ending in / means that we'll get a final "" + // at the end of the pattern. This can only match a + // corresponding "" at the end of the file. + // If the file ends in /, then it can only match a + // a pattern that ends in /, unless the pattern just + // doesn't have any more for it. But, a/b/ should *not* + // match "a/b/*", even though "" matches against the + // [^/]*? pattern, except in partial mode, where it might + // simply not be reached yet. + // However, a/b/ should still satisfy a/* + + // now either we fell off the end of the pattern, or we're done. + if (fi === fl && pi === pl) { + // ran out of pattern and filename at the same time. + // an exact hit! + return true + } else if (fi === fl) { + // ran out of file, but still had pattern left. + // this is ok if we're doing the match as part of + // a glob fs traversal. + return partial + } else if (pi === pl) { + // ran out of pattern, still have file left. + // this is only acceptable if we're on the very last + // empty segment of a file with a trailing slash. + // a/* should match a/b/ + var emptyFileEnd = (fi === fl - 1) && (file[fi] === "") + return emptyFileEnd + } + + // should be unreachable. 
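+  // (the loop above only exits once fi === fl or pi === pl, so one of
+  // the three branches above must already have returned)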
+ throw new Error("wtf?") +} + + +// replace stuff like \* with * +function globUnescape (s) { + return s.replace(/\\(.)/g, "$1") +} + + +function regExpEscape (s) { + return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&") +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/package.json b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/package.json new file mode 100644 index 0000000000..198273eff4 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/glob/node_modules/minimatch/package.json @@ -0,0 +1,44 @@ +{ + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me", + "url": "http://blog.izs.me" + }, + "name": "minimatch", + "description": "a glob matcher in javascript", + "version": "0.2.0", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/minimatch.git" + }, + "main": "minimatch.js", + "scripts": { + "test": "tap test" + }, + "engines": { + "node": "*" + }, + "dependencies": { + "lru-cache": "~1.0.5" + }, + "devDependencies": { + "tap": "~0.1.3" + }, + "licenses": [ + { + "type": "MIT", + "url": "http://github.com/isaacs/minimatch/raw/master/LICENSE" + } + ], + "_npmUser": { + "name": "isaacs", + "email": "i@izs.me" + }, + "_id": "minimatch@0.2.0", + "optionalDependencies": {}, + "_engineSupported": true, + "_npmVersion": "1.1.8", + "_nodeVersion": "v0.7.7-pre", + "_defaultsLoaded": true, + "_from": "minimatch@0.2" +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/glob/package.json b/deps/npm/node_modules/node-gyp/node_modules/glob/package.json new file mode 100644 index 0000000000..708953f928 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/glob/package.json @@ -0,0 +1,42 @@ +{ + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me", + "url": "http://blog.izs.me/" + }, + "name": "glob", + "description": "a little globber", + "version": "3.1.6", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-glob.git" + }, + "main": "glob.js", + "engines": { + "node": "*" + }, + "dependencies": { + "minimatch": "0.2", + "graceful-fs": "~1.1.2", + "inherits": "1" + }, + "devDependencies": { + "tap": "~0.2.3", + "mkdirp": "0", + "rimraf": "1" + }, + "scripts": { + "test": "tap test/*.js" + }, + "_npmUser": { + "name": "isaacs", + "email": "i@izs.me" + }, + "_id": "glob@3.1.6", + "optionalDependencies": {}, + "_engineSupported": true, + "_npmVersion": "1.1.8", + "_nodeVersion": "v0.7.7-pre", + "_defaultsLoaded": true, + "_from": "glob@3" +} diff --git a/deps/npm/node_modules/node-gyp/package.json b/deps/npm/node_modules/node-gyp/package.json new file mode 100644 index 0000000000..27133133a7 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/package.json @@ -0,0 +1,57 @@ +{ + "name": "node-gyp", + "description": "Node.js native addon build tool", + "keywords": [ + "native", + "addon", + "module", + "c", + "c++", + "bindings", + "gyp" + ], + "version": "0.3.5", + "installVersion": 5, + "author": { + "name": "Nathan Rajlich", + "email": "nathan@tootallnate.net", + "url": "http://tootallnate.net" + }, + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/node-gyp.git" + }, + "preferGlobal": true, + "bin": { + "node-gyp": "./bin/node-gyp.js" + }, + "main": "./lib/node-gyp.js", + "dependencies": { + "ansi": "0.0.x", + "glob": "3", + "fstream": "~0.1.13", + "minimatch": "~0.1.4", + "mkdirp": "0.3.0", + "nopt": "1", + "request": "2.9.x", + "rimraf": "2", + "semver": "1", + "tar": "~0.1.12", + "which": "1" + }, + "engines": { + "node": 
">= 0.6.0" + }, + "_npmUser": { + "name": "isaacs", + "email": "i@izs.me" + }, + "_id": "node-gyp@0.3.5", + "devDependencies": {}, + "optionalDependencies": {}, + "_engineSupported": true, + "_npmVersion": "1.1.8", + "_nodeVersion": "v0.7.7-pre", + "_defaultsLoaded": true, + "_from": "node-gyp@~0.3.4" +} diff --git a/deps/npm/node_modules/node-gyp/updateLegacy.sh b/deps/npm/node_modules/node-gyp/updateLegacy.sh new file mode 100755 index 0000000000..941c699edd --- /dev/null +++ b/deps/npm/node_modules/node-gyp/updateLegacy.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env sh + +cp -fpv ~/node/common.gypi legacy/ +cp -fpv ~/node/tools/addon.gypi legacy/tools/ +cp -fpv ~/node/tools/gyp_addon legacy/tools/ +cp -rfpv ~/node/tools/gyp legacy/tools/ + +# we don't care about gyp's test suite and it's rather big... +rm -rf legacy/tools/gyp/test diff --git a/deps/npm/node_modules/request/main.js b/deps/npm/node_modules/request/main.js index f651202740..5a6bd9eb04 100644 --- a/deps/npm/node_modules/request/main.js +++ b/deps/npm/node_modules/request/main.js @@ -137,12 +137,12 @@ Request.prototype.init = function (options) { // do the HTTP CONNECT dance using koichik/node-tunnel if (http.globalAgent && self.uri.protocol === "https:") { - self.tunnel = true var tunnelFn = self.proxy.protocol === "http:" ? tunnel.httpsOverHttp : tunnel.httpsOverHttps var tunnelOptions = { proxy: { host: self.proxy.hostname - , port: +self.proxy.port } + , port: +self.proxy.port + , proxyAuth: self.proxy.auth } , ca: this.ca } self.agent = tunnelFn(tunnelOptions) @@ -170,7 +170,7 @@ Request.prototype.init = function (options) { self.setHost = true } - self.jar(options.jar) + self.jar(self._jar || options.jar) if (!self.uri.pathname) {self.uri.pathname = '/'} if (!self.uri.port) { @@ -348,6 +348,70 @@ Request.prototype.init = function (options) { }) } +// Must call this when following a redirect from https to http or vice versa +// Attempts to keep everything as identical as possible, but update the +// httpModule, Tunneling agent, and/or Forever Agent in use. +Request.prototype._updateProtocol = function () { + var self = this + var protocol = self.uri.protocol + + if (protocol === 'https:') { + // previously was doing http, now doing https + // if it's https, then we might need to tunnel now. + if (self.proxy) { + self.tunnel = true + var tunnelFn = self.proxy.protocol === 'http:' + ? tunnel.httpsOverHttp : tunnel.httpsOverHttps + var tunnelOptions = { proxy: { host: self.proxy.hostname + , post: +self.proxy.port + , proxyAuth: self.proxy.auth } + , ca: self.ca } + self.agent = tunnelFn(tunnelOptions) + return + } + + self.httpModule = https + switch (self.agentClass) { + case ForeverAgent: + self.agentClass = ForeverAgent.SSL + break + case http.Agent: + self.agentClass = https.Agent + break + default: + // nothing we can do. Just hope for the best. + return + } + + // if there's an agent, we need to get a new one. + if (self.agent) self.agent = self.getAgent() + + } else { + if (log) log('previously https, now http') + // previously was doing https, now doing http + // stop any tunneling. + if (self.tunnel) self.tunnel = false + self.httpModule = http + switch (self.agentClass) { + case ForeverAgent.SSL: + self.agentClass = ForeverAgent + break + case https.Agent: + self.agentClass = http.Agent + break + default: + // nothing we can do. just hope for the best + return + } + + // if there's an agent, then get a new one. 
+ if (self.agent) { + self.agent = null + self.agent = self.getAgent() + } + } +} + Request.prototype.getAgent = function () { var Agent = this.agentClass var options = {} @@ -373,7 +437,11 @@ Request.prototype.getAgent = function () { poolKey += this.host + ':' + this.port } - if (options.ca) { + // ca option is only relevant if proxy or destination are https + var proxy = this.proxy + if (typeof proxy === 'string') proxy = url.parse(proxy) + var caRelevant = (proxy && proxy.protocol === 'https:') || this.uri.protocol === 'https:' + if (options.ca && caRelevant) { if (poolKey) poolKey += ':' poolKey += options.ca } @@ -383,6 +451,9 @@ Request.prototype.getAgent = function () { return this.httpModule.globalAgent } + // we're using a stored agent. Make sure it's protocol-specific + poolKey = this.uri.protocol + poolKey + // already generated an agent for this setting if (this.pool[poolKey]) return this.pool[poolKey] @@ -439,7 +510,15 @@ Request.prototype.start = function () { if (!isUrl.test(response.headers.location)) { response.headers.location = url.resolve(self.uri.href, response.headers.location) } - self.uri = response.headers.location + + var uriPrev = self.uri + self.uri = url.parse(response.headers.location) + + // handle the case where we change protocol from https to http or vice versa + if (self.uri.protocol !== uriPrev.protocol) { + self._updateProtocol() + } + self.redirects.push( { statusCode : response.statusCode , redirectUri: response.headers.location @@ -449,6 +528,7 @@ Request.prototype.start = function () { delete self.req delete self.agent delete self._started + delete self.body if (self.headers) { delete self.headers.host } @@ -598,17 +678,17 @@ Request.prototype.setHeaders = function (headers) { return this } Request.prototype.qs = function (q, clobber) { - var uri = { - protocol: this.uri.protocol, - host: this.uri.host, - pathname: this.uri.pathname, - query: clobber ? q : qs.parse(this.uri.query), - hash: this.uri.hash - }; - if (!clobber) for (var i in q) uri.query[i] = q[i] - - this.uri= url.parse(url.format(uri)) - + var base + if (!clobber && this.uri.query) base = qs.parse(this.uri.query) + else base = {} + + for (var i in q) { + base[i] = q[i] + } + + this.uri = url.parse(this.uri.href.split('?')[0] + '?' 
+ qs.stringify(base)) + this.url = this.uri + return this } Request.prototype.form = function (form) { @@ -787,6 +867,7 @@ function initParams(uri, options, callback) { } function request (uri, options, callback) { + if (typeof uri === 'undefined') throw new Error('undefined is not a valid uri or options object.') if ((typeof options === 'function') && !callback) callback = options; if (typeof options === 'object') { options.uri = uri; @@ -841,12 +922,12 @@ request.get = request request.post = function (uri, options, callback) { var params = initParams(uri, options, callback); params.options.method = 'POST'; - return request(params.uri, params.options, params.callback) + return request(params.uri || null, params.options, params.callback) } request.put = function (uri, options, callback) { var params = initParams(uri, options, callback); params.options.method = 'PUT' - return request(params.uri, params.options, params.callback) + return request(params.uri || null, params.options, params.callback) } request.head = function (uri, options, callback) { var params = initParams(uri, options, callback); @@ -857,12 +938,12 @@ request.head = function (uri, options, callback) { params.options.multipart) { throw new Error("HTTP HEAD requests MUST NOT include a request body.") } - return request(params.uri, params.options, params.callback) + return request(params.uri || null, params.options, params.callback) } request.del = function (uri, options, callback) { var params = initParams(uri, options, callback); params.options.method = 'DELETE' - return request(params.uri, params.options, params.callback) + return request(params.uri || null, params.options, params.callback) } request.jar = function () { return new CookieJar diff --git a/deps/npm/node_modules/request/package.json b/deps/npm/node_modules/request/package.json index 4a62c5a469..0e6ddc5d85 100644 --- a/deps/npm/node_modules/request/package.json +++ b/deps/npm/node_modules/request/package.json @@ -35,11 +35,11 @@ "devDependencies": {}, "optionalDependencies": {}, "_engineSupported": true, - "_npmVersion": "1.1.2", - "_nodeVersion": "v0.7.6-pre", + "_npmVersion": "1.1.8", + "_nodeVersion": "v0.6.12", "_defaultsLoaded": true, "dist": { - "shasum": "d1f4855a90a9e69bf6cd2a68503e253cc4a27a71" + "shasum": "6f07c2ba3acfbe0cfe941f43647102740f05ff73" }, - "_from": "request@~2.9" + "_from": "../request" } diff --git a/deps/npm/node_modules/request/vendor/cookie/index.js b/deps/npm/node_modules/request/vendor/cookie/index.js index 1eb2eaa220..ff44b3e629 100644 --- a/deps/npm/node_modules/request/vendor/cookie/index.js +++ b/deps/npm/node_modules/request/vendor/cookie/index.js @@ -21,22 +21,27 @@ var url = require('url'); var Cookie = exports = module.exports = function Cookie(str, req) { this.str = str; - // First key is the name - this.name = str.substr(0, str.indexOf('=')).trim(); - // Map the key/val pairs str.split(/ *; */).reduce(function(obj, pair){ var p = pair.indexOf('='); - if(p > 0) - obj[pair.substring(0, p).trim()] = pair.substring(p + 1).trim(); - else - obj[pair.trim()] = true; + var key = p > 0 ? pair.substring(0, p).trim() : pair.trim(); + var lowerCasedKey = key.toLowerCase(); + var value = p > 0 ? 
pair.substring(p + 1).trim() : true; + + if (!obj.name) { + // First key is the name + obj.name = key; + obj.value = value; + } + else if (lowerCasedKey === 'httponly') { + obj.httpOnly = value; + } + else { + obj[lowerCasedKey] = value; + } return obj; }, this); - // Assign value - this.value = this[this.name]; - // Expires this.expires = this.expires ? new Date(this.expires) diff --git a/deps/npm/node_modules/tar/lib/extract.js b/deps/npm/node_modules/tar/lib/extract.js index e45974c723..c34a81e21f 100644 --- a/deps/npm/node_modules/tar/lib/extract.js +++ b/deps/npm/node_modules/tar/lib/extract.js @@ -24,6 +24,10 @@ function Extract (opts) { opts.type = "Directory" opts.Directory = true + // similar to --strip or --strip-components + opts.strip = +opts.strip + if (!opts.strip || opts.strip <= 0) opts.strip = 0 + this._fst = fstream.Writer(opts) this.pause() @@ -33,6 +37,16 @@ function Extract (opts) { // of the tarball. So, they need to be resolved against // the target directory in order to be created properly. me.on("entry", function (entry) { + // if there's a "strip" argument, then strip off that many + // path components. + if (opts.strip) { + var p = entry.path.split("/").slice(opts.strip).join("/") + entry.path = entry.props.path = p + if (entry.linkpath) { + var lp = entry.linkpath.split("/").slice(opts.strip).join("/") + entry.linkpath = entry.props.linkpath = lp + } + } if (entry.type !== "Link") return entry.linkpath = entry.props.linkpath = path.join(opts.path, path.join("/", entry.props.linkpath)) diff --git a/deps/npm/node_modules/tar/package.json b/deps/npm/node_modules/tar/package.json index aa53022c64..849d0d6918 100644 --- a/deps/npm/node_modules/tar/package.json +++ b/deps/npm/node_modules/tar/package.json @@ -1,8 +1,12 @@ { - "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "author": { + "name": "Isaac Z. 
Schlueter", + "email": "i@izs.me", + "url": "http://blog.izs.me/" + }, "name": "tar", "description": "tar for node", - "version": "0.1.12", + "version": "0.1.13", "repository": { "type": "git", "url": "git://github.com/isaacs/node-tar.git" @@ -22,5 +26,19 @@ "devDependencies": { "tap": "0.x", "rimraf": "1.x" - } + }, + "_npmUser": { + "name": "isaacs", + "email": "i@izs.me" + }, + "_id": "tar@0.1.13", + "optionalDependencies": {}, + "_engineSupported": true, + "_npmVersion": "1.1.4", + "_nodeVersion": "v0.7.6-pre", + "_defaultsLoaded": true, + "dist": { + "shasum": "804bdaaacaab928ec1c9bbd8b848a042c3adacb2" + }, + "_from": "tar@0.1.13" } diff --git a/deps/npm/package.json b/deps/npm/package.json index 53876fafa4..5843c14152 100644 --- a/deps/npm/package.json +++ b/deps/npm/package.json @@ -10,7 +10,7 @@ "install", "package.json" ], - "version": "1.1.4", + "version": "1.1.9", "preferGlobal": true, "config": { "publishtest": false @@ -47,13 +47,14 @@ "request": "~2.9", "which": "1", "tar": "~0.1.12", - "fstream": "~0.1.5", + "fstream": "~0.1.13", "block-stream": "*", "inherits": "1", "mkdirp": "0.3", "fast-list": "~1.0.1", "read": "0", - "lru-cache": "1" + "lru-cache": "1", + "node-gyp": "~0.3.4" }, "bundleDependencies": [ "slide", @@ -75,7 +76,8 @@ "mkdirp", "fast-list", "read", - "lru-cache" + "lru-cache", + "node-gyp" ], "devDependencies": { "ronn": "https://github.com/isaacs/ronnjs/tarball/master" diff --git a/deps/npm/test/packages/npm-test-bundled-git/minimatch-expected.json b/deps/npm/test/packages/npm-test-bundled-git/minimatch-expected.json new file mode 100644 index 0000000000..62b20d698e --- /dev/null +++ b/deps/npm/test/packages/npm-test-bundled-git/minimatch-expected.json @@ -0,0 +1,29 @@ +{ + "author": "Isaac Z. Schlueter (http://blog.izs.me)", + "name": "minimatch", + "description": "a glob matcher in javascript", + "version": "0.2.1", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/minimatch.git" + }, + "main": "minimatch.js", + "scripts": { + "test": "tap test" + }, + "engines": { + "node": "*" + }, + "dependencies": { + "lru-cache": "~1.0.5" + }, + "devDependencies": { + "tap": "~0.1.3" + }, + "licenses" : [ + { + "type" : "MIT", + "url" : "http://github.com/isaacs/minimatch/raw/master/LICENSE" + } + ] +} diff --git a/deps/npm/test/packages/npm-test-bundled-git/package.json b/deps/npm/test/packages/npm-test-bundled-git/package.json new file mode 100644 index 0000000000..69e0eb7a8b --- /dev/null +++ b/deps/npm/test/packages/npm-test-bundled-git/package.json @@ -0,0 +1,5 @@ +{"name":"npm-test-bundled-git" +,"scripts":{"test":"node test.js"} +,"version":"1.2.5" +,"dependencies":{"glob":"git://github.com/isaacs/node-glob.git#npm-test"} +,"bundledDependencies":["glob"]} diff --git a/deps/npm/test/packages/npm-test-bundled-git/test.js b/deps/npm/test/packages/npm-test-bundled-git/test.js new file mode 100644 index 0000000000..4fcc54cafe --- /dev/null +++ b/deps/npm/test/packages/npm-test-bundled-git/test.js @@ -0,0 +1,4 @@ +var a = require("./node_modules/glob/node_modules/minimatch/package.json") +var e = require("./minimatch-expected.json") +var assert = require("assert") +assert.deepEqual(a, e, "didn't get expected minimatch/package.json") diff --git a/deps/npm/test/packages/npm-test-env-reader/package.json b/deps/npm/test/packages/npm-test-env-reader/package.json index ddd5c91abd..99d7fc9307 100644 --- a/deps/npm/test/packages/npm-test-env-reader/package.json +++ b/deps/npm/test/packages/npm-test-env-reader/package.json @@ -1,14 +1,14 @@ { 
"name":"npm-test-env-reader" , "version" : "1.2.3" , "scripts" : - { "install" : "./test.sh" - , "preinstall" : "./test.sh" - , "preuninstall" : "./test.sh" - , "postuninstall" : "./test.sh" - , "test" : "./test.sh" - , "stop" : "./test.sh" - , "start" : "./test.sh" - , "restart" : "./test.sh" - , "foo" : "./test.sh" + { "install" : "node test.js" + , "preinstall" : "node test.js" + , "preuninstall" : "node test.js" + , "postuninstall" : "node test.js" + , "test" : "node test.js" + , "stop" : "node test.js" + , "start" : "node test.js" + , "restart" : "node test.js" + , "foo" : "node test.js" } } diff --git a/deps/npm/test/packages/npm-test-env-reader/test.js b/deps/npm/test/packages/npm-test-env-reader/test.js new file mode 100755 index 0000000000..d6cb7893a7 --- /dev/null +++ b/deps/npm/test/packages/npm-test-env-reader/test.js @@ -0,0 +1,9 @@ +var envs = [] +for (var e in process.env) { + if (e.match(/npm|^path$/i)) envs.push(e + '=' + process.env[e]) +} +envs.sort(function (a, b) { + return a === b ? 0 : a > b ? -1 : 1 +}).forEach(function (e) { + console.log(e) +}) diff --git a/deps/npm/test/packages/npm-test-env-reader/test.sh b/deps/npm/test/packages/npm-test-env-reader/test.sh deleted file mode 100755 index b4ca4374ed..0000000000 --- a/deps/npm/test/packages/npm-test-env-reader/test.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env sh -env | grep npm | sort | uniq -echo PATH=$PATH diff --git a/deps/npm/test/packages/npm-test-optional-deps/package.json b/deps/npm/test/packages/npm-test-optional-deps/package.json index ebcd56832d..56c6f09ed0 100644 --- a/deps/npm/test/packages/npm-test-optional-deps/package.json +++ b/deps/npm/test/packages/npm-test-optional-deps/package.json @@ -1,10 +1,12 @@ { "name": "npm-test-optional-deps" , "version": "1.2.5" +, "scripts": { "test": "node test.js" } , "optionalDependencies": { "npm-test-foobarzaaakakaka": "http://example.com/" , "dnode": "10.999.14234" - , "sax": "*" + , "sax": "0.3.5" , "999 invalid name": "1.2.3" , "glob": "some invalid version 99 #! 
$$ x y z" + , "npm-test-failer":"*" } } diff --git a/deps/npm/test/packages/npm-test-optional-deps/test.js b/deps/npm/test/packages/npm-test-optional-deps/test.js new file mode 100644 index 0000000000..2232906d64 --- /dev/null +++ b/deps/npm/test/packages/npm-test-optional-deps/test.js @@ -0,0 +1,9 @@ +var fs = require("fs") +var assert = require("assert") +var path = require("path") + +// sax should be the only dep that ends up installed + +var dir = path.resolve(__dirname, "node_modules") +assert.deepEqual(fs.readdirSync(dir), ["sax"]) +assert.equal(require("sax/package.json").version, "0.3.5") diff --git a/deps/npm/test/packages/npm-test-platform-all/package.json b/deps/npm/test/packages/npm-test-platform-all/package.json new file mode 100644 index 0000000000..07624363e6 --- /dev/null +++ b/deps/npm/test/packages/npm-test-platform-all/package.json @@ -0,0 +1,5 @@ +{"name":"npm-test-platform-all" +,"version":"9.9.9-9" +,"homepage":"http://www.zombo.com/" +,"os":["darwin","linux","win32","solaris","haiku","sunos","freebsd","openbsd","netbsd"] +,"cpu":["arm","mips","ia32","x64","sparc"]} diff --git a/deps/npm/test/packages/npm-test-platform/package.json b/deps/npm/test/packages/npm-test-platform/package.json new file mode 100644 index 0000000000..d6c3e09e1d --- /dev/null +++ b/deps/npm/test/packages/npm-test-platform/package.json @@ -0,0 +1,5 @@ +{"name":"npm-test-platform" +,"version":"9.9.9-9" +,"homepage":"http://www.youtube.com/watch?v=dQw4w9WgXcQ" +,"os":["!this_is_not_a_real_os", "!neither_is_this"] +,"cpu":["!this_is_not_a_real_cpu","!this_isnt_either"]} diff --git a/deps/npm/test/packages/npm-test-shrinkwrap/npm-shrinkwrap.json b/deps/npm/test/packages/npm-test-shrinkwrap/npm-shrinkwrap.json index c4f7fbf815..27553c181f 100644 --- a/deps/npm/test/packages/npm-test-shrinkwrap/npm-shrinkwrap.json +++ b/deps/npm/test/packages/npm-test-shrinkwrap/npm-shrinkwrap.json @@ -7,11 +7,11 @@ "from": "https://raw.github.com/gist/1837112/index.js" }, "glob": { - "version": "3.1.4", - "from": "git://github.com/isaacs/node-glob.git", + "version": "3.1.5", + "from": "git://github.com/isaacs/node-glob.git#npm-test", "dependencies": { "minimatch": { - "version": "0.2.0", + "version": "0.2.1", "dependencies": { "lru-cache": { "version": "1.0.5" diff --git a/deps/npm/test/packages/npm-test-shrinkwrap/package.json b/deps/npm/test/packages/npm-test-shrinkwrap/package.json index 34a8abc21c..b2684a1233 100644 --- a/deps/npm/test/packages/npm-test-shrinkwrap/package.json +++ b/deps/npm/test/packages/npm-test-shrinkwrap/package.json @@ -4,7 +4,7 @@ "version": "0.0.0", "dependencies": { "npm-test-single-file": "https://raw.github.com/gist/1837112/index.js", - "glob": "git://github.com/isaacs/node-glob.git", + "glob": "git://github.com/isaacs/node-glob.git#npm-test", "minimatch": "~0.1.0" }, "scripts": { From 571a51989e282edbe3d5e380797aa41498863514 Mon Sep 17 00:00:00 2001 From: isaacs Date: Thu, 15 Mar 2012 08:14:22 -0700 Subject: [PATCH 5/9] Upgrade libuv to 66a959c4052 --- deps/uv/src/unix/linux.c | 12 ++++++++---- deps/uv/test/test-ref.c | 4 ++-- deps/uv/test/test-tty.c | 4 ++-- 3 files changed, 12 insertions(+), 8 deletions(-) diff --git a/deps/uv/src/unix/linux.c b/deps/uv/src/unix/linux.c index 3ed6565316..809e644d0d 100644 --- a/deps/uv/src/unix/linux.c +++ b/deps/uv/src/unix/linux.c @@ -170,11 +170,16 @@ uint64_t uv_get_total_memory(void) { #if HAVE_INOTIFY_INIT || HAVE_INOTIFY_INIT1 static int new_inotify_fd(void) { -#if HAVE_INOTIFY_INIT1 - return inotify_init1(IN_NONBLOCK | IN_CLOEXEC); -#else int 
+#if HAVE_INOTIFY_INIT1
+  fd = inotify_init1(IN_NONBLOCK | IN_CLOEXEC);
+  if (fd != -1)
+    return fd;
+  if (errno != ENOSYS)
+    return -1;
+#endif
+
   if ((fd = inotify_init()) == -1)
     return -1;
@@ -184,7 +189,6 @@ static int new_inotify_fd(void) {
   }

   return fd;
-#endif
 }

diff --git a/deps/uv/test/test-ref.c b/deps/uv/test/test-ref.c
index 9a474b5636..cab3a70300 100644
--- a/deps/uv/test/test-ref.c
+++ b/deps/uv/test/test-ref.c
@@ -45,7 +45,7 @@ static void write_unref_cb(uv_connect_t* req, int status) {
   ASSERT(status == 0);

   uv_write(&write_req, req->handle, &buf, 1, (uv_write_cb) fail_cb);
-  uv_unref(uv_default_loop()); // uv_write refs the loop
+  uv_unref(uv_default_loop()); /* uv_write refs the loop */
 }


@@ -55,7 +55,7 @@ static void shutdown_unref_cb(uv_connect_t* req, int status) {
   ASSERT(status == 0);

   uv_shutdown(&shutdown_req, req->handle, (uv_shutdown_cb) fail_cb);
-  uv_unref(uv_default_loop()); // uv_shutdown refs the loop
+  uv_unref(uv_default_loop()); /* uv_shutdown refs the loop */
 }

diff --git a/deps/uv/test/test-tty.c b/deps/uv/test/test-tty.c
index 24f0def6a6..ded59c9f77 100644
--- a/deps/uv/test/test-tty.c
+++ b/deps/uv/test/test-tty.c
@@ -25,7 +25,7 @@
 #ifdef _WIN32
 # include <io.h>
 # include <windows.h>
-#else // Unix
+#else /* Unix */
 # include <fcntl.h>
 # include <unistd.h>
 #endif
@@ -37,7 +37,7 @@ TEST_IMPL(tty) {
   uv_tty_t tty_in, tty_out;
   uv_loop_t* loop = uv_default_loop();

-  // Make sure we have an FD that refers to a tty
+  /* Make sure we have an FD that refers to a tty */
 #ifdef _WIN32
   HANDLE handle;
   handle = CreateFileA("conin$",
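[Editor's note] The linux.c hunk above is the substance of the "fs.watch ENOSYS on Linux kernel version mismatch" fix: a binary built against a libc that exposes `inotify_init1()` may still run on an older kernel that lacks the syscall, so the call is tried first and `inotify_init()` is used as a runtime fallback when it fails with ENOSYS. A minimal standalone sketch of that pattern follows; the function name is hypothetical and plain `fcntl()` stands in for libuv's internal `uv__cloexec()`/`uv__nonblock()` helpers, so this is illustrative rather than libuv's exact code.

```c
/* Illustrative sketch of the runtime-fallback pattern above; not
 * libuv's exact code, which uses internal helpers for flag setup. */
#include <errno.h>
#include <fcntl.h>
#include <sys/inotify.h>
#include <unistd.h>

static int new_inotify_fd_sketch(void) {
  int fd;
  int flags;

#if defined(IN_NONBLOCK) && defined(IN_CLOEXEC) /* stand-in for HAVE_INOTIFY_INIT1 */
  fd = inotify_init1(IN_NONBLOCK | IN_CLOEXEC);
  if (fd != -1)
    return fd;   /* fast path: running kernel supports inotify_init1 */
  if (errno != ENOSYS)
    return -1;   /* a real error, not a missing syscall */
#endif

  /* Older kernel: create the fd, then set the flags by hand. */
  if ((fd = inotify_init()) == -1)
    return -1;

  if (fcntl(fd, F_SETFD, FD_CLOEXEC) == -1)
    goto fail;

  if ((flags = fcntl(fd, F_GETFL)) == -1 ||
      fcntl(fd, F_SETFL, flags | O_NONBLOCK) == -1)
    goto fail;

  return fd;

fail:
  close(fd);
  return -1;
}
```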
From 06bf0707f7dc30b9b4e6966de76af324064c3a06 Mon Sep 17 00:00:00 2001
From: isaacs
Date: Thu, 15 Mar 2012 08:22:31 -0700
Subject: [PATCH 6/9] makefile: Fix 'make doc'

---
 Makefile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Makefile b/Makefile
index 42679e0b48..a13f08a141 100644
--- a/Makefile
+++ b/Makefile
@@ -102,7 +102,7 @@ website_files = \
 	out/doc/logos/index.html \
 	$(doc_images)

-doc: node $(apidoc_dirs) $(website_files) $(apiassets) $(apidocs) tools/doc/
+doc: program $(apidoc_dirs) $(website_files) $(apiassets) $(apidocs) tools/doc/

 $(apidoc_dirs):
 	mkdir -p $@

From 9f7f86b534f8556290eb8cad915984ff4ca54996 Mon Sep 17 00:00:00 2001
From: isaacs
Date: Wed, 14 Mar 2012 17:07:24 -0700
Subject: [PATCH 7/9] 2012.03.15 Version 0.6.13 (stable)

* Windows: Many libuv test fixes (Bert Belder)

* Windows: avoid uv_guess_handle crash when fd < 0 (Bert Belder)

* Map EBUSY and ENOTEMPTY errors (Bert Belder)

* Windows: include syscall in fs errors (Bert Belder)

* Fix fs.watch ENOSYS on Linux kernel version mismatch (Ben Noordhuis)

* Update npm to 1.1.9
  - upgrade node-gyp to 0.3.5 (Nathan Rajlich)
  - Fix isaacs/npm#2249 Add cache-max and cache-min configs
  - Properly redirect across https/http registry requests
  - log config usage if undefined key in set function (Kris Windham)
  - Add support for os/cpu fields in package.json (Adam Blackburn)
  - Automatically run node-gyp on packages containing a binding.gyp
  - Fix failures unpacking in UNC shares
  - Never create un-listable directories
  - Handle cases where an optionalDependency fails to build
---
 ChangeLog                | 26 +++++++++++++++++++++++++-
 doc/about/index.html     |  2 +-
 doc/community/index.html |  2 +-
 doc/index.html           | 18 +++++++++---------
 doc/logos/index.html     |  2 +-
 doc/template.html        |  2 +-
 src/node_version.h       |  2 +-
 7 files changed, 39 insertions(+), 15 deletions(-)

diff --git a/ChangeLog b/ChangeLog
index 7bb9808d46..e831c3fc6d 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,4 +1,28 @@
-2012.03.02 Version 0.6.12 (stable)
+2012.03.15 Version 0.6.13 (stable)
+
+* Windows: Many libuv test fixes (Bert Belder)
+
+* Windows: avoid uv_guess_handle crash when fd < 0 (Bert Belder)
+
+* Map EBUSY and ENOTEMPTY errors (Bert Belder)
+
+* Windows: include syscall in fs errors (Bert Belder)
+
+* Fix fs.watch ENOSYS on Linux kernel version mismatch (Ben Noordhuis)
+
+* Update npm to 1.1.9
+  - upgrade node-gyp to 0.3.5 (Nathan Rajlich)
+  - Fix isaacs/npm#2249 Add cache-max and cache-min configs
+  - Properly redirect across https/http registry requests
+  - log config usage if undefined key in set function (Kris Windham)
+  - Add support for os/cpu fields in package.json (Adam Blackburn)
+  - Automatically run node-gyp on packages containing a binding.gyp
+  - Fix failures unpacking in UNC shares
+  - Never create un-listable directories
+  - Handle cases where an optionalDependency fails to build
+
+
+2012.03.02 Version 0.6.12 (stable), 48a2d34cfe6b7e1c9d15202a4ef5e3c82d1fba35

 * Upgrade V8 to 3.6.6.24
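[Editor's note] The "os/cpu fields" bullet above is exercised by the npm-test-platform fixtures added earlier in this series. As a minimal sketch of the feature from a package author's side (the package name is hypothetical; the comma-first JSON style mirrors npm's own fixtures), a native add-on supporting only 64-bit Unix might declare:

```json
{ "name": "example-native-pkg"
, "version": "1.0.0"
, "os": ["darwin", "linux"]
, "cpu": ["x64"]
}
```

A leading `!` negates an entry, as in the npm-test-platform fixture's `"os":["!this_is_not_a_real_os", ...]` above; npm 1.1.9 refuses to install a package whose os/cpu lists rule out the current platform.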
diff --git a/doc/about/index.html b/doc/about/index.html
index abafc77c08..c2c4d74271 100644
--- a/doc/about/index.html
+++ b/doc/about/index.html
@@ -130,7 +130,7 @@ console.log('Server running at http://127.0.0.1:1337/');
-    Copyright 2010 Joyent, Inc, Node.js is a trademark of Joyent, Inc. View license.
+    Copyright 2010-2012 Joyent, Inc, Node.js is a trademark of Joyent, Inc. View license.
diff --git a/doc/community/index.html b/doc/community/index.html
index d23161dd6a..b928b69d1b 100644
--- a/doc/community/index.html
+++ b/doc/community/index.html
@@ -180,7 +180,7 @@
-    Copyright 2010 Joyent, Inc, Node.js is a trademark of Joyent, Inc. View license.
+    Copyright 2010-2012 Joyent, Inc, Node.js is a trademark of Joyent, Inc. View license.
diff --git a/src/node_version.h b/src/node_version.h
index 7219bdac99..fcd830beaa 100644
--- a/src/node_version.h
+++ b/src/node_version.h
@@ -29,7 +29,7 @@
 #define NODE_MAJOR_VERSION 0
 #define NODE_MINOR_VERSION 6
 #define NODE_PATCH_VERSION 13
-#define NODE_VERSION_IS_RELEASE 0
+#define NODE_VERSION_IS_RELEASE 1

 #ifndef NODE_STRINGIFY
 #define NODE_STRINGIFY(n) NODE_STRINGIFY_HELPER(n)

From b9bd2d3eb9aa57926dc76294a376ed95c76fc812 Mon Sep 17 00:00:00 2001
From: isaacs
Date: Thu, 15 Mar 2012 10:33:33 -0700
Subject: [PATCH 8/9] Now working on v0.6.14

---
 src/node_version.h | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/node_version.h b/src/node_version.h
index fcd830beaa..3445e34de4 100644
--- a/src/node_version.h
+++ b/src/node_version.h
@@ -28,8 +28,8 @@

 #define NODE_MAJOR_VERSION 0
 #define NODE_MINOR_VERSION 6
-#define NODE_PATCH_VERSION 13
-#define NODE_VERSION_IS_RELEASE 1
+#define NODE_PATCH_VERSION 14
+#define NODE_VERSION_IS_RELEASE 0

 #ifndef NODE_STRINGIFY
 #define NODE_STRINGIFY(n) NODE_STRINGIFY_HELPER(n)

From 90b785c09abfe07746e813f258b5654e1de86d9c Mon Sep 17 00:00:00 2001
From: Rod Vagg
Date: Fri, 16 Mar 2012 13:43:00 +1100
Subject: [PATCH 9/9] doc: fix # links from (and within) api/fs

---
 doc/api/fs.markdown | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/doc/api/fs.markdown b/doc/api/fs.markdown
index 5b657a7cd9..ebe2e47fd0 100644
--- a/doc/api/fs.markdown
+++ b/doc/api/fs.markdown
@@ -137,7 +137,7 @@ Synchronous lchmod(2).
 ## fs.stat(path, [callback])

 Asynchronous stat(2). The callback gets two arguments `(err, stats)` where
-`stats` is a [fs.Stats](#fs.Stats) object. See the [fs.Stats](#fs.Stats)
+`stats` is a [fs.Stats](#fs_class_fs_stats) object. See the [fs.Stats](#fs_class_fs_stats)
 section below for more information.

 ## fs.lstat(path, [callback])
@@ -552,7 +552,7 @@ An example to read the last 10 bytes of a file which is 100 bytes long:

 ## Class: fs.ReadStream

-`ReadStream` is a [Readable Stream](stream.html#readable_stream).
+`ReadStream` is a [Readable Stream](stream.html#stream_readable_stream).

 ### Event: 'open'

@@ -578,7 +578,7 @@ default mode `w`.

 ## fs.WriteStream

-`WriteStream` is a [Writable Stream](stream.html#writable_stream).
+`WriteStream` is a [Writable Stream](stream.html#stream_writable_stream).

 ### Event: 'open'

@@ -605,7 +605,7 @@ Stop watching for changes on the given `fs.FSWatcher`.
 * `filename` {String} The filename that changed (if relevant/available)

 Emitted when something changes in a watched directory or file.
-See more details in [fs.watch](#fs.watch).
+See more details in [fs.watch](#fs_fs_watch_filename_options_listener).

 ### Event: 'error'
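[Editor's note] The link rewrites in this last patch follow the anchor scheme the doc tooling generates for the api pages. Judging from the hunks above, the pattern appears to be the page's basename plus the heading text, lowercased, with runs of non-alphanumeric characters collapsed to underscores; that inference is from the diff itself, not from the tool's source. A before/after sketch in the markdown the docs use:

```markdown
<!-- Old-style anchors, no longer emitted by the doc tool: -->
See the [fs.Stats](#fs.Stats) section.
`ReadStream` is a [Readable Stream](stream.html#readable_stream).

<!-- New-style anchors: basename + lowercased heading, punctuation -> underscores: -->
See the [fs.Stats](#fs_class_fs_stats) section.
`ReadStream` is a [Readable Stream](stream.html#stream_readable_stream).
See more details in [fs.watch](#fs_fs_watch_filename_options_listener).
```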