Merge pull request #1 from srujzs/master
Migrate WebCore code to Python 3
diff --git a/bindings/scripts/aggregate_generated_bindings.py b/bindings/scripts/aggregate_generated_bindings.py
index 5619f48..bb095c1 100755
--- a/bindings/scripts/aggregate_generated_bindings.py
+++ b/bindings/scripts/aggregate_generated_bindings.py
@@ -119,7 +119,7 @@
def write_content(content, output_file_name):
parent_path, file_name = os.path.split(output_file_name)
if not os.path.exists(parent_path):
- print 'Creating directory: %s' % parent_path
+ print('Creating directory: %s' % parent_path)
os.makedirs(parent_path)
with open(output_file_name, 'w') as f:
f.write(content)
diff --git a/bindings/scripts/blink_idl_lexer.py b/bindings/scripts/blink_idl_lexer.py
index c14b145..8437cb6 100644
--- a/bindings/scripts/blink_idl_lexer.py
+++ b/bindings/scripts/blink_idl_lexer.py
@@ -117,7 +117,7 @@
try:
outputdir = argv[1]
except IndexError as err:
- print 'Usage: %s OUTPUT_DIR' % argv[0]
+ print('Usage: %s OUTPUT_DIR' % argv[0])
return 1
# Important: rewrite_tables=True causes the cache file to be deleted if it
# exists, thus making sure that PLY doesn't load it instead of regenerating
diff --git a/bindings/scripts/blink_idl_parser.py b/bindings/scripts/blink_idl_parser.py
index be8030c..810182a 100644
--- a/bindings/scripts/blink_idl_parser.py
+++ b/bindings/scripts/blink_idl_parser.py
@@ -72,8 +72,7 @@
from idl_parser.idl_parser import ParseFile as parse_file
from blink_idl_lexer import BlinkIDLLexer
import blink_idl_lexer
-
class BlinkIDLParser(IDLParser):
def __init__(self,
@@ -137,7 +135,7 @@
try:
outputdir = argv[1]
except IndexError as err:
- print 'Usage: %s OUTPUT_DIR' % argv[0]
+ print('Usage: %s OUTPUT_DIR' % argv[0])
return 1
blink_idl_lexer.main(argv)
# Important: rewrite_tables=True causes the cache file to be deleted if it
diff --git a/bindings/scripts/code_generator.py b/bindings/scripts/code_generator.py
index 66838f8..8f0c465 100644
--- a/bindings/scripts/code_generator.py
+++ b/bindings/scripts/code_generator.py
@@ -179,7 +179,7 @@
cache_dir = argv[1]
dummy_filename = argv[2]
except IndexError:
- print 'Usage: %s CACHE_DIR DUMMY_FILENAME' % argv[0]
+ print('Usage: %s CACHE_DIR DUMMY_FILENAME' % argv[0])
return 1
# Cache templates
diff --git a/bindings/scripts/code_generator_v8.py b/bindings/scripts/code_generator_v8.py
index c971aa9..abd7f5c 100644
--- a/bindings/scripts/code_generator_v8.py
+++ b/bindings/scripts/code_generator_v8.py
@@ -94,7 +94,7 @@
def resolve(self, definitions, definition_name):
"""Traverse definitions and resolves typedefs with the actual types."""
self.typedefs = {}
- for name, typedef in self.info_provider.typedefs.iteritems():
+ for name, typedef in self.info_provider.typedefs.items():
self.typedefs[name] = typedef.idl_type
self.additional_header_includes = set()
definitions.accept(self)
@@ -312,7 +312,7 @@
# idl_definitions.py. What we do instead is to resolve typedefs in
# _generate_container_code() whenever a new union file is generated.
self.typedefs = {}
- for name, typedef in self.info_provider.typedefs.iteritems():
+ for name, typedef in self.info_provider.typedefs.items():
self.typedefs[name] = typedef.idl_type
def _generate_container_code(self, union_type):
@@ -404,7 +404,7 @@
if not callback_functions:
return ()
outputs = set()
- for callback_function_dict in callback_functions.itervalues():
+ for callback_function_dict in callback_functions.values():
if callback_function_dict['component_dir'] != self.target_component:
continue
callback_function = callback_function_dict['callback_function']
diff --git a/bindings/scripts/code_generator_web_agent_api_test.py b/bindings/scripts/code_generator_web_agent_api_test.py
index 7f7b96e..b5260b8 100644
--- a/bindings/scripts/code_generator_web_agent_api_test.py
+++ b/bindings/scripts/code_generator_web_agent_api_test.py
@@ -52,7 +52,7 @@
def make_stub_interfaces_info(self, classes_to_paths):
result = {}
- for class_name, path in classes_to_paths.iteritems():
+ for class_name, path in classes_to_paths.items():
result[class_name] = {'include_path': path}
return result
diff --git a/bindings/scripts/compute_global_objects.py b/bindings/scripts/compute_global_objects.py
index ececf61..df0b6cb 100755
--- a/bindings/scripts/compute_global_objects.py
+++ b/bindings/scripts/compute_global_objects.py
@@ -51,7 +51,7 @@
def dict_union(dicts):
- return dict((k, v) for d in dicts for k, v in d.iteritems())
+ return dict((k, v) for d in dicts for k, v in d.items())
def idl_file_to_global_names(idl_filename):
@@ -68,7 +68,7 @@
interface_name = get_first_interface_name_from_idl(idl_file_contents)
global_keys = GLOBAL_EXTENDED_ATTRIBUTES.intersection(
- extended_attributes.iterkeys())
+ iter(extended_attributes.keys()))
if not global_keys:
return
if len(global_keys) > 1:
diff --git a/bindings/scripts/compute_interfaces_info_individual.py b/bindings/scripts/compute_interfaces_info_individual.py
index 98fee87..0cd6ec1 100755
--- a/bindings/scripts/compute_interfaces_info_individual.py
+++ b/bindings/scripts/compute_interfaces_info_individual.py
@@ -140,10 +140,21 @@
def get_unforgeable_attributes_from_definition(definition):
+ # Legacy Python 2 way to sort lists. Group by type, and then sort by value.
+ class MultitypeSortKey:
+ def __init__(self, value):
+ self.value = value
+
+ def __lt__(self, other):
+ try:
+ return self.value < other.value
+ except TypeError:
+ return str(type(self)) < str(type(other))
if 'Unforgeable' in definition.extended_attributes:
- return sorted(definition.attributes)
- return sorted(attribute for attribute in definition.attributes
- if 'Unforgeable' in attribute.extended_attributes)
+ return sorted(definition.attributes, key=MultitypeSortKey)
+ return sorted([attribute for attribute in definition.attributes
+ if 'Unforgeable' in attribute.extended_attributes],
+ key=MultitypeSortKey)
def collect_union_types_from_definitions(definitions):
@@ -227,7 +238,7 @@
this_union_types = collect_union_types_from_definitions(definitions)
self.union_types.update(this_union_types)
self.typedefs.update(definitions.typedefs)
- for callback_function_name, callback_function in definitions.callback_functions.iteritems():
+ for callback_function_name, callback_function in definitions.callback_functions.items():
# Set 'component_dir' to specify a directory that callback function files belong to
self.callback_functions[callback_function_name] = {
'callback_function': callback_function,
@@ -235,14 +246,14 @@
'full_path': os.path.realpath(idl_filename),
}
# Check enum duplication.
- for enum in definitions.enumerations.values():
+ for enum in list(definitions.enumerations.values()):
if not self.check_enum_consistency(enum):
raise Exception('Enumeration "%s" is defined more than once '
'with different valid values' % enum.name)
self.enumerations.update(definitions.enumerations)
if definitions.interfaces:
- definition = next(definitions.interfaces.itervalues())
+ definition = next(iter(definitions.interfaces.values()))
interface_info = {
'is_callback_interface': definition.is_callback,
'is_dictionary': False,
@@ -256,7 +267,7 @@
'referenced_interfaces': get_put_forward_interfaces_from_definition(definition),
}
elif definitions.dictionaries:
- definition = next(definitions.dictionaries.itervalues())
+ definition = next(iter(definitions.dictionaries.values()))
interface_info = {
'is_callback_interface': False,
'is_dictionary': True,
@@ -337,7 +348,7 @@
return {
'callback_functions': self.callback_functions,
'enumerations': dict((enum.name, enum.values)
- for enum in self.enumerations.values()),
+ for enum in list(self.enumerations.values())),
'typedefs': self.typedefs,
'union_types': self.union_types,
}
diff --git a/bindings/scripts/compute_interfaces_info_overall.py b/bindings/scripts/compute_interfaces_info_overall.py
index 2791a9a..ad1202d 100755
--- a/bindings/scripts/compute_interfaces_info_overall.py
+++ b/bindings/scripts/compute_interfaces_info_overall.py
@@ -128,12 +128,12 @@
Needed for merging partial_interface_files across components.
"""
- for key, value in other.iteritems():
+ for key, value in other.items():
if key not in existing:
existing[key] = value
continue
existing_value = existing[key]
- for inner_key, inner_value in value.iteritems():
+ for inner_key, inner_value in value.items():
existing_value[inner_key].extend(inner_value)
@@ -171,7 +171,7 @@
garbage_collected_interfaces = set()
callback_interfaces = set()
- for interface_name, interface_info in interfaces_info.iteritems():
+ for interface_name, interface_info in interfaces_info.items():
component_dirs[interface_name] = idl_filename_to_component(interface_info['full_path'])
if interface_info['ancestors']:
@@ -209,11 +209,11 @@
partial_interface_files, info['partial_interface_files'])
# Record inheritance information individually
- for interface_name, interface_info in interfaces_info.iteritems():
+ for interface_name, interface_info in interfaces_info.items():
extended_attributes = interface_info['extended_attributes']
inherited_extended_attributes_by_interface[interface_name] = dict(
(key, value)
- for key, value in extended_attributes.iteritems()
+ for key, value in extended_attributes.items()
if key in INHERITED_EXTENDED_ATTRIBUTES)
parent = interface_info['parent']
if parent:
@@ -231,14 +231,14 @@
# to implement*ing* interface (lhs of 'implements').
# Note that moving an 'implements' statement between implementing and
# implemented files does not change the info (or hence cause a rebuild)!
- for right_interface_name, interface_info in interfaces_info.iteritems():
+ for right_interface_name, interface_info in interfaces_info.items():
for left_interface_name in interface_info['implemented_by_interfaces']:
interfaces_info[left_interface_name]['implements_interfaces'].append(right_interface_name)
del interface_info['implemented_by_interfaces']
# An IDL file's dependencies are partial interface files that extend it,
# and files for other interfaces that this interfaces implements.
- for interface_name, interface_info in interfaces_info.iteritems():
+ for interface_name, interface_info in interfaces_info.items():
partial_interface_paths = partial_interface_files[interface_name]
partial_interfaces_full_paths = partial_interface_paths['full_paths']
# Partial interface definitions each need an include, as they are
@@ -296,7 +296,7 @@
})
# Clean up temporary private information
- for interface_info in interfaces_info.itervalues():
+ for interface_info in interfaces_info.values():
del interface_info['extended_attributes']
del interface_info['union_types']
del interface_info['is_legacy_treat_as_partial_interface']
diff --git a/bindings/scripts/generate_conditional_features.py b/bindings/scripts/generate_conditional_features.py
index 0028e8a..42ed635 100644
--- a/bindings/scripts/generate_conditional_features.py
+++ b/bindings/scripts/generate_conditional_features.py
@@ -71,7 +71,7 @@
implements = definitions.implements
# There should only be a single interface defined in an IDL file. Return it.
assert len(interfaces) == 1
- return (interfaces.values()[0], implements)
+ return (list(interfaces.values())[0], implements)
def interface_is_global(interface):
@@ -169,7 +169,7 @@
'is_global': interface_info.is_global,
'v8_class': interface_info.v8_class,
'installers': get_install_functions([interface_info], feature_names)}
- for interface_info, feature_names in features_for_type.items()]
+ for interface_info, feature_names in list(features_for_type.items())]
context['installers_by_interface'].sort(key=lambda x: x['name'])
# For each conditional feature, collect a list of bindings installation
@@ -178,7 +178,7 @@
{'name': feature_name,
'name_constant': 'OriginTrials::k%sTrialName' % feature_name,
'installers': get_install_functions(interfaces, [feature_name])}
- for feature_name, interfaces in types_for_feature.items()]
+ for feature_name, interfaces in list(types_for_feature.items())]
context['installers_by_feature'].sort(key=lambda x: x['name'])
return context
@@ -240,7 +240,7 @@
info_provider = create_component_info_provider(
os.path.normpath(options.info_dir), options.target_component.lower())
- idl_filenames = map(str.strip, open(options.idl_files_list))
+ idl_filenames = list(map(str.strip, open(options.idl_files_list)))
generate_conditional_features(info_provider, options, idl_filenames)
return 0
diff --git a/bindings/scripts/generate_global_constructors.py b/bindings/scripts/generate_global_constructors.py
index c068fbd..fb7714b 100755
--- a/bindings/scripts/generate_global_constructors.py
+++ b/bindings/scripts/generate_global_constructors.py
@@ -106,7 +106,7 @@
def generate_global_constructors_list(interface_name, extended_attributes):
extended_attributes_list = [
name + (('=' + extended_attributes[name]) if extended_attributes[name] else '')
- for name in 'RuntimeEnabled', 'OriginTrialEnabled', 'ContextEnabled', 'SecureContext'
+ for name in ('RuntimeEnabled', 'OriginTrialEnabled', 'ContextEnabled', 'SecureContext')
if name in extended_attributes]
if extended_attributes_list:
extended_string = '[%s] ' % ', '.join(extended_attributes_list)
@@ -170,7 +170,7 @@
record_global_constructors(idl_filename)
# Check for [Exposed] / [Global] mismatch.
- known_global_names = EXPOSED_EXECUTION_CONTEXT_METHOD.keys()
+ known_global_names = list(EXPOSED_EXECUTION_CONTEXT_METHOD.keys())
exposed_global_names = frozenset(global_name_to_constructors)
if not exposed_global_names.issubset(known_global_names):
unknown_global_names = exposed_global_names.difference(known_global_names)
diff --git a/bindings/scripts/generate_init_partial_interfaces.py b/bindings/scripts/generate_init_partial_interfaces.py
index 63b782e..053b10a 100755
--- a/bindings/scripts/generate_init_partial_interfaces.py
+++ b/bindings/scripts/generate_init_partial_interfaces.py
@@ -65,10 +65,10 @@
for file_path in file_paths:
if not file_path.endswith('.idl'):
- print 'WARNING: non-IDL file passed: "%s"' % file_path
+ print('WARNING: non-IDL file passed: "%s"' % file_path)
continue
if not os.path.exists(file_path):
- print 'WARNING: file not found: "%s"' % file_path
+ print('WARNING: file not found: "%s"' % file_path)
continue
idl_file_contents = get_file_contents(file_path)
diff --git a/bindings/scripts/generate_v8_context_snapshot_external_references.py b/bindings/scripts/generate_v8_context_snapshot_external_references.py
index 1fa3fae..b84db55 100644
--- a/bindings/scripts/generate_v8_context_snapshot_external_references.py
+++ b/bindings/scripts/generate_v8_context_snapshot_external_references.py
@@ -160,7 +160,7 @@
target_definitions = definitions[component]
interfaces = target_definitions.interfaces
first_name = target_definitions.first_name
- if first_name in interfaces.keys():
+ if first_name in list(interfaces.keys()):
interface = interfaces[first_name]
self._process_interface(interface, component, interfaces)
diff --git a/bindings/scripts/idl_compiler.py b/bindings/scripts/idl_compiler.py
index d876a0c..d80f899 100755
--- a/bindings/scripts/idl_compiler.py
+++ b/bindings/scripts/idl_compiler.py
@@ -80,11 +80,10 @@
return options, idl_filename
-class IdlCompiler(object):
+class IdlCompiler(object, metaclass=abc.ABCMeta):
"""The IDL Compiler.
"""
- __metaclass__ = abc.ABCMeta
def __init__(self, output_directory, cache_directory=None,
code_generator_class=None, snake_case_generated_files=False,
diff --git a/bindings/scripts/idl_definitions.py b/bindings/scripts/idl_definitions.py
index f2f0e54..112f4eb 100644
--- a/bindings/scripts/idl_definitions.py
+++ b/bindings/scripts/idl_definitions.py
@@ -78,13 +78,12 @@
# TypedObject
################################################################################
-class TypedObject(object):
+class TypedObject(object, metaclass=abc.ABCMeta):
"""Object with a type, such as an Attribute or Operation (return value).
The type can be an actual type, or can be a typedef, which must be resolved
by the TypedefResolver before passing data to the code generator.
"""
- __metaclass__ = abc.ABCMeta
idl_type_attributes = ('idl_type',)
@@ -136,22 +135,22 @@
def accept(self, visitor):
visitor.visit_definitions(self)
- for interface in self.interfaces.itervalues():
+ for interface in self.interfaces.values():
interface.accept(visitor)
- for callback_function in self.callback_functions.itervalues():
+ for callback_function in self.callback_functions.values():
callback_function.accept(visitor)
- for dictionary in self.dictionaries.itervalues():
+ for dictionary in self.dictionaries.values():
dictionary.accept(visitor)
- for enumeration in self.enumerations.itervalues():
+ for enumeration in self.enumerations.values():
enumeration.accept(visitor)
for implement in self.implements:
implement.accept(visitor)
- for typedef in self.typedefs.itervalues():
+ for typedef in self.typedefs.values():
typedef.accept(visitor)
def update(self, other):
"""Update with additional IdlDefinitions."""
- for interface_name, new_interface in other.interfaces.iteritems():
+ for interface_name, new_interface in other.interfaces.items():
if not new_interface.is_partial:
# Add as new interface
self.interfaces[interface_name] = new_interface
@@ -364,7 +363,7 @@
else:
raise ValueError('Unrecognized node class: %s' % child_class)
- if len(filter(None, [self.iterable, self.maplike, self.setlike])) > 1:
+ if len([_f for _f in [self.iterable, self.maplike, self.setlike] if _f]) > 1:
raise ValueError('Interface can only have one of iterable<>, maplike<> and setlike<>.')
# TODO(rakuco): This validation logic should be in v8_interface according to bashi@.
diff --git a/bindings/scripts/idl_reader.py b/bindings/scripts/idl_reader.py
index 2bd219d..58604f2 100644
--- a/bindings/scripts/idl_reader.py
+++ b/bindings/scripts/idl_reader.py
@@ -56,8 +56,8 @@
definitions. There is no filename convention in this case.
- Otherwise, an IDL file is invalid.
"""
- targets = (definitions.interfaces.values() +
- definitions.dictionaries.values())
+ targets = (list(definitions.interfaces.values()) +
+ list(definitions.dictionaries.values()))
number_of_targets = len(targets)
if number_of_targets > 1:
raise Exception(
@@ -127,7 +127,7 @@
validate_blink_idl_definitions(idl_filename, idl_file_basename, definitions)
else:
if len(definitions.interfaces) > 1:
- print '----- Supplemental interfaces %s' % len(definitions.interfaces)
+ print('----- Supplemental interfaces %s' % len(definitions.interfaces))
# Validate extended attributes
if not self.extended_attribute_validator:
diff --git a/bindings/scripts/idl_types.py b/bindings/scripts/idl_types.py
index 05298fe..d788508 100644
--- a/bindings/scripts/idl_types.py
+++ b/bindings/scripts/idl_types.py
@@ -348,7 +348,7 @@
return True
def single_matching_member_type(self, predicate):
- matching_types = filter(predicate, self.flattened_member_types)
+ matching_types = list(filter(predicate, self.flattened_member_types))
if len(matching_types) > 1:
raise ValueError('%s is ambiguous.' % self.name)
return matching_types[0] if matching_types else None
diff --git a/bindings/scripts/idl_validator.py b/bindings/scripts/idl_validator.py
index e75099e..5b6914c 100644
--- a/bindings/scripts/idl_validator.py
+++ b/bindings/scripts/idl_validator.py
@@ -52,7 +52,7 @@
def validate_extended_attributes(self, definitions):
# FIXME: this should be done when parsing the file, rather than after.
- for interface in definitions.interfaces.itervalues():
+ for interface in definitions.interfaces.values():
self.validate_extended_attributes_node(interface)
for attribute in interface.attributes:
self.validate_extended_attributes_node(attribute)
@@ -62,7 +62,7 @@
self.validate_extended_attributes_node(argument)
def validate_extended_attributes_node(self, node):
- for name, values_string in node.extended_attributes.iteritems():
+ for name, values_string in node.extended_attributes.items():
self.validate_name_values_string(name, values_string)
def validate_name_values_string(self, name, values_string):
@@ -96,7 +96,7 @@
line = line.strip()
if not line or line.startswith('#'):
continue
- name, _, values_string = map(str.strip, line.partition('='))
+ name, _, values_string = list(map(str.strip, line.partition('=')))
value_list = [value.strip() for value in values_string.split('|')]
yield name, value_list
diff --git a/bindings/scripts/interface_dependency_resolver.py b/bindings/scripts/interface_dependency_resolver.py
index 239f11e..6e266d3 100644
--- a/bindings/scripts/interface_dependency_resolver.py
+++ b/bindings/scripts/interface_dependency_resolver.py
@@ -103,7 +103,7 @@
'this definition: %s, because this should '
'have a dictionary.' % definitions.idl_name)
- target_interface = next(definitions.interfaces.itervalues())
+ target_interface = next(iter(definitions.interfaces.values()))
interface_name = target_interface.name
interface_info = self.interfaces_info[interface_name]
@@ -161,7 +161,7 @@
dependency_definitions = reader.read_idl_file(dependency_idl_filename)
dependency_component = idl_filename_to_component(dependency_idl_filename)
- dependency_interface = next(dependency_definitions.interfaces.itervalues())
+ dependency_interface = next(iter(dependency_definitions.interfaces.values()))
transfer_extended_attributes(dependency_interface,
dependency_idl_filename)
@@ -307,7 +307,7 @@
'ImplementedAs', dependency_interface.name))
def update_attributes(attributes, extras):
- for key, value in extras.items():
+ for key, value in list(extras.items()):
if key not in attributes:
attributes[key] = value
@@ -342,8 +342,8 @@
cpp_includes.update(interface.get('cpp_includes', {}).get(component, {}))
return unforgeable_attributes, referenced_interfaces, cpp_includes
- for component, definitions in resolved_definitions.iteritems():
- for interface_name, interface in definitions.interfaces.iteritems():
+ for component, definitions in resolved_definitions.items():
+ for interface_name, interface in definitions.interfaces.items():
interface_info = interfaces_info[interface_name]
inherited_unforgeable_attributes, referenced_interfaces, cpp_includes = collect_unforgeable_attributes_in_ancestors(interface_info.get('parent'), component)
# This loop may process the same interface many times, so it's
diff --git a/bindings/scripts/overload_set_algorithm.py b/bindings/scripts/overload_set_algorithm.py
index 6425a84..a2505a9 100644
--- a/bindings/scripts/overload_set_algorithm.py
+++ b/bindings/scripts/overload_set_algorithm.py
@@ -121,7 +121,7 @@
# Filter to only methods that are actually overloaded
method_counts = Counter(method['name'] for method in methods)
overloaded_method_names = set(name
- for name, count in method_counts.iteritems()
+ for name, count in method_counts.items()
if count > 1)
overloaded_methods = [method for method in methods
if method['name'] in overloaded_method_names]
diff --git a/bindings/scripts/utilities.py b/bindings/scripts/utilities.py
index 421cd65..3656c95 100644
--- a/bindings/scripts/utilities.py
+++ b/bindings/scripts/utilities.py
@@ -8,7 +8,7 @@
"""
import os
-import cPickle as pickle
+import pickle
import re
import shlex
import string
@@ -190,8 +190,8 @@
@property
def callback_functions(self):
- return dict(self._component_info_core['callback_functions'].items() +
- self._component_info_modules['callback_functions'].items())
+ return dict(list(self._component_info_core['callback_functions'].items()) +
+ list(self._component_info_modules['callback_functions'].items()))
@property
def specifier_for_export(self):
@@ -212,7 +212,7 @@
|target| will be updated with |diff|. Part of |diff| may be re-used in
|target|.
"""
- for key, value in diff.iteritems():
+ for key, value in diff.items():
if key not in target:
target[key] = value
elif type(value) == dict:
@@ -398,7 +398,7 @@
# Discard empty parts, which may exist due to trailing comma
if extended_attribute.strip()]
for part in parts:
- name, _, value = map(string.strip, part.partition('='))
+ name, _, value = list(map(str.strip, part.partition('=')))
extended_attributes[name] = value
return extended_attributes
diff --git a/bindings/scripts/v8_dictionary.py b/bindings/scripts/v8_dictionary.py
index 0909d11..0440cc2 100644
--- a/bindings/scripts/v8_dictionary.py
+++ b/bindings/scripts/v8_dictionary.py
@@ -167,7 +167,7 @@
if duplicated_member and duplicated_member != member:
raise Exception('Member name conflict: %s' % cpp_name)
members_dict[cpp_name] = member
- return sorted(members_dict.values(), key=lambda member: member['cpp_name'])
+ return sorted(list(members_dict.values()), key=lambda member: member['cpp_name'])
includes.clear()
header_forward_decls = set()
diff --git a/bindings/scripts/v8_interface.py b/bindings/scripts/v8_interface.py
index c8b3245..c66a80c 100644
--- a/bindings/scripts/v8_interface.py
+++ b/bindings/scripts/v8_interface.py
@@ -1134,9 +1134,9 @@
# Extract argument and IDL type to simplify accessing these in each loop.
arguments = [method['arguments'][index] for method in methods]
- arguments_methods = zip(arguments, methods)
+ arguments_methods = list(zip(arguments, methods))
idl_types = [argument['idl_type_object'] for argument in arguments]
- idl_types_methods = zip(idl_types, methods)
+ idl_types_methods = list(zip(idl_types, methods))
# We can’t do a single loop through all methods or simply sort them, because
# a method may be listed in multiple steps of the resolution algorithm, and
diff --git a/bindings/scripts/v8_methods.py b/bindings/scripts/v8_methods.py
index b199947..13201ad 100644
--- a/bindings/scripts/v8_methods.py
+++ b/bindings/scripts/v8_methods.py
@@ -477,7 +477,7 @@
% idl_type.name)
# Union container objects are "null" initially.
return '/* null default value */'
- if isinstance(default_value.value, basestring):
+ if isinstance(default_value.value, str):
member_type = idl_type.string_member_type
elif isinstance(default_value.value, (int, float)):
member_type = idl_type.numeric_member_type