[infra] Update run_binary_size_analysis script from python2 to python3.

Fixes https://github.com/flutter/flutter/issues/80727

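Beyond the mechanical renames (Queue -> queue, iteritems()/itervalues()
-> items()/values()), the main adjustment is that subprocess pipes
carry bytes under Python 3: addresses written to addr2line's stdin are
now encoded, and lines read back from addr2line and nm are decoded. A
minimal sketch of that pattern, with a placeholder binary path and
address:

    import subprocess
    import sys

    # Pipes are binary by default under Python 3, so encode requests
    # and decode responses at the process boundary.
    proc = subprocess.Popen(['addr2line', '-f', '-e', '/path/to/binary'],
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            stderr=sys.stderr)
    proc.stdin.write(('%s\n' % hex(0x1234)).encode())
    proc.stdin.flush()
    symbol = proc.stdout.readline().decode().rstrip('\r\n')    # function name
    location = proc.stdout.readline().decode().rstrip('\r\n')  # file:line

The patch also drops bufsize=1 from the addr2line Popen call (line
buffering only applies to text-mode pipes) and removes a stray str()
around multiprocessing.cpu_count() so the job-count clamp compares
integers.
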
Change-Id: Id8c2c649c89c8e0710be9b7f9635d5aa0b800879
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/196022
Commit-Queue: Alexander Aprelev <aam@google.com>
Reviewed-by: Zach Anderson <zra@google.com>
diff --git a/runtime/third_party/binary_size/src/elf_symbolizer.py b/runtime/third_party/binary_size/src/elf_symbolizer.py
index 86a0a07..5154a9b 100644
--- a/runtime/third_party/binary_size/src/elf_symbolizer.py
+++ b/runtime/third_party/binary_size/src/elf_symbolizer.py
@@ -8,7 +8,7 @@
 import multiprocessing
 import os
 import posixpath
-import Queue
+import queue
 import re
 import subprocess
 import sys
@@ -241,7 +241,7 @@
             # Objects required to handle the addr2line subprocess.
             self._proc = None  # Subprocess.Popen(...) instance.
             self._thread = None  # Threading.thread instance.
-            self._out_queue = None  # Queue.Queue instance (for buffering a2l stdout).
+            self._out_queue = None  # queue.Queue instance (for buffering a2l stdout).
             self._RestartAddr2LineProcess()
 
         def EnqueueRequest(self, addr, callback_arg):
@@ -282,7 +282,7 @@
 
                     try:
                         lines = self._out_queue.get(block=True, timeout=0.25)
-                    except Queue.Empty:
+                    except queue.Empty:
                         # On timeout (1/4 s.) repeat the inner loop and check if either the
                         # addr2line process did crash or we waited its output for too long.
                         continue
@@ -303,7 +303,7 @@
             while True:
                 try:
                     lines = self._out_queue.get_nowait()
-                except Queue.Empty:
+                except queue.Empty:
                     break
                 self._ProcessSymbolOutput(lines)
 
@@ -328,7 +328,7 @@
             self._proc = None
 
         def _WriteToA2lStdin(self, addr):
-            self._proc.stdin.write('%s\n' % hex(addr))
+            self._proc.stdin.write(('%s\n' % hex(addr)).encode())
             if self._symbolizer.inlines:
                 # In the case of inlines we output an extra blank line, which causes
                 # addr2line to emit a (??,??:0) tuple that we use as a boundary marker.
@@ -396,7 +396,7 @@
             # The only reason of existence of this Queue (and the corresponding
             # Thread below) is the lack of a subprocess.stdout.poll_avail_lines().
             # Essentially this is a pipe able to extract a couple of lines atomically.
-            self._out_queue = Queue.Queue()
+            self._out_queue = queue.Queue()
 
-            # Start the underlying addr2line process in line buffered mode.
+            # Start the underlying addr2line process.
 
@@ -408,7 +408,6 @@
                 cmd += ['--inlines']
             self._proc = subprocess.Popen(
                 cmd,
-                bufsize=1,
                 stdout=subprocess.PIPE,
                 stdin=subprocess.PIPE,
                 stderr=sys.stderr,
@@ -442,8 +441,8 @@
             try:
                 lines_for_one_symbol = []
                 while True:
-                    line1 = process_pipe.readline().rstrip('\r\n')
-                    line2 = process_pipe.readline().rstrip('\r\n')
+                    line1 = process_pipe.readline().decode().rstrip('\r\n')
+                    line2 = process_pipe.readline().decode().rstrip('\r\n')
                     if not line1 or not line2:
                         break
                     inline_has_more_lines = inlines and (
diff --git a/runtime/third_party/binary_size/src/run_binary_size_analysis.py b/runtime/third_party/binary_size/src/run_binary_size_analysis.py
index ac41f4a..dfd5b97 100755
--- a/runtime/third_party/binary_size/src/run_binary_size_analysis.py
+++ b/runtime/third_party/binary_size/src/run_binary_size_analysis.py
@@ -60,16 +60,16 @@
         no_path_bucket = root_children[NAME_NO_PATH_BUCKET]
         old_children = no_path_bucket[NODE_CHILDREN_KEY]
         count = 0
-        for symbol_type, symbol_bucket in old_children.iteritems():
+        for symbol_type, symbol_bucket in old_children.items():
             count += len(symbol_bucket[NODE_CHILDREN_KEY])
         if count > BIG_BUCKET_LIMIT:
             new_children = {}
             no_path_bucket[NODE_CHILDREN_KEY] = new_children
             current_bucket = None
             index = 0
-            for symbol_type, symbol_bucket in old_children.iteritems():
+            for symbol_type, symbol_bucket in old_children.items():
                 for symbol_name, value in symbol_bucket[
-                        NODE_CHILDREN_KEY].iteritems():
+                        NODE_CHILDREN_KEY].items():
                     if index % BIG_BUCKET_LIMIT == 0:
-                        group_no = (index / BIG_BUCKET_LIMIT) + 1
+                        group_no = (index // BIG_BUCKET_LIMIT) + 1
                         current_bucket = _MkChild(
@@ -89,7 +89,7 @@
     if NODE_CHILDREN_KEY in node:
         largest_list_len = len(node[NODE_CHILDREN_KEY])
         child_list = []
-        for child in node[NODE_CHILDREN_KEY].itervalues():
+        for child in node[NODE_CHILDREN_KEY].values():
             child_largest_list_len = MakeChildrenDictsIntoLists(child)
             if child_largest_list_len > largest_list_len:
                 largest_list_len = child_largest_list_len
@@ -305,7 +305,8 @@
         source_root_path=src_path)
     user_interrupted = False
     try:
-        for line in nm_output_lines:
+        for binary_line in nm_output_lines:
+            line = binary_line.decode()
             match = sNmPattern.match(line)
             if match:
                 location = match.group(5)
@@ -344,7 +345,8 @@
     symbol_path_origin_dir = os.path.dirname(os.path.abspath(library))
 
     with open(outfile, 'w') as out:
-        for line in nm_output_lines:
+        for binary_line in nm_output_lines:
+            line = binary_line.decode()
             match = sNmPattern.match(line)
             if match:
                 location = match.group(5)
@@ -496,7 +498,8 @@
   since we are right now transitioning from DWARF2 to newer formats,
   it's possible to have a mix of tools that are not compatible. Detect
   that and abort rather than produce meaningless output."""
-    tool_output = subprocess.check_output([addr2line_binary, '--version'])
+    tool_output = subprocess.check_output([addr2line_binary,
+                                           '--version']).decode()
     version_re = re.compile(r'^GNU [^ ]+ .* (\d+).(\d+).*?$', re.M)
     parsed_output = version_re.match(tool_output)
     major = int(parsed_output.group(1))
@@ -636,7 +639,7 @@
         # CPU power isn't the limiting factor. It's I/O limited, memory
         # bus limited and available-memory-limited. Too many processes and
         # the computer will run out of memory and it will be slow.
-        opts.jobs = max(2, min(4, str(multiprocessing.cpu_count())))
+        opts.jobs = max(2, min(4, multiprocessing.cpu_count()))
 
     if opts.addr2line_binary:
         assert os.path.isfile(opts.addr2line_binary)