kudu-commits mailing list archives

From a...@apache.org
Subject [kudu] 02/02: [python] Fix Python 3 syntax issues
Date Thu, 16 Jan 2020 04:39:56 GMT
This is an automated email from the ASF dual-hosted git repository.

adar pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kudu.git

commit 8463663ca9d6b0f4440dc2d4d321de62b7b6f909
Author: Grant Henke <granthenke@apache.org>
AuthorDate: Wed Jan 15 10:23:35 2020 -0600

    [python] Fix Python 3 syntax issues
    
    This patch fixes many of the Python 3 incompatibilities in
    the various Kudu Python scripts while still maintaining Python
    2 compatibility.
    
    Change-Id: I5d5e06ea6299032d84a76440afb42a70fd461abd
    Reviewed-on: http://gerrit.cloudera.org:8080/15037
    Reviewed-by: Bankim Bhavsar <bankim@cloudera.com>
    Reviewed-by: Adar Dembo <adar@cloudera.com>
    Tested-by: Adar Dembo <adar@cloudera.com>
---
 build-support/build_source_release.py              |  2 +-
 build-support/check_compatibility.py               |  4 ++--
 build-support/clang_tidy_gerrit.py                 | 25 +++++++++++-----------
 build-support/dist_test.py                         |  6 +++---
 build-support/iwyu.py                              |  2 +-
 .../relocate_binaries_for_mini_cluster.py          |  6 +++---
 build-support/parse_test_failure.py                | 12 +++++++----
 build-support/push_to_asf.py                       |  2 +-
 build-support/release/check-rat-report.py          |  2 +-
 build-support/run_dist_test.py                     |  4 ++--
 build-support/test_result_server.py                |  6 +++++-
 .../python/graphite-kudu/kudu/kudu_graphite.py     |  2 +-
 src/kudu/benchmarks/wal_hiccup-parser.py           |  6 +++---
 src/kudu/experiments/merge-test.py                 |  4 ++++
 src/kudu/scripts/get-job-stats-from-mysql.py       |  4 ++--
 src/kudu/scripts/graph-metrics.py                  |  4 ++--
 src/kudu/scripts/max_skew_estimate.py              |  8 +++++--
 src/kudu/scripts/parse_metrics_log.py              | 10 ++++-----
 src/kudu/scripts/write-jobs-stats-to-mysql.py      |  2 +-
 19 files changed, 63 insertions(+), 48 deletions(-)
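
Most of the diff below applies a handful of recurring Python 2/3-compatible idioms rather than file-specific logic changes. A minimal, self-contained sketch of those idioms follows; the file and directory names are made up for illustration, and it assumes from __future__ import print_function is in effect wherever print() is called with keyword arguments on Python 2 (the individual scripts may or may not already have that import):

    # Illustrative only; names and values here are not taken from the patch.
    from __future__ import print_function  # print() behaves as a function on Python 2 too

    import os
    import sys

    # print statements become print() calls, including stream redirection.
    print("status:", "ok")
    print("an error message", file=sys.stderr)

    # The Python 2-only builtin file() becomes open(), used as a context manager.
    with open("example-output.txt", "w") as f:
        print("one line of output", file=f)

    # Old-style octal literals (0755) are a syntax error on Python 3; 0o755 works on 2.6+ and 3.
    if not os.path.exists("example-dir"):
        os.makedirs("example-dir", mode=0o755)

    # "except ExcType, e" is Python 2-only syntax; "except ExcType as e" works on both.
    try:
        raise ValueError("boom")
    except ValueError as e:
        print("caught:", e)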

diff --git a/build-support/build_source_release.py b/build-support/build_source_release.py
index ad53316..cea4104 100755
--- a/build-support/build_source_release.py
+++ b/build-support/build_source_release.py
@@ -187,7 +187,7 @@ def main():
   run_rat(tarball_path)
 
   print(Colors.GREEN + "Release successfully generated!" + Colors.RESET)
-  print
+  print()
 
 
 if __name__ == "__main__":
diff --git a/build-support/check_compatibility.py b/build-support/check_compatibility.py
index 61d5661..fbf79ea 100755
--- a/build-support/check_compatibility.py
+++ b/build-support/check_compatibility.py
@@ -162,9 +162,9 @@ def run_java_acc(src_name, src, dst_name, dst):
                "\n".join(dst_jars))
 
   annotations_path = os.path.join(get_scratch_dir(), "annotations.txt")
-  with file(annotations_path, "w") as f:
+  with open(annotations_path, "w") as f:
     for ann in PUBLIC_ANNOTATIONS:
-      print >>f,  ann
+      print(ann, file=f)
 
   java_acc_path = os.path.join(get_java_acc_dir(), "japi-compliance-checker.pl")
 
diff --git a/build-support/clang_tidy_gerrit.py b/build-support/clang_tidy_gerrit.py
index c10a0e0..d7d1644 100755
--- a/build-support/clang_tidy_gerrit.py
+++ b/build-support/clang_tidy_gerrit.py
@@ -143,10 +143,10 @@ def post_comments(revision_url_base, gerrit_json_obj):
                       auth=(GERRIT_USER, GERRIT_PASSWORD),
                       data=json.dumps(gerrit_json_obj),
                       headers={'Content-Type': 'application/json'})
-    print "Response:"
-    print r.headers
-    print r.status_code
-    print r.text
+    print("Response:")
+    print(r.headers)
+    print(r.status_code)
+    print(r.text)
 
 
 class TestClangTidyGerrit(unittest.TestCase):
@@ -182,7 +182,6 @@ No relevant changes found.
         self.assertEqual("src/kudu/blah.cc", parsed[1]['path'])
 
 
-
 if __name__ == "__main__":
     # Basic setup and argument parsing.
     init_logging()
@@ -197,31 +196,31 @@ if __name__ == "__main__":
     args = parser.parse_args()
 
     if args.rev_range and not args.no_gerrit:
-        print >>sys.stderr, "--rev-range works only with --no-gerrit"
+        print("--rev-range works only with --no-gerrit", file=sys.stderr)
         sys.exit(1)
 
     # Find the gerrit revision URL, if applicable.
     if not args.no_gerrit:
         revision_url = get_gerrit_revision_url(args.rev)
-        print revision_url
+        print(revision_url)
 
     # Run clang-tidy and parse the output.
     clang_output = run_tidy(args.rev, args.rev_range)
     logging.info("Clang output")
     logging.info(clang_output)
     if args.no_gerrit:
-        print >>sys.stderr, "Skipping gerrit"
+        print("Skipping gerrit", file=sys.stderr)
         sys.exit(0)
     logging.info("=" * 80)
     parsed = parse_clang_output(clang_output)
     if not parsed:
-        print >>sys.stderr, "No warnings"
+        print("No warnings", file=sys.stderr)
         sys.exit(0)
-    print "Parsed clang warnings:"
-    print json.dumps(parsed, indent=4)
+    print("Parsed clang warnings:")
+    print(json.dumps(parsed, indent=4))
 
     # Post the output as comments to the gerrit URL.
     gerrit_json_obj = create_gerrit_json_obj(parsed)
-    print "Will post to gerrit:"
-    print json.dumps(gerrit_json_obj, indent=4)
+    print("Will post to gerrit:")
+    print(json.dumps(gerrit_json_obj, indent=4))
     post_comments(revision_url, gerrit_json_obj)
diff --git a/build-support/dist_test.py b/build-support/dist_test.py
index 8636314..9721460 100755
--- a/build-support/dist_test.py
+++ b/build-support/dist_test.py
@@ -388,7 +388,7 @@ def create_task_json(staging,
   Alternatively, if 'retry_all_tests' is True, all tests will be retried.
   """
   tasks = []
-  with file(staging.archive_dump_path(), "r") as isolate_dump:
+  with open(staging.archive_dump_path(), "r") as isolate_dump:
     inmap = json.load(isolate_dump)
 
   # Some versions of 'isolate batcharchive' directly list the items in
@@ -415,7 +415,7 @@ def create_task_json(staging,
     sys.exit(1)
   outmap = {"tasks": tasks}
 
-  with file(staging.tasks_json_path(), "wt") as f:
+  with open(staging.tasks_json_path(), "wt") as f:
     json.dump(outmap, f)
 
 
@@ -655,7 +655,7 @@ def add_java_subparser(subparsers):
   loop.set_defaults(func=loop_java_test)
 
 def dump_base_deps(parser, options):
-  print json.dumps(get_base_deps(create_dependency_extractor()))
+  print(json.dumps(get_base_deps(create_dependency_extractor())))
 
 def add_internal_commands(subparsers):
   p = subparsers.add_parser('internal', help="[Internal commands not for users]")
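
In dist_test.py the same file() to open() substitution wraps JSON round-trips. A self-contained sketch of that pattern, with a placeholder path and payload rather than the staging paths the script actually uses:

    import json

    # Write a small task map, then read it back, using open() as a context manager.
    outmap = {"tasks": [{"isolate_hash": "abc123"}]}  # placeholder data
    with open("tasks.json", "w") as f:
        json.dump(outmap, f)
    with open("tasks.json", "r") as f:
        inmap = json.load(f)
    print(inmap["tasks"][0]["isolate_hash"])  # prints abc123
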
diff --git a/build-support/iwyu.py b/build-support/iwyu.py
index ff1a6ed..06efbbb 100755
--- a/build-support/iwyu.py
+++ b/build-support/iwyu.py
@@ -173,7 +173,7 @@ def _do_iwyu(flags, paths):
   iwyu_output = _run_iwyu_tool(paths)
   if flags.dump_iwyu_output:
     logging.info("Dumping iwyu output to %s", flags.dump_iwyu_output)
-    with file(flags.dump_iwyu_output, "w") as f:
+    with open(flags.dump_iwyu_output, "w") as f:
       print(iwyu_output, file=f)
   stream = BytesIO(iwyu_output)
   fixer_flags = _get_fixer_flags(flags)
diff --git a/build-support/mini-cluster/relocate_binaries_for_mini_cluster.py b/build-support/mini-cluster/relocate_binaries_for_mini_cluster.py
index 87bb602..562c1f8 100755
--- a/build-support/mini-cluster/relocate_binaries_for_mini_cluster.py
+++ b/build-support/mini-cluster/relocate_binaries_for_mini_cluster.py
@@ -258,11 +258,11 @@ def prep_artifact_dirs(config):
   """
 
   if not os.path.exists(config[ARTIFACT_ROOT]):
-    os.makedirs(config[ARTIFACT_ROOT], mode=0755)
+    os.makedirs(config[ARTIFACT_ROOT], mode=0o755)
   if not os.path.exists(config[ARTIFACT_BIN_DIR]):
-    os.makedirs(config[ARTIFACT_BIN_DIR], mode=0755)
+    os.makedirs(config[ARTIFACT_BIN_DIR], mode=0o755)
   if not os.path.exists(config[ARTIFACT_LIB_DIR]):
-    os.makedirs(config[ARTIFACT_LIB_DIR], mode=0755)
+    os.makedirs(config[ARTIFACT_LIB_DIR], mode=0o755)
 
 def copy_file(src, dest):
   """
diff --git a/build-support/parse_test_failure.py b/build-support/parse_test_failure.py
index dcb6cfc..82abe26 100755
--- a/build-support/parse_test_failure.py
+++ b/build-support/parse_test_failure.py
@@ -289,7 +289,8 @@ class Test(unittest.TestCase):
       base, _ = os.path.splitext(child)
 
       p = LogParser()
-      p.parse_text(file(os.path.join(self._TEST_DIR, child)).read())
+      with open(os.path.join(self._TEST_DIR, child)) as f:
+        p.parse_text(f.read())
       self._do_test(p.text_failure_summary(), base + "-out.txt")
       self._do_test(p.xml_failure_summary(), base + "-out.xml")
 
@@ -297,10 +298,11 @@ class Test(unittest.TestCase):
     path = os.path.join(self._TEST_DIR, filename)
     if self.regenerate:
       print("Regenerating %s" % path)
-      with file(path, "w") as f:
+      with open(path, "w") as f:
         f.write(got_value)
     else:
-      self.assertEquals(got_value, file(path).read())
+      with open(path) as f:
+        self.assertEquals(got_value, f.read())
 
 
 def main():
@@ -312,10 +314,12 @@ def main():
   args = parser.parse_args()
 
   if args.path:
-    in_file = file(args.path)
+    in_file = open(args.path)
   else:
     in_file = sys.stdin
   log_text = in_file.read(MAX_MEMORY)
+  if in_file is not sys.stdin:
+    in_file.close()
   format = args.xml and 'xml' or 'text'
   sys.stdout.write(extract_failure_summary(log_text, format))
 
diff --git a/build-support/push_to_asf.py b/build-support/push_to_asf.py
index f8d0ece..cc19f8f 100755
--- a/build-support/push_to_asf.py
+++ b/build-support/push_to_asf.py
@@ -196,7 +196,7 @@ def do_update(branch, gerrit_sha, apache_sha):
   print(Colors.GREEN + "Running: " + Colors.RESET + " ".join(cmd))
   subprocess.check_call(cmd)
   print(Colors.GREEN + "Successfully updated %s to %s" % (branch, gerrit_sha) + Colors.RESET)
-  print
+  print()
 
 
 def main():
diff --git a/build-support/release/check-rat-report.py b/build-support/release/check-rat-report.py
index 213fd9b..4721452 100755
--- a/build-support/release/check-rat-report.py
+++ b/build-support/release/check-rat-report.py
@@ -53,5 +53,5 @@ for r in resources:
 if not all_ok:
     sys.exit(1)
 
-print 'OK'
+print('OK')
 sys.exit(0)
diff --git a/build-support/run_dist_test.py b/build-support/run_dist_test.py
index 7bd7620..f9b12c7 100755
--- a/build-support/run_dist_test.py
+++ b/build-support/run_dist_test.py
@@ -50,7 +50,7 @@ def is_elf_binary(path):
   if not os.path.isfile(path) or os.path.islink(path):
     return False
   try:
-    with file(path, "rb") as f:
+    with open(path, "rb") as f:
       magic = f.read(4)
       return magic == "\x7fELF"
   except:
@@ -186,7 +186,7 @@ def main():
     if not os.path.exists(test_tmpdir):
       os.makedirs(test_tmpdir)
     cmd = [find_java()] + args
-    stdout = stderr = file(os.path.join(test_logdir, "test-output.txt"), "w")
+    stdout = stderr = open(os.path.join(test_logdir, "test-output.txt"), "w")
   else:
     raise ValueError("invalid test language: " + options.test_language)
   logging.info("Running command: ", cmd)
diff --git a/build-support/test_result_server.py b/build-support/test_result_server.py
index 6f037bf..15e859e 100755
--- a/build-support/test_result_server.py
+++ b/build-support/test_result_server.py
@@ -56,9 +56,13 @@ import logging
 import MySQLdb
 import os
 import parse_test_failure
-from StringIO import StringIO
+from io import StringIO
 import threading
 import uuid
+try:
+  xrange  # For Python 2
+except NameError:
+  xrange = range  # For Python 3
 
 def percent_rate(num, denom):
   if denom == 0:
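
The xrange shim added above, and repeated in merge-test.py and max_skew_estimate.py further down, keeps Python 2's lazy xrange while falling back to the built-in range on Python 3, where range is already lazy. A standalone sketch of the same pattern:

    # Same fallback pattern as in the patch: probe for xrange, alias it to range on Python 3.
    try:
        xrange  # Exists as a builtin on Python 2 only.
    except NameError:
        xrange = range  # On Python 3, range() already returns a lazy sequence.

    print(sum(x for x in xrange(5)))  # prints 10 on both Python 2 and Python 3
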
diff --git a/examples/python/graphite-kudu/kudu/kudu_graphite.py b/examples/python/graphite-kudu/kudu/kudu_graphite.py
index 1765303..4a7d347 100644
--- a/examples/python/graphite-kudu/kudu/kudu_graphite.py
+++ b/examples/python/graphite-kudu/kudu/kudu_graphite.py
@@ -299,6 +299,6 @@ class KuduFinder(object):
         try:
           for node in self._find_nodes_from_pattern(self.kudu_table, query.pattern):
               yield node
-        except Exception, e:
+        except Exception as e:
           log.exception(e)
           raise
diff --git a/src/kudu/benchmarks/wal_hiccup-parser.py b/src/kudu/benchmarks/wal_hiccup-parser.py
index 7ba3d5f..9da8f42 100755
--- a/src/kudu/benchmarks/wal_hiccup-parser.py
+++ b/src/kudu/benchmarks/wal_hiccup-parser.py
@@ -32,7 +32,7 @@ import sys
 
 def main():
     if len(sys.argv) != 2:
-        print "Usage: %s <log output>" % (sys.argv[0],)
+        print("Usage: %s <log output>" % (sys.argv[0],))
         return
     cols = list()
     cols_printed = False
@@ -48,9 +48,9 @@ def main():
             # End of a test result.
             elif "-------" in line and vals is not None:
                 if not cols_printed:
-                    print ",".join(cols)
+                    print(",".join(cols))
                     cols_printed = True
-                print ",".join(vals)
+                print(",".join(vals))
                 vals = None
 
             # Entry in a test result.
diff --git a/src/kudu/experiments/merge-test.py b/src/kudu/experiments/merge-test.py
index feddee3..419301c 100755
--- a/src/kudu/experiments/merge-test.py
+++ b/src/kudu/experiments/merge-test.py
@@ -27,6 +27,10 @@ import logging
 import random
 import time
 import unittest
+try:
+  xrange  # For Python 2
+except NameError:
+  xrange = range  # For Python 3
 
 # Adjustable experiment parameters.
 BLOCK_SIZE = 1000
diff --git a/src/kudu/scripts/get-job-stats-from-mysql.py b/src/kudu/scripts/get-job-stats-from-mysql.py
index 5748886..2fd062f 100644
--- a/src/kudu/scripts/get-job-stats-from-mysql.py
+++ b/src/kudu/scripts/get-job-stats-from-mysql.py
@@ -52,7 +52,7 @@ with con:
   days = sys.argv[2]
   cur.execute("select workload, runtime, build_number from kudu_perf_tpch where workload like %s AND curr_date >= DATE_SUB(NOW(), INTERVAL %s DAY) and runtime != 0 ORDER BY workload, build_number, curr_date", (workload, days))
   rows = cur.fetchall()
-  print 'workload', '\t', 'runtime', '\t', 'build_number'
+  print('workload', '\t', 'runtime', '\t', 'build_number')
   for row in rows:
-    print row[0], '\t', row[1], '\t', row[2]
+    print(row[0], '\t', row[1], '\t', row[2])
 
diff --git a/src/kudu/scripts/graph-metrics.py b/src/kudu/scripts/graph-metrics.py
index 2ed126c..853e2e4 100755
--- a/src/kudu/scripts/graph-metrics.py
+++ b/src/kudu/scripts/graph-metrics.py
@@ -69,9 +69,9 @@ def main():
   keys = get_keys(data)
 
   with sys.stdout as f:
-    print >>f, "\t".join(keys)
+    print("\t".join(keys), file=f)
     for row in data:
-      print >>f, "\t".join([str(row.get(k, 0)) for k in keys])
+      print("\t".join([str(row.get(k, 0)) for k in keys]), file=f)
 
 
 if __name__ == "__main__":
diff --git a/src/kudu/scripts/max_skew_estimate.py b/src/kudu/scripts/max_skew_estimate.py
index 841c75a..9dd4732 100755
--- a/src/kudu/scripts/max_skew_estimate.py
+++ b/src/kudu/scripts/max_skew_estimate.py
@@ -23,6 +23,10 @@
 import math
 import random
 import sys
+try:
+    xrange  # For Python 2
+except NameError:
+    xrange = range  # For Python 3
 
 # Replicates Random::ReservoirSample from kudu/util/random.h.
 def reservoir_sample(n, sample_size, avoid):
@@ -74,13 +78,13 @@ def generate_max_skew(num_servers, num_tablets, rf):
 def main():
     args = sys.argv
     if len(args) != 5:
-        print "max_skew_estimate.py <num trials> <num servers> <num_tablets> <repl factor>"
+        print("max_skew_estimate.py <num trials> <num servers> <num_tablets> <repl factor>")
         sys.exit(1)
     num_trials, num_servers, num_tablets, rf = int(args[1]), int(args[2]), int(args[3]), int(args[4])
     skews = [generate_max_skew(num_servers, num_tablets, rf) for _ in xrange(num_trials)]
     skews.sort()
     for p in [5, 25, 50, 75, 99]:
-        print "%02d percentile: %d" % (p, percentile(skews, p))
+        print("{:02d} percentile: {:d}".format(p, percentile(skews, p)))
 
 if __name__ == "__main__":
     main()
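
The hunk above is also the one place in this patch where the formatting style itself changes, from %-formatting to str.format. Both styles work on Python 2 and Python 3, so that part is purely stylistic; a quick check of the equivalence with made-up values:

    p, skew = 5, 7  # made-up percentile and skew values
    assert "%02d percentile: %d" % (p, skew) == "{:02d} percentile: {:d}".format(p, skew)
    print("{:02d} percentile: {:d}".format(p, skew))  # prints "05 percentile: 7"
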
diff --git a/src/kudu/scripts/parse_metrics_log.py b/src/kudu/scripts/parse_metrics_log.py
index f618010..af8c8c8 100644
--- a/src/kudu/scripts/parse_metrics_log.py
+++ b/src/kudu/scripts/parse_metrics_log.py
@@ -237,9 +237,9 @@ def process(aggregated_prev, aggregated_cur):
     stats = histogram_stats(aggregated_prev, aggregated_cur, metric)
     calc_vals.extend([stats['p50'], stats['p95'], stats['p99'], stats['p999'], stats['max']])
 
-  print (aggregated_cur['ts'] + aggregated_prev['ts'])/2, \
-        cache_ratio, \
-        " ".join(str(x) for x in calc_vals)
+  print((aggregated_cur['ts'] + aggregated_prev['ts'])/2,
+        cache_ratio,
+        " ".join(str(x) for x in calc_vals))
   return aggregated_cur
 
 def main(argv):
@@ -254,13 +254,13 @@ def main(argv):
     simple_headers.append(header + "_p999")
     simple_headers.append(header + "_max")
 
-  print "time cache_hit_ratio", " ".join(simple_headers)
+  print("time cache_hit_ratio", " ".join(simple_headers))
 
   for path in sorted(argv[1:]):
     if path.endswith(".gz"):
       f = gzip.GzipFile(path)
     else:
-      f = file(path)
+      f = open(path)
     for line_number, line in enumerate(f, start=1):
       # Only parse out the "metrics" lines.
       try:
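
Besides the print() conversions, parse_metrics_log.py keeps its gzip-or-plain-text branching and only swaps file(path) for open(path). A sketch of that reader pattern, with the path handling reduced to a placeholder helper:

    import gzip

    def open_maybe_gzipped(path):
        # Mirror the script's branching: gzip for .gz files, plain open() otherwise.
        if path.endswith(".gz"):
            return gzip.GzipFile(path)
        return open(path)
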
diff --git a/src/kudu/scripts/write-jobs-stats-to-mysql.py b/src/kudu/scripts/write-jobs-stats-to-mysql.py
index a6b092a..0493a67 100644
--- a/src/kudu/scripts/write-jobs-stats-to-mysql.py
+++ b/src/kudu/scripts/write-jobs-stats-to-mysql.py
@@ -30,7 +30,7 @@ pwd = os.environ["MYSQLPWD"]
 db = os.environ["MYSQLDB"]
 
 con = mdb.connect(host, user, pwd, db)
-print "Connected to mysql"
+print("Connected to mysql")
 with con:
   cur = con.cursor()
   job_name = sys.argv[1]

