Skip to content

Commit c7a0aed

Browse files
lisroach and facebook-github-bot
authored and committed
Fbandroid/native to Python 3.
Reviewed By: dkgi

Differential Revision: D35147434

fbshipit-source-id: bf0078e74e7101842101270bce407e38e114fbc1
1 parent 5b69781 commit c7a0aed

File tree

13 files changed

+97
-80
lines changed

13 files changed

+97
-80
lines changed

gen_simple_module.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -164,7 +164,7 @@ def write_py_wrapper(base_64_bytes_blob, files, filename, tar_xz):
164164
compression_specific_api=_TAR_XZ_API if tar_xz else _ZIP_API,
165165
)
166166
)
167-
for key, val in files.items():
167+
for key, val in list(files.items()):
168168
f.write(f'{key} = _load("{os.path.basename(val)}")\n')
169169

170170

@@ -174,7 +174,7 @@ def main():
174174
key_val[: key_val.find("=")]: key_val[key_val.find("=") + 1 :]
175175
for key_val in args.inputs
176176
}
177-
base64_blob = compress_and_base_64(files.values(), args.tarxz)
177+
base64_blob = compress_and_base_64(list(files.values()), args.tarxz)
178178
write_py_wrapper(base64_blob, files, args.output, args.tarxz)
179179

180180

opt/basic-block/trace_analysis.py

Lines changed: 23 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -63,30 +63,38 @@ def main():
6363

6464
print("========Summary=========")
6565
if trace:
66-
print("Num of Methods: %d, Num of Blocks: %d" % (num_methods, num_blocks))
66+
print(("Num of Methods: %d, Num of Blocks: %d" % (num_methods, num_blocks)))
6767
print(
68-
"Blocks/Method: %.2f, Instructions/Block: %.2f"
69-
% (num_blocks / num_methods, num_instructions / num_blocks)
68+
(
69+
"Blocks/Method: %.2f, Instructions/Block: %.2f"
70+
% (num_blocks / num_methods, num_instructions / num_blocks)
71+
)
7072
)
71-
print("Average Degree: %.2f" % (fan_in / num_blocks))
72-
print("Number of Virtual Methods: %d" % (num_virtual))
73+
print(("Average Degree: %.2f" % (fan_in / num_blocks)))
74+
print(("Number of Virtual Methods: %d" % (num_virtual)))
7375
print(
74-
"%dth percentile in Method Size: %.2f"
75-
% (
76-
int(args.percentile),
77-
np.percentile(np.array(method_size_array), int(args.percentile)),
76+
(
77+
"%dth percentile in Method Size: %.2f"
78+
% (
79+
int(args.percentile),
80+
np.percentile(np.array(method_size_array), int(args.percentile)),
81+
)
7882
)
7983
)
8084
print(
81-
"%dth percentile in Block Size: %.2f"
82-
% (
83-
int(args.percentile),
84-
np.percentile(np.array(block_size_array), int(args.percentile)),
85+
(
86+
"%dth percentile in Block Size: %.2f"
87+
% (
88+
int(args.percentile),
89+
np.percentile(np.array(block_size_array), int(args.percentile)),
90+
)
8591
)
8692
)
8793
print(
88-
"Methods of size %d: %d"
89-
% (int(args.countSize), method_size_array.count(int(args.countSize)))
94+
(
95+
"Methods of size %d: %d"
96+
% (int(args.countSize), method_size_array.count(int(args.countSize)))
97+
)
9098
)
9199
# print method_size_array
92100
# print block_size_array

pyredex/unpacker.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -645,7 +645,7 @@ def unpackage(
645645
with open(jarpath, "wb") as jar:
646646
jar.write(cj.read(jar_sizes[i]))
647647

648-
for j in jar_sizes.keys():
648+
for j in list(jar_sizes.keys()):
649649
jar_size = getsize(
650650
dex_dir + "/" + self._store_name + "-" + str(j) + ".dex.jar"
651651
)

redex_gdb_hooks.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -159,7 +159,7 @@ def invoke(self, arg, from_tty):
159159
val = get_gdb_val_for_str(arg)
160160
printer = lookup_function(val)
161161
if printer is None:
162-
print('No symbol "{0}" in current context'.format(arg))
162+
print(('No symbol "{0}" in current context'.format(arg)))
163163
return
164164
printer.to_string()
165165

test/bisect-passes.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@ def bisect(passes, config, config_path, cmd):
4040
while lower < upper - 1:
4141
m = (lower + upper) / 2
4242
testpasses = slice_passes(passes, lower, m)
43-
print("Testing passes: " + str(testpasses))
43+
print(("Testing passes: " + str(testpasses)))
4444
config["redex"]["passes"] = testpasses
4545
with open(config_path, "w") as config_file:
4646
json.dump(config, config_file)
@@ -64,7 +64,7 @@ def bisect(passes, config, config_path, cmd):
6464

6565
try:
6666
bad_passes = bisect(config["redex"]["passes"], config, args.config, args.cmd)
67-
print("FAILING PASSES:" + str(bad_passes))
67+
print(("FAILING PASSES:" + str(bad_passes)))
6868
finally:
6969
with open(args.config, "w") as config_file:
7070
config_file.write(contents)

tools/callgraph-analysis/corelib/loader.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -72,7 +72,7 @@ def get_nodes_in_class(self, class_name):
7272
print("class not exist in callgraph")
7373
return None
7474
return_val = []
75-
for value in self.node_classes[class_name].values():
75+
for value in list(self.node_classes[class_name].values()):
7676
return_val.extend(value)
7777
return return_val
7878

tools/hprof/dump_classes_from_hprof.py

Lines changed: 34 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -497,14 +497,14 @@ def resolve(self, hprof_data):
497497
merged_fields_builder[name][clazz.name] = value
498498
clazz = clazz.super_class
499499

500-
for key, value in merged_fields_builder.items():
500+
for key, value in list(merged_fields_builder.items()):
501501
# Avoid over-writing python internals, like __dict__
502502
if key in self.fields.__dict__:
503503
key = "__hprof_" + key
504504
assert key not in self.fields.__dict__
505505

506506
if len(value) == 1:
507-
setattr(self.fields, key, next(iter(value.values())))
507+
setattr(self.fields, key, next(iter(list(value.values()))))
508508
else:
509509
# There is a conflict in the class hierarchy (e.g. privates with the
510510
# same name), so we need to store a dictionary.
@@ -522,8 +522,8 @@ def outgoing_references(self, filter_function=lambda x: True):
522522
# and attribute weight the wrong way.
523523
# Classes should be walked explicitly
524524
refs = []
525-
for class_name, fields in self.class_fields.items():
526-
for name, value in fields.items():
525+
for class_name, fields in list(self.class_fields.items()):
526+
for name, value in list(fields.items()):
527527
if isinstance(value, HprofObject) and filter_function(value):
528528
refs.append(FieldReference(self, value, class_name, name))
529529
return refs
@@ -959,11 +959,11 @@ def add_root(self, hprof_root):
959959

960960
def resolve(self):
961961
# First resolve heaps
962-
for heap in self.heap_dict.values():
962+
for heap in list(self.heap_dict.values()):
963963
heap.resolve(self)
964964

965965
# Then resolve classes
966-
for obj in self.object_id_dict.values():
966+
for obj in list(self.object_id_dict.values()):
967967
if isinstance(obj, HprofClass):
968968
clazz = obj
969969
clazz.resolve(self)
@@ -976,18 +976,18 @@ def resolve(self):
976976
self.class_name_dict[clazz.name]
977977
)
978978
self.dupe_class_dict[clazz.name].append(clazz)
979-
print("Warning: duplicate class: %s" % clazz.name)
979+
print(("Warning: duplicate class: %s" % clazz.name))
980980
else:
981981
self.class_name_dict[clazz.name] = clazz
982982
# Fix up all classes to derive from java.lang.Class
983983
# at the time we create every HprofClass 'java.lang.Class' may have
984984
# not be parsed yet and thus unavailable
985985
clsCls = self.class_name_dict["java.lang.Class"]
986-
for cls in self.class_name_dict.values():
986+
for cls in list(self.class_name_dict.values()):
987987
cls.clazz = clsCls
988988

989989
# Then other objects
990-
for obj in self.object_id_dict.values():
990+
for obj in list(self.object_id_dict.values()):
991991
if not isinstance(obj, HprofClass):
992992
obj.resolve(self)
993993
obj.is_root = False # Fixed up for root objects below
@@ -1019,15 +1019,15 @@ def lookup_load_class_record(self, class_object_id):
10191019
def lookup_instances_of_class(self, class_name):
10201020
return [
10211021
obj
1022-
for obj in self.object_id_dict.values()
1022+
for obj in list(self.object_id_dict.values())
10231023
if isinstance(obj, HprofInstance) and obj.clazz.name == class_name
10241024
]
10251025

10261026
def load_inverted_references(self):
10271027
if self.inverted_references is None:
10281028
# Will be much faster for later invocations
10291029
self.inverted_references = defaultdict(list)
1030-
for heap_obj in self.object_id_dict.values():
1030+
for heap_obj in list(self.object_id_dict.values()):
10311031
for ref in heap_obj.outgoing_references():
10321032
self.inverted_references[ref.referee].append(ref)
10331033

@@ -1121,7 +1121,7 @@ def roots_of_obj(hprof_data, obj):
11211121

11221122
def zygote_references_to_app_objects(hprof_data):
11231123
references = []
1124-
for obj in hprof_data.object_id_dict.values():
1124+
for obj in list(hprof_data.object_id_dict.values()):
11251125
if obj.heap.name == "zygote":
11261126
for reference in obj.outgoing_references():
11271127
if reference.referee.heap.name != "zygote":
@@ -1154,7 +1154,7 @@ def write_bitmap(bitmap_instance, filename):
11541154
def open_bitmaps(bitmap_instances):
11551155
tmp_dir = tempfile.mkdtemp(suffix="bitmaps")
11561156
subprocess.call(["open", tmp_dir]) # this only works in Mac - sorry!
1157-
print("Writing %d bitmaps to %s." % (len(bitmap_instances), tmp_dir))
1157+
print(("Writing %d bitmaps to %s." % (len(bitmap_instances), tmp_dir)))
11581158
for i, bitmap in enumerate(bitmap_instances):
11591159
write_bitmap(bitmap, os.path.join(tmp_dir, "bitmap_%s.png" % bitmap.object_id))
11601160
sys.stdout.write("\r%d of %d complete" % (i + 1, len(bitmap_instances)))
@@ -1174,12 +1174,12 @@ def print_view_tree(view_root=None):
11741174
else:
11751175
view_root = all_view_roots[0]
11761176
else:
1177-
print("not an hprofdata: %s" % view_root.__class__)
1177+
print(("not an hprofdata: %s" % view_root.__class__))
11781178

1179-
print("%s" % view_root)
1179+
print(("%s" % view_root))
11801180

11811181
def print_view_node(view_node, indent):
1182-
print("%s%s" % (indent, view_node))
1182+
print(("%s%s" % (indent, view_node)))
11831183
if "android.view.ViewGroup" in view_node.class_fields:
11841184
children = view_node.class_fields["android.view.ViewGroup"]["mChildren"]
11851185
for child in children.array_values:
@@ -1364,25 +1364,30 @@ def forward_comparator(x, y):
13641364
# substring can result in wasted char arrays
13651365
# This isn't exact - need to figure out way of determining unused chars in the middle
13661366
def wasted_string_char_arrays(hprof_data):
1367-
char_arrays = filter(
1368-
lambda v: isinstance(v, HprofPrimitiveArray) and v.prim_type is HprofBasic.CHAR,
1369-
hprof_data.object_id_dict.values(),
1367+
char_arrays = list(
1368+
filter(
1369+
lambda v: isinstance(v, HprofPrimitiveArray)
1370+
and v.prim_type is HprofBasic.CHAR,
1371+
list(hprof_data.object_id_dict.values()),
1372+
)
13701373
)
1371-
with_wasted = map(lambda x: (x, wasted_segments(x)), char_arrays)
1372-
return filter(lambda x: len(x[1]) > 0, with_wasted)
1374+
with_wasted = [(x, wasted_segments(x)) for x in char_arrays]
1375+
return [x for x in with_wasted if len(x[1]) > 0]
13731376

13741377

13751378
def wasted_string_char_count(hprof_data):
13761379
wasted_char_array_info = wasted_string_char_arrays(hprof_data)
13771380

13781381
def segment_length(segments):
1379-
return sum(map(lambda x: x[1] - x[0], segments))
1382+
return sum([x[1] - x[0] for x in segments])
13801383

1381-
return sum(map(lambda x: segment_length(x[1]), wasted_char_array_info))
1384+
return sum([segment_length(x[1]) for x in wasted_char_array_info])
13821385

13831386

13841387
def app_heap_objects(hprof_data):
1385-
return [o for o in hprof_data.object_id_dict.values() if o.heap.name != "zygote"]
1388+
return [
1389+
o for o in list(hprof_data.object_id_dict.values()) if o.heap.name != "zygote"
1390+
]
13861391

13871392

13881393
# return a set of containing 'clazz' and all its subclasses
@@ -1410,7 +1415,9 @@ def instances_in(hprof_data, classes):
14101415
classes = set(classes)
14111416
else:
14121417
classes = {classes}
1413-
return {obj for obj in hprof_data.object_id_dict.values() if obj.clazz in classes}
1418+
return {
1419+
obj for obj in list(hprof_data.object_id_dict.values()) if obj.clazz in classes
1420+
}
14141421

14151422

14161423
# return a map of class => {instances} for the given sequence of instances
@@ -1436,7 +1443,7 @@ def java_locals(hprof_data):
14361443
for root in hprof_data.roots
14371444
if root.heap_tag == HeapTag.ROOT_THREAD_OBJECT
14381445
}
1439-
thread_locals = {thread: set() for thread in threads.values()}
1446+
thread_locals = {thread: set() for thread in list(threads.values())}
14401447
for loc in locs:
14411448
thread_locals[threads[loc.thread_serial]].add(loc.obj)
14421449
return thread_locals
@@ -1458,7 +1465,7 @@ def java_locals(hprof_data):
14581465
allow_missing_ids = args.allow_missing_ids
14591466
hp = parse_filename(args.hprof)
14601467
classes = []
1461-
for cls_name, cls in hp.class_name_dict.items():
1468+
for cls_name, cls in list(hp.class_name_dict.items()):
14621469
classes.append(
14631470
(
14641471
cls_name,

tools/python/test/test_dex.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -35,19 +35,19 @@ def test_find_string_idx(self):
3535

3636
for i, s_data_item in enumerate(strings):
3737
idx = dex_file.find_string_idx(s_data_item)
38-
self.assertEquals(i, idx, f'Different index for "{s_data_item.data}"')
38+
self.assertEqual(i, idx, f'Different index for "{s_data_item.data}"')
3939

4040
idx = dex_file.find_string_idx(s_data_item.data)
41-
self.assertEquals(i, idx, f'Different index for "{s_data_item.data}"')
41+
self.assertEqual(i, idx, f'Different index for "{s_data_item.data}"')
4242

4343
# Synthesize some strings.
4444
for s_data_item in strings:
4545
if s_data_item.data:
4646
input = s_data_item.data
4747
before = chr(ord(input[0]) - 1) + input[1:]
4848
idx = dex_file.find_string_idx(before)
49-
self.assertEquals(idx, -1, f'Found "{before}"')
49+
self.assertEqual(idx, -1, f'Found "{before}"')
5050

5151
after = s_data_item.data + "X"
5252
idx = dex_file.find_string_idx(after)
53-
self.assertEquals(idx, -1, f'Found "{after}"')
53+
self.assertEqual(idx, -1, f'Found "{after}"')

tools/reachability-analysis/lib/analysis.py

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -43,12 +43,12 @@ def group_by_common_keys(d):
4343
matched classes.
4444
"""
4545
value_to_keys = defaultdict(set)
46-
for k, values in d.items():
46+
for k, values in list(d.items()):
4747
for v in values:
4848
value_to_keys[v].add(k)
4949

5050
grouped_keys = defaultdict(set)
51-
for value, keys in value_to_keys.items():
51+
for value, keys in list(value_to_keys.items()):
5252
grouped_keys[frozenset(keys)].add(value)
5353

5454
return grouped_keys
@@ -59,7 +59,7 @@ def find_nodes(graph, filter_fn):
5959
Find all nodes whose names pass the predicate :filter_fn.
6060
"""
6161
nodes = set()
62-
for node in graph.nodes.values():
62+
for node in list(graph.nodes.values()):
6363
if filter_fn(node.name):
6464
nodes.add(node)
6565
return nodes
@@ -70,7 +70,7 @@ def find_nodes_in_packages(graph, pkg_prefixes):
7070
Find all nodes that fall under the list of :pkg_prefixes.
7171
"""
7272
nodes = set()
73-
for node in graph.nodes.values():
73+
for node in list(graph.nodes.values()):
7474
for pkg_prefix in pkg_prefixes:
7575
# If we have an array, use its base type
7676
base_type = node.name.lstrip("[")
@@ -121,7 +121,7 @@ def group_members_by_class(graph):
121121
Return a map of class -> set of members in that class.
122122
"""
123123
grouped_members = defaultdict(set)
124-
for (ty, name), node in graph.nodes.items():
124+
for (ty, name), node in list(graph.nodes.items()):
125125
if ty in [ReachableObjectType.FIELD, ReachableObjectType.METHOD]:
126126
cls, sep, _ = name.partition(";")
127127
cls += ";"
@@ -157,13 +157,13 @@ def mark(node):
157157
for succ in node.succs:
158158
mark(succ)
159159

160-
seeds = [node for node in graph.nodes.values() if len(node.preds) == 0]
160+
seeds = [node for node in list(graph.nodes.values()) if len(node.preds) == 0]
161161

162162
for seed in seeds:
163163
mark(seed)
164164

165165
closure = set()
166-
for node in graph.nodes.values():
166+
for node in list(graph.nodes.values()):
167167
if node not in visited:
168168
closure.add(node)
169169

0 commit comments

Comments
 (0)