author     Zbigniew Jędrzejewski-Szmek <zbyszek@in.waw.pl>    2016-01-04 20:18:47 +0100
committer  Zbigniew Jędrzejewski-Szmek <zbyszek@in.waw.pl>    2020-05-30 11:24:58 +0200
commit     c544fc319c53a07f108dbfe62468b11909ca96d9 (patch)
tree       298d2136952b995c3c5d3654d5fc005116b8157f /tools
parent     basic/hashmap: update comment (diff)
tools/gdb: decrease indentation to 4 spaces
This follows PEP 8 and matches other Python code in systemd.
Diffstat (limited to 'tools')
-rw-r--r--    tools/gdb-sd_dump_hashmaps.py | 138
1 file changed, 69 insertions(+), 69 deletions(-)
diff --git a/tools/gdb-sd_dump_hashmaps.py b/tools/gdb-sd_dump_hashmaps.py
index 4e8593f320..d245a01ca0 100644
--- a/tools/gdb-sd_dump_hashmaps.py
+++ b/tools/gdb-sd_dump_hashmaps.py
@@ -6,74 +6,74 @@ from __future__ import print_function
import gdb
class sd_dump_hashmaps(gdb.Command):
- "dump systemd's hashmaps"
-
- def __init__(self):
- super(sd_dump_hashmaps, self).__init__("sd_dump_hashmaps", gdb.COMMAND_DATA, gdb.COMPLETE_NONE)
-
- def invoke(self, arg, from_tty):
- d = gdb.parse_and_eval("hashmap_debug_list")
- all_entry_sizes = gdb.parse_and_eval("all_entry_sizes")
- all_direct_buckets = gdb.parse_and_eval("all_direct_buckets")
- uchar_t = gdb.lookup_type("unsigned char")
- ulong_t = gdb.lookup_type("unsigned long")
- debug_offset = gdb.parse_and_eval("(unsigned long)&((HashmapBase*)0)->debug")
-
- print("type, hash, indirect, entries, max_entries, buckets, creator")
- while d:
- h = gdb.parse_and_eval("(HashmapBase*)((char*)%d - %d)" % (int(d.cast(ulong_t)), debug_offset))
-
- if h["has_indirect"]:
- storage_ptr = h["indirect"]["storage"].cast(uchar_t.pointer())
- n_entries = h["indirect"]["n_entries"]
- n_buckets = h["indirect"]["n_buckets"]
- else:
- storage_ptr = h["direct"]["storage"].cast(uchar_t.pointer())
- n_entries = h["n_direct_entries"]
- n_buckets = all_direct_buckets[int(h["type"])];
-
- t = ["plain", "ordered", "set"][int(h["type"])]
-
- print("{}, {}, {}, {}, {}, {}, {} ({}:{})".format(t, h["hash_ops"], bool(h["has_indirect"]), n_entries, d["max_entries"], n_buckets, d["func"], d["file"], d["line"]))
-
- if arg != "" and n_entries > 0:
- dib_raw_addr = storage_ptr + (all_entry_sizes[h["type"]] * n_buckets)
-
- histogram = {}
- for i in xrange(0, n_buckets):
- dib = int(dib_raw_addr[i])
- histogram[dib] = histogram.get(dib, 0) + 1
-
- for dib in sorted(iter(histogram)):
- if dib != 255:
- print("{:>3} {:>8} {} of entries".format(dib, histogram[dib], 100.0*histogram[dib]/n_entries))
- else:
- print("{:>3} {:>8} {} of slots".format(dib, histogram[dib], 100.0*histogram[dib]/n_buckets))
- print("mean DIB of entries: {}".format(sum([dib*histogram[dib] for dib in iter(histogram) if dib != 255])*1.0/n_entries))
-
- blocks = []
- current_len = 1
- prev = int(dib_raw_addr[0])
- for i in xrange(1, n_buckets):
- dib = int(dib_raw_addr[i])
- if (dib == 255) != (prev == 255):
- if prev != 255:
- blocks += [[i, current_len]]
- current_len = 1
- else:
- current_len += 1
-
- prev = dib
- if prev != 255:
- blocks += [[i, current_len]]
- # a block may be wrapped around
- if len(blocks) > 1 and blocks[0][0] == blocks[0][1] and blocks[-1][0] == n_buckets - 1:
- blocks[0][1] += blocks[-1][1]
- blocks = blocks[0:-1]
- print("max block: {}".format(max(blocks, key=lambda a: a[1])))
- print("sum block lens: {}".format(sum(b[1] for b in blocks)))
- print("mean block len: {}".format((1.0 * sum(b[1] for b in blocks) / len(blocks))))
-
- d = d["debug_list_next"]
+ "dump systemd's hashmaps"
+
+ def __init__(self):
+ super(sd_dump_hashmaps, self).__init__("sd_dump_hashmaps", gdb.COMMAND_DATA, gdb.COMPLETE_NONE)
+
+ def invoke(self, arg, from_tty):
+ d = gdb.parse_and_eval("hashmap_debug_list")
+ all_entry_sizes = gdb.parse_and_eval("all_entry_sizes")
+ all_direct_buckets = gdb.parse_and_eval("all_direct_buckets")
+ uchar_t = gdb.lookup_type("unsigned char")
+ ulong_t = gdb.lookup_type("unsigned long")
+ debug_offset = gdb.parse_and_eval("(unsigned long)&((HashmapBase*)0)->debug")
+
+ print("type, hash, indirect, entries, max_entries, buckets, creator")
+ while d:
+ h = gdb.parse_and_eval("(HashmapBase*)((char*)%d - %d)" % (int(d.cast(ulong_t)), debug_offset))
+
+ if h["has_indirect"]:
+ storage_ptr = h["indirect"]["storage"].cast(uchar_t.pointer())
+ n_entries = h["indirect"]["n_entries"]
+ n_buckets = h["indirect"]["n_buckets"]
+ else:
+ storage_ptr = h["direct"]["storage"].cast(uchar_t.pointer())
+ n_entries = h["n_direct_entries"]
+ n_buckets = all_direct_buckets[int(h["type"])];
+
+ t = ["plain", "ordered", "set"][int(h["type"])]
+
+ print("{}, {}, {}, {}, {}, {}, {} ({}:{})".format(t, h["hash_ops"], bool(h["has_indirect"]), n_entries, d["max_entries"], n_buckets, d["func"], d["file"], d["line"]))
+
+ if arg != "" and n_entries > 0:
+ dib_raw_addr = storage_ptr + (all_entry_sizes[h["type"]] * n_buckets)
+
+ histogram = {}
+ for i in xrange(0, n_buckets):
+ dib = int(dib_raw_addr[i])
+ histogram[dib] = histogram.get(dib, 0) + 1
+
+ for dib in sorted(iter(histogram)):
+ if dib != 255:
+ print("{:>3} {:>8} {} of entries".format(dib, histogram[dib], 100.0*histogram[dib]/n_entries))
+ else:
+ print("{:>3} {:>8} {} of slots".format(dib, histogram[dib], 100.0*histogram[dib]/n_buckets))
+ print("mean DIB of entries: {}".format(sum([dib*histogram[dib] for dib in iter(histogram) if dib != 255])*1.0/n_entries))
+
+ blocks = []
+ current_len = 1
+ prev = int(dib_raw_addr[0])
+ for i in xrange(1, n_buckets):
+ dib = int(dib_raw_addr[i])
+ if (dib == 255) != (prev == 255):
+ if prev != 255:
+ blocks += [[i, current_len]]
+ current_len = 1
+ else:
+ current_len += 1
+
+ prev = dib
+ if prev != 255:
+ blocks += [[i, current_len]]
+ # a block may be wrapped around
+ if len(blocks) > 1 and blocks[0][0] == blocks[0][1] and blocks[-1][0] == n_buckets - 1:
+ blocks[0][1] += blocks[-1][1]
+ blocks = blocks[0:-1]
+ print("max block: {}".format(max(blocks, key=lambda a: a[1])))
+ print("sum block lens: {}".format(sum(b[1] for b in blocks)))
+ print("mean block len: {}".format((1.0 * sum(b[1] for b in blocks) / len(blocks))))
+
+ d = d["debug_list_next"]
sd_dump_hashmaps()
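
For reference, a minimal way to exercise the reindented script might look like the gdb session below. This is only a sketch, not part of the commit: it assumes a systemd binary built with debug symbols, gdb with Python scripting support, and that gdb is started from a systemd source checkout. The "verbose" argument is arbitrary; invoke() only checks that the argument string is non-empty before printing the extra DIB histogram and block statistics, and hashmap_debug_list is presumably only populated in builds with hashmap debugging enabled.

    # Load the script and register the sd_dump_hashmaps command.
    (gdb) source tools/gdb-sd_dump_hashmaps.py
    # Dump one summary line per tracked hashmap.
    (gdb) sd_dump_hashmaps
    type, hash, indirect, entries, max_entries, buckets, creator
    ...
    # Any non-empty argument additionally prints per-hashmap DIB histograms
    # and block statistics.
    (gdb) sd_dump_hashmaps verbose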