Tentative progress in stats

parent 3cb2c508a0
commit 216e442f5b

4 changed files with 359 additions and 127 deletions
@@ -1,5 +1,6 @@
 #!/usr/bin/env python3
 
+from stats_accu import StatsAccumulator
 import gather_stats
 
 import argparse
@@ -15,6 +16,9 @@ class Config:
         if args.feature == 'gather':
             self.output = args.output
 
+        elif args.feature == 'sample':
+            self.size = int(args.size)
+
         elif args.feature == 'analyze':
             self.data_file = args.data_file
 
@@ -34,6 +38,19 @@ class Config:
 
         subparsers = parser.add_subparsers(help='Subcommands')
 
+        # Sample stats
+        parser_sample = subparsers.add_parser(
+            'sample',
+            help='Same as gather, but for a random subset of files')
+        parser_sample.set_defaults(feature='sample')
+        parser_sample.add_argument('--size', '-n',
+                                   default=1000,
+                                   help=('Pick this number of files'))
+        parser_sample.add_argument('--output', '-o',
+                                   default='elf_data',
+                                   help=('Output data to this file. Defaults '
+                                         'to "elf_data"'))
+
         # Gather stats
         parser_gather = subparsers.add_parser(
             'gather',
@@ -70,11 +87,17 @@ def main():
 
     if config.feature == 'gather':
         stats_accu = gather_stats.gather_system_files(config)
-        stats_accu.serialize(config.output)
+        stats_accu.dump(config.output)
 
+    elif config.feature == 'sample':
+        stats_accu = gather_stats.gather_system_files(
+            config,
+            sample_size=config.size)
+
     elif config.feature == 'analyze':
         # TODO
         print("Not implemented", file=sys.stderr)
+        stats_accu = StatsAccumulator.load(config.data_file)
         sys.exit(1)
 
 
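The new sample subcommand drives the same gathering code as gather, just on a random subset of the system's ELFs. Assuming the entry point above is saved as main.py (its real file name is not visible in this extract), invocations would look like:

    ./main.py gather -o elf_data           # every system ELF
    ./main.py sample -n 500 -o elf_data    # a random subset of 500 files

As of this tentative commit, only the gather branch writes its result to disk; the sample branch gathers but does not yet dump.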
gather_stats.py
@@ -1,52 +1,119 @@
-from pyelftools_overlay import system_elfs
-import pathos
+from pyelftools_overlay import system_elfs, get_cfi
+from elftools.dwarf import callframe
+import multiprocessing
 import signal
-import itertools
+import random
 
-from stats_accu import StatsAccumulator
+from stats_accu import \
+    StatsAccumulator, SingleFdeData, \
+    RegsList, FdeData, DwarfInstr
 
 
-class FilesProcessor:
-    def __init__(self, cores, stats_accu=None):
+class FilesProcessor(multiprocessing.Process):
+    def __init__(self, elf_list, shared_queue):
+        super().__init__()
         self.stop_processing = False
-        self._processed_counter = itertools.count()
-        self.cores = cores
-        if stats_accu is None:
-            stats_accu = StatsAccumulator()
-        self.stats_accu = stats_accu
+        self.processed_counter = 0
+        self.elf_list = elf_list
+        self.shared_queue = shared_queue
 
     def stop_processing_now(self):
         self.stop_processing = True
 
-    def next_counter(self):
-        return self._processed_counter.__next__()
+    def run(self):
+        pos = 0
+        for descr in self.elf_list:
+            if self.stop_processing:
+                break
+            self.process_single_file(descr, pos)
+            pos += 1
 
-    def run(self, elf_list):
-        self.elf_count = len(elf_list)
-        with pathos.multiprocessing.ProcessPool(nodes=self.cores) as pool:
-            pool.map(self.process_single_file, elf_list)
+        print("=== Finished {} ===".format(self.name))
+        return 0
 
-    def process_single_file(self, elf_path):
+    def process_single_file(self, elf_descr, pos_in_list):
         if self.stop_processing:
             return
 
-        cur_file_count = self.next_counter()
-        print('> [{}/{} {:.0f}%] {}'.format(
-            cur_file_count, self.elf_count,
-            cur_file_count / self.elf_count * 100, elf_path))
-        self.stats_accu.process_file(elf_path)
+        elf_path, elf_type = elf_descr
+
+        self.processed_counter += 1
+        print('[{}, {}/{}] {}'.format(
+            self.shared_queue.qsize(),
+            pos_in_list + 1,
+            len(self.elf_list),
+            elf_path))
+        self.process_file(elf_path, elf_type)
+
+    def process_file(self, path, elftype):
+        ''' Process a single file '''
+
+        cfi = get_cfi(path)
+        if not cfi:
+            return None
+
+        data = FdeData()
+
+        for entry in cfi:
+            if isinstance(entry, callframe.CIE):  # Is a CIE
+                self.process_cie(entry, data)
+            elif isinstance(entry, callframe.FDE):  # Is a FDE
+                self.process_fde(entry, data)
+
+        out = SingleFdeData(path, elftype, data)
+        self.shared_queue.put(out)
+
+    def incr_cell(self, table, key):
+        ''' Increments table[key], or sets it to 1 if unset '''
+        if key in table:
+            table[key] += 1
+        else:
+            table[key] = 1
+
+    def process_cie(self, cie, data):
+        ''' Process a CIE '''
+        pass  # Nothing needed from a CIE
+
+    def process_fde(self, fde, data):
+        ''' Process a FDE '''
+        data.fde_count += 1
+
+        decoded = fde.get_decoded()
+        row_count = len(decoded.table)
+        self.incr_cell(data.fde_with_lines, row_count)
+
+        for row in decoded.table:
+            self.process_reg(data.regs.cfa, row['cfa'])
+            for entry in row:
+                if isinstance(entry, int):
+                    self.process_reg(data.regs.regs[entry], row[entry])
+
+    def process_reg(self, out_reg, reg_def):
+        ''' Process a register '''
+        if isinstance(reg_def, callframe.CFARule):
+            if reg_def.reg is not None:
+                out_reg.regs[reg_def.reg] += 1
+            else:
+                pass  # TODO exprs
+        else:
+            self.incr_cell(out_reg.instrs, DwarfInstr.of_pyelf(reg_def.type))
+            if reg_def.type == callframe.RegisterRule.REGISTER:
+                out_reg.regs[reg_def.arg] += 1
+            elif (reg_def.type == callframe.RegisterRule.EXPRESSION) \
+                    or (reg_def.type == callframe.RegisterRule.VAL_EXPRESSION):
+                pass  # TODO exprs
 
 
-def gather_system_files(config):
+def gather_system_files(config, sample_size=None):
     stats_accu = StatsAccumulator()
-    processor = FilesProcessor(config.cores, stats_accu)
+    processors = []
 
     def signal_graceful_exit(sig, frame):
         ''' Stop gracefully now '''
-        nonlocal processor
+        nonlocal processors
 
         print("Stopping after this ELF…")
-        processor.stop_processing_now()
+        for processor in processors:
+            processor.stop_processing_now()
 
     signal.signal(signal.SIGINT, signal_graceful_exit)
 
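FDE statistics now come from pyelftools' call-frame API. get_cfi lives in pyelftools_overlay and is not shown in this commit; a minimal sketch of such a helper, assuming it returns the list of CIE/FDE entries or None for unusable files:

    from elftools.elf.elffile import ELFFile
    from elftools.common.exceptions import ELFError, DWARFError

    def get_cfi(path):
        ''' Sketch only: the real pyelftools_overlay.get_cfi is not in
            this diff. Returns the CIE/FDE entries of `path`, else None. '''
        try:
            with open(path, 'rb') as handle:
                elf = ELFFile(handle)
                if not elf.has_dwarf_info():
                    return None
                dwarf = elf.get_dwarf_info()
                if dwarf.has_CFI():
                    return dwarf.CFI_entries()     # .debug_frame entries
                if dwarf.has_EH_CFI():
                    return dwarf.EH_CFI_entries()  # .eh_frame entries
        except (ELFError, DWARFError):
            pass
        return None

Each FDE's get_decoded().table is a list of row dicts keyed by 'pc', 'cfa' and DWARF register numbers; that is the structure process_fde above walks.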
@@ -55,6 +122,50 @@ def gather_system_files(config):
     for elf_path in system_elfs():
         elf_list.append(elf_path)
 
-    processor.run(elf_list)
+    if sample_size is not None:
+        elf_list_sampled = random.sample(elf_list, sample_size)
+        elf_list = elf_list_sampled
+
+    elf_count = len(elf_list)
+    elf_per_process = elf_count // config.cores
+    elf_list_slices = []
+    for i in range(config.cores - 1):
+        elf_list_slices.append(
+            elf_list[i * elf_per_process : (i+1) * elf_per_process])
+    elf_list_slices.append(
+        elf_list[(config.cores - 1) * elf_per_process
+                 : config.cores * elf_per_process])
+
+    shared_queue = multiprocessing.Queue(elf_count)
+
+    for elf_range in elf_list_slices:
+        processors.append(FilesProcessor(elf_range, shared_queue))
+
+    if config.cores > 1:
+        for processor in processors:
+            processor.start()
+
+        while True:
+            for processor in processors:
+                if processor.is_alive():
+                    print("== Waiting {} ({} {}) ==".format(
+                        processor.name, processor.exitcode,
+                        processor.is_alive()))
+                    processor.join(timeout=1)
+                    if processor.exitcode is None:
+                        break  # Loop around
+                    print("== Joined {} ==".format(processor.name))
+
+            terminated = True
+            for processor in processors:
+                if processor.exitcode is None:
+                    terminated = False
+            if terminated:
+                break
+    else:
+        processors[0].run()  # run(), not start(): in the same thread
+
+    while not shared_queue.empty():  # Reliable because everything is joined
+        stats_accu.add_fde(shared_queue.get_nowait())
 
     return stats_accu
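The parallelism model switches from a pathos process pool to hand-rolled multiprocessing: the ELF list is sliced up front, each FilesProcessor owns one slice plus a shared queue, and the parent drains the queue only once every worker is joined. (As written, the slicing keeps cores * (elf_count // cores) items, silently dropping the elf_count % cores trailing ELFs.) A minimal self-contained sketch of the same pattern, with illustrative names only:

    import multiprocessing

    class Worker(multiprocessing.Process):
        ''' Illustrative stand-in for FilesProcessor: squares its own
            slice of the work list, pushing results onto a shared queue. '''
        def __init__(self, items, results):
            super().__init__()
            self.items = items
            self.results = results

        def run(self):
            for item in self.items:
                self.results.put(item * item)  # stand-in for process_file()

    if __name__ == '__main__':
        items = list(range(100))
        results = multiprocessing.Queue(len(items))  # bounded, as in the diff
        cores = 4
        per_proc = len(items) // cores
        workers = [Worker(items[i * per_proc:(i + 1) * per_proc], results)
                   for i in range(cores)]
        for worker in workers:
            worker.start()
        for worker in workers:
            worker.join()
        total = 0
        while not results.empty():  # reliable here: all producers are joined
            total += results.get_nowait()
        print(total)  # 328350 == sum(i*i for i in range(100))

One documented multiprocessing caveat applies to the sketch and the commit alike: a child that has put large amounts of data on a queue may not terminate until that data is consumed, so joining before draining is only safe while results stay small.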
pyelftools_overlay.py
@@ -2,6 +2,7 @@
 
 from elftools.elf.elffile import ELFFile
 from elftools.common.exceptions import ELFError, DWARFError
+from stats_accu import ElfType
 import os
 
 
@@ -44,20 +45,20 @@ def system_elfs():
             os.readlink(path)))
 
     sysbin_dirs = [
-        '/lib',
-        '/usr/lib',
-        '/usr/local/lib',
-        '/bin',
-        '/usr/bin',
-        '/usr/local/bin',
-        '/sbin',
+        ('/lib', ElfType.ELF_LIB),
+        ('/usr/lib', ElfType.ELF_LIB),
+        ('/usr/local/lib', ElfType.ELF_LIB),
+        ('/bin', ElfType.ELF_BINARY),
+        ('/usr/bin', ElfType.ELF_BINARY),
+        ('/usr/local/bin', ElfType.ELF_BINARY),
+        ('/sbin', ElfType.ELF_BINARY),
     ]
     to_explore = sysbin_dirs
 
     seen_elfs = set()
 
     while to_explore:
-        bindir = to_explore.pop()
+        bindir, elftype = to_explore.pop()
 
         if not os.path.isdir(bindir):
             continue
@@ -65,12 +66,23 @@ def system_elfs():
         for direntry in os.scandir(bindir):
             if not direntry.is_file():
                 if direntry.is_dir():
-                    to_explore.append(direntry.path)
+                    to_explore.append((direntry.path, elftype))
                 continue
 
             canonical_name = readlink_rec(direntry.path)
             if canonical_name in seen_elfs:
                 continue
 
+            valid_elf = True
+            try:
+                with open(canonical_name, 'rb') as handle:
+                    magic_bytes = handle.read(4)
+                    if magic_bytes != b'\x7fELF':
+                        valid_elf = False
+            except Exception:
+                continue
+            if not valid_elf:
+                continue
+
             seen_elfs.add(canonical_name)
-            yield canonical_name
+            yield (canonical_name, elftype)
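The added magic-number test keeps shell scripts and other non-ELF files that live in bin directories away from the ELF parser entirely. As a standalone predicate (hypothetical helper, not part of this commit) the same check reads:

    def is_elf(path):
        ''' True when the file starts with the 4-byte ELF magic. '''
        try:
            with open(path, 'rb') as handle:
                return handle.read(4) == b'\x7fELF'
        except OSError:
            return False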
stats_accu.py
@@ -1,9 +1,9 @@
 from elftools.dwarf import callframe
-from pyelftools_overlay import get_cfi
-from enum import Enum
-import json
+import enum
 import subprocess
 import re
+import json
+import collections
 
 from math import ceil
 
@@ -69,109 +69,195 @@ def elf_so_deps(path):
                          "{}.").format(path, exn.returncode))
 
 
-class ElfType(Enum):
-    ELF_LIB = auto()
-    ELF_BINARY = auto()
+class ElfType(enum.Enum):
+    ELF_LIB = enum.auto()
+    ELF_BINARY = enum.auto()
 
 
+class DwarfInstr(enum.Enum):
+    @staticmethod
+    def of_pyelf(val):
+        _table = {
+            callframe.RegisterRule.UNDEFINED: DwarfInstr.INSTR_UNDEF,
+            callframe.RegisterRule.SAME_VALUE: DwarfInstr.INSTR_SAME_VALUE,
+            callframe.RegisterRule.OFFSET: DwarfInstr.INSTR_OFFSET,
+            callframe.RegisterRule.VAL_OFFSET: DwarfInstr.INSTR_VAL_OFFSET,
+            callframe.RegisterRule.REGISTER: DwarfInstr.INSTR_REGISTER,
+            callframe.RegisterRule.EXPRESSION: DwarfInstr.INSTR_EXPRESSION,
+            callframe.RegisterRule.VAL_EXPRESSION:
+                DwarfInstr.INSTR_VAL_EXPRESSION,
+            callframe.RegisterRule.ARCHITECTURAL:
+                DwarfInstr.INSTR_ARCHITECTURAL,
+        }
+        return _table[val]
+
+    INSTR_UNDEF = enum.auto()
+    INSTR_SAME_VALUE = enum.auto()
+    INSTR_OFFSET = enum.auto()
+    INSTR_VAL_OFFSET = enum.auto()
+    INSTR_REGISTER = enum.auto()
+    INSTR_EXPRESSION = enum.auto()
+    INSTR_VAL_EXPRESSION = enum.auto()
+    INSTR_ARCHITECTURAL = enum.auto()
+
+
+def intify_dict(d):
+    out = {}
+    for key in d:
+        try:
+            nKey = int(key)
+        except Exception:
+            nKey = key
+
+        try:
+            out[nKey] = int(d[key])
+        except ValueError:
+            out[nKey] = d[key]
+    return out
+
+
+class RegData:
+    def __init__(self, instrs=None, regs=None, exprs=None):
+        if instrs is None:
+            instrs = {}
+        if regs is None:
+            regs = [0]*17
+        if exprs is None:
+            exprs = {}
+        self.instrs = intify_dict(instrs)
+        self.regs = regs
+        self.exprs = intify_dict(exprs)
+
+    @staticmethod
+    def map_dict_keys(fnc, dic):
+        out = {}
+        for key in dic:
+            out[fnc(key)] = dic[key]
+        return out
+
+    def dump(self):
+        return {
+            'instrs': RegData.map_dict_keys(lambda x: x.value, self.instrs),
+            'regs': self.regs,
+            'exprs': self.exprs,
+        }
+
+    @staticmethod
+    def load(data):
+        return RegData(
+            instrs=RegData.map_dict_keys(
+                lambda x: DwarfInstr(int(x)),
+                data['instrs']),
+            regs=data['regs'],
+            exprs=data['exprs'],
+        )
+
+
+class RegsList:
+    def __init__(self, cfa=None, regs=None):
+        if cfa is None:
+            cfa = RegsList.fresh_reg()
+        if regs is None:
+            regs = [RegsList.fresh_reg() for _ in range(17)]
+        self.cfa = cfa
+        self.regs = regs
+
+    @staticmethod
+    def fresh_reg():
+        return RegData()
+
+    def dump(self):
+        return {
+            'cfa': RegData.dump(self.cfa),
+            'regs': [RegData.dump(r) for r in self.regs],
+        }
+
+    @staticmethod
+    def load(data):
+        return RegsList(
+            cfa=RegData.load(data['cfa']),
+            regs=[RegData.load(r) for r in data['regs']],
+        )
+
+
+class FdeData:
+    def __init__(self, fde_count=0, fde_with_lines=None, regs=None):
+        if fde_with_lines is None:
+            fde_with_lines = {}
+        if regs is None:
+            regs = RegsList()
+
+        self.fde_count = fde_count
+        self.fde_with_lines = intify_dict(fde_with_lines)
+        self.regs = regs
+
+    def dump(self):
+        return {
+            'fde_count': self.fde_count,
+            'fde_with_lines': self.fde_with_lines,
+            'regs': self.regs.dump(),
+        }
+
+    @staticmethod
+    def load(data):
+        return FdeData(
+            fde_count=int(data['fde_count']),
+            fde_with_lines=data['fde_with_lines'],
+            regs=RegsList.load(data['regs']))
+
+
 class SingleFdeData:
     def __init__(self, path, elf_type, data):
         self.path = path
         self.elf_type = elf_type
-        self.data = data
+        self.data = data  # < of type FdeData
 
         self.gather_deps()
 
     def gather_deps(self):
         """ Collect ldd data on the binary """
-        self.deps = elf_so_deps(self.path)
+        # self.deps = elf_so_deps(self.path)
+        self.deps = []
+
+    def dump(self):
+        return {
+            'path': self.path,
+            'elf_type': self.elf_type.value,
+            'data': self.data.dump()
+        }
+
+    @staticmethod
+    def load(data):
+        return SingleFdeData(
+            data['path'],
+            ElfType(int(data['elf_type'])),
+            FdeData.load(data['data']))
 
 
 class StatsAccumulator:
     def __init__(self):
-        self.elf_count = 0
-        self.fde_count = 0
-        self.fde_row_count = 0
-        self.fde_with_n_rows = {}
+        self.fdes = []
 
-    def serialize(self, path):
-        ''' Save the gathered data to `stream` '''
-        notable_fields = [
-            'elf_count',
-            'fde_count',
-            'fde_row_count',
-            'fde_with_n_rows',
-        ]
-        out = {}
-        for field in notable_fields:
-            out[field] = self.__dict__[field]
+    def add_fde(self, fde_data):
+        self.fdes.append(fde_data)
 
-        with open(path, 'wb') as stream:
-            json.dump(out, stream)
+    def get_fdes(self):
+        return self.fdes
+
+    def add_stats_accu(self, stats_accu):
+        for fde in stats_accu.get_fdes():
+            self.add_fde(fde)
+
+    def dump(self, path):
+        dict_form = [fde.dump() for fde in self.fdes]
+        print(dict_form)
+        with open(path, 'w') as handle:
+            handle.write(json.dumps(dict_form))
 
     @staticmethod
-    def unserialize(path):
+    def load(path):
+        with open(path, 'r') as handle:
+            text = handle.read()
         out = StatsAccumulator()
-        with open(path, 'wb') as stream:
-            data = json.load(stream)
-            for field in data:
-                out.field = data[field]
+        out.fdes = [SingleFdeData.load(data) for data in json.loads(text)]
         return out
-
-    def report(self):
-        ''' Report on the statistics gathered '''
-
-        self.fde_rows_proportion = ProportionFinder(
-            self.fde_with_n_rows)
-
-        rows = [
-            ("ELFs analyzed", self.elf_count),
-            ("FDEs analyzed", self.fde_count),
-            ("FDE rows analyzed", self.fde_row_count),
-            ("Avg. rows per FDE", self.fde_row_count / self.fde_count),
-            ("Median rows per FDE",
-             self.fde_rows_proportion.find_at_proportion(0.5)),
-            ("Max rows per FDE", max(self.fde_with_n_rows.keys())),
-        ]
-
-        title_size = max(map(lambda x: len(x[0]), rows))
-        line_format = "{:<" + str(title_size + 1) + "} {}"
-
-        for row in rows:
-            print(line_format.format(row[0], row[1]))
-
-    def process_file(self, path):
-        ''' Process a single file '''
-
-        cfi = get_cfi(path)
-        if not cfi:
-            return
-
-        self.elf_count += 1
-
-        for entry in cfi:
-            if isinstance(entry, callframe.CIE):  # Is a CIE
-                self.process_cie(entry)
-            elif isinstance(entry, callframe.FDE):  # Is a FDE
-                self.process_fde(entry)
-
-    def incr_cell(self, table, key):
-        ''' Increments table[key], or sets it to 1 if unset '''
-        if key in table:
-            table[key] += 1
-        else:
-            table[key] = 1
-
-    def process_cie(self, cie):
-        ''' Process a CIE '''
-        pass  # Nothing needed from a CIE
-
-    def process_fde(self, fde):
-        ''' Process a FDE '''
-        self.fde_count += 1
-
-        decoded = fde.get_decoded()
-        row_count = len(decoded.table)
-        self.fde_row_count += row_count
-        self.incr_cell(self.fde_with_n_rows, row_count)
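StatsAccumulator now round-trips the gathered FDE data through JSON instead of serializing a few counter fields. JSON writes enum members out as ints and hands dict keys back as strings, which is why load rebuilds values via ElfType(int(...)) and DwarfInstr(int(x)), and why intify_dict re-converts keys. A hypothetical round-trip under those assumptions:

    from stats_accu import StatsAccumulator

    stats = StatsAccumulator()
    # ... filled by gather_stats.gather_system_files(config) ...
    stats.dump('elf_data')  # 'elf_data' is the CLI's default output name

    reloaded = StatsAccumulator.load('elf_data')
    assert len(reloaded.get_fdes()) == len(stats.get_fdes())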