Skip to content

Commit

Permalink
fix(py): update script to collect crab's results
Browse files Browse the repository at this point in the history
  • Loading branch information
LinerSu committed Sep 23, 2021
1 parent 4d9a992 commit 7feee1a
Show file tree
Hide file tree
Showing 3 changed files with 124 additions and 72 deletions.
2 changes: 1 addition & 1 deletion .gitignore
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
build
build*
build_fuzz
debug
cex
Expand Down
106 changes: 65 additions & 41 deletions scripts/get_exper_brunch_stat.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,52 +3,76 @@
import argparse
from collections import defaultdict


def read_brunchstat_from_log(log_file_name):
    """Parse a ctest/brunch log into {test_name: [stat_value, ...]}.

    A line containing "Test: <name>_unsat_test" starts a new test; every
    following "BRUNCH_STAT <key> <value>" line appends <value> to that
    test's row.  The two crab stats ("crab.isderef.not.solve" and
    "crab.isderef.solve") are optional per test, so a 0 placeholder is
    inserted for each one that is missing; the "BMC" stat marks the end
    of the crab section and resets the tracking flags.  This keeps every
    row aligned with the header written by write_brunchstat_into_csv.
    """
    data = defaultdict(list)
    cur_test = None
    seen_not_solve = False  # saw "crab.isderef.not.solve" for current test
    seen_solve = False      # saw "crab.isderef.solve" for current test
    # with-statement closes the log even if parsing raises
    with open(log_file_name, 'r') as log_file:
        for line in log_file:
            new_test = re.search("Test: ", line)
            if new_test:
                test_name = line[new_test.span()[1]:]
                # drop the trailing "_unsat_test" plus newline (12 chars)
                cur_test = test_name[:-12]
            elif line.startswith("BRUNCH_STAT"):
                stat = line.split()
                key = stat[1]
                # keys are mutually exclusive, so an elif chain is
                # equivalent to the original's separate if-tests
                if key == "crab.isderef.not.solve":
                    seen_not_solve = True
                elif key == "crab.isderef.solve":
                    if not seen_not_solve:
                        data[cur_test].append(0)  # pad missing not.solve
                    seen_not_solve = True
                    seen_solve = True
                elif key == "BMC":
                    if not seen_not_solve:
                        data[cur_test].append(0)  # pad missing not.solve
                    if not seen_solve:
                        data[cur_test].append(0)  # pad missing solve
                    seen_not_solve = False
                    seen_solve = False
                data[cur_test].append(stat[-1])
    return data

def write_brunchstat_into_csv(data, out_file):
    """Write one CSV row per test to `out_file`: the job name followed by
    the stat values collected by read_brunchstat_from_log.

    The header lists the columns in the order the stats appear in the
    log, including the two crab columns that the reader zero-pads when
    absent.
    """
    header = [
        "job_name",
        "result",
        "bmc.circ_sz",
        "bmc.dag_sz",
        "crab.isderef.not.solve",
        "crab.isderef.solve",
        "bmc_time",
        "bmc_solve_time",
        "opsem_assert_time",
        "opsem_simp_time",
        "seahorn_total_time",
    ]
    with open(out_file, 'w+', newline='') as csvfile:
        writer = csv.writer(csvfile)
        writer.writerow(header)
        for job_name, stats in data.items():
            writer.writerow([job_name, *stats])


if __name__ == "__main__":
    # CLI entry point: parse a brunch/ctest log and dump the stats as CSV.
    parser = argparse.ArgumentParser(
        description='in and out files')
    parser.add_argument('logfile', type=str, help=".log file to read from")
    parser.add_argument(
        '--outfile',
        '-o',
        type=str,
        default="brunch_stat.csv",
        help="output csv filename")
    args = parser.parse_args()
    data = read_brunchstat_from_log(args.logfile)
    write_brunchstat_into_csv(data, args.outfile)
88 changes: 58 additions & 30 deletions scripts/get_exper_res.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,27 @@
"klee": "klee.csv"}


def extra_to_filename(extra, suffix='.csv', prefix=''):
    """Map extra CLI flags to a result filename.

    ['--a=b', '--c=d'] becomes '<prefix>a_b_c_d<suffix>'.  With no flags
    the name is 'base<suffix>' -- note `prefix` is NOT applied in that
    case (callers such as make_build_path handle the base tag themselves).
    """
    if not extra:
        return f'base{suffix}'
    parts = []
    for flag in extra:
        # strip a leading '--'; flags without it are kept verbatim
        if flag.startswith('--'):
            flag = flag[2:]
        parts.extend(flag.split('='))
    return f'{prefix}{"_".join(parts)}{suffix}'


def make_build_path(extra, build_path='../build'):
    """Derive a per-configuration build directory name from the extra
    flags, create the directory if it does not exist, and publish its
    absolute path through the module global BUILDABSPATH.
    """
    tag = extra_to_filename(extra, '', '_') if extra else '_base'
    target = build_path + tag
    print(f'[Build Path] {target}')
    if not os.path.exists(target):
        os.mkdir(target)
    global BUILDABSPATH
    BUILDABSPATH = os.path.abspath(target)


def get_output_level():
if args.debug:
output_level = sys.stdout
Expand All @@ -30,6 +51,7 @@ def get_output_level():
def make_new_cmake_conf():
use_klee = "ON" if args.klee else "OFF"
use_bleeding_edge = "ON" if args.bleed_edge else "OFF"
use_crab = "ON" if "--crab" in extra else "OFF"
if args.smack_parser:
use_smack = "ON" if args.smack else "OFF"
smack_enable_no_mem_split = "ON" if args.mem_no_split else "OFF"
Expand All @@ -39,7 +61,8 @@ def make_new_cmake_conf():
smack_args = ""
return f'cmake -DSEA_LINK=llvm-link-10 -DCMAKE_C_COMPILER=clang-10\
-DCMAKE_CXX_COMPILER=clang++-10 -DSEA_ENABLE_KLEE={use_klee} {smack_args}\
-DSEA_WITH_BLEEDING_EDGE={use_bleeding_edge} -DSEAHORN_ROOT={SEAHORN_ROOT} ../ -GNinja'
-DSEA_WITH_BLEEDING_EDGE={use_bleeding_edge} -DSEA_ENABLE_CRAB={use_crab}\
-DSEAHORN_ROOT={SEAHORN_ROOT} ../ -GNinja'


def read_data_from_xml(res_data):
Expand Down Expand Up @@ -72,90 +95,85 @@ def collect_res_from_ctest(file_name):
read_data_from_xml(res_data)
write_data_into_csv(
"{dir}/{file}".format(dir="../data", file=file_name), res_data)
print("Done, find result csv file at: %s" % file_name)

def extra_to_filename(extra, suffix='csv'):
'''extra: --a=b --c=d to filename: a.b.c.d.csv'''
if (len(extra) == 0):
return f'base.{suffix}'
parts = []
for flag in extra:
if flag.startswith('--'):
flag = flag[2:]
parts.extend(flag.split('='))
return f'{"_".join(parts)}.{suffix}'
print("[Results] Please find result csv file at: ../data/%s" % file_name)


def run_ctest_for_seahorn():
    """Configure, build and run the full ctest suite for SeaHorn, then
    collect the ctest results and the brunch stats into CSV files.

    Reads module globals: `extra` (pass-through verification flags),
    `args` (CLI options) and BUILDABSPATH (set by make_build_path).
    """
    print("[SeaHorn] Start making SeaHorn results...")
    set_env = ''
    if extra and len(extra) > 0:
        verify_flags = " ".join(extra)
        print(f'Run SeaHorn with extra configs: {verify_flags} ')
        # forwarded to the test harness via the VERIFY_FLAGS env var
        set_env = f'env VERIFY_FLAGS="{verify_flags}"'
    cmake_conf = make_new_cmake_conf()
    command_lst = ["rm -rf *", cmake_conf, "ninja",
                   f'{set_env} ctest -j{os.cpu_count()} -D ExperimentalTest -R . --timeout {args.timeout}']
    make_build_path(extra)
    # chain the commands so they all run in one bash session inside the
    # build directory
    cddir = "cd " + BUILDABSPATH
    for strcmd in command_lst:
        cddir += " ; " + strcmd
    if args.debug:
        print(f'[Command] {cddir}')
    process = subprocess.Popen(
        '/bin/bash',
        shell=True,
        stdin=subprocess.PIPE,
        stdout=get_output_level())
    _ = process.communicate(cddir.encode())
    collect_res_from_ctest(extra_to_filename(extra))
    collect_stat_from_ctest_log(extra_to_filename(extra, suffix='.brunch.csv'))


def collect_stat_from_ctest_log(outfile):
    """Locate the newest ctest log under the build tree and convert its
    brunch stats into a CSV named `outfile` in the data directory.

    Reads module globals BUILDABSPATH and DATAABSPATH.
    """
    test_tmpdir = os.path.join(BUILDABSPATH, 'Testing', 'Temporary')
    logfiles = [f for f in os.listdir(test_tmpdir)
                if os.path.isfile(os.path.join(test_tmpdir, f))
                and f.startswith("LastTest_")]
    # os.listdir order is arbitrary, so logfiles[0] was non-deterministic;
    # pick the lexicographically greatest name instead, which for
    # timestamp-suffixed LastTest_* files is the newest log
    # -- TODO confirm ctest's LastTest_ naming sorts chronologically
    latest_log = max(logfiles)
    print("collecting brunch stat from " + latest_log)
    data = read_brunchstat_from_log(os.path.join(test_tmpdir, latest_log))
    outpath = os.path.join(DATAABSPATH, outfile)
    write_brunchstat_into_csv(data, outpath)



def run_ctest_for_klee():
    """Configure, build and run only the klee_* ctest targets, then
    collect the results into the KLEE CSV (FILE_DICT["klee"]).

    Reads module globals `args` and BUILDABSPATH (set by make_build_path).
    """
    cmake_conf = make_new_cmake_conf()
    command_lst = ["rm -rf *", cmake_conf, "ninja",
                   f'ctest -j{os.cpu_count()} -D ExperimentalTest -R klee_ --timeout {args.timeout}']
    print("[KLEE] Start making KLEE results...")
    make_build_path(["--klee"])
    # chain the commands so they all run in one bash session inside the
    # build directory
    cddir = "cd " + BUILDABSPATH
    for strcmd in command_lst:
        cddir += " ; " + strcmd
    if args.debug:
        print(f'[Command] {cddir}')
    process = subprocess.Popen(
        '/bin/bash',
        stdin=subprocess.PIPE,
        stdout=get_output_level())
    _ = process.communicate(cddir.encode())
    collect_res_from_ctest(FILE_DICT["klee"])


def run_ctest_for_smack():
    """Configure, build and run only the smack_* ctest targets (fixed
    200-second timeout), then collect the results into a CSV named after
    the precision/check/memory-split options.

    Reads module globals `args` and BUILDABSPATH (set by make_build_path).
    """
    cmake_conf = make_new_cmake_conf()
    command_lst = ["rm -rf *", cmake_conf, "ninja",
                   f'ctest -j{os.cpu_count()} -D ExperimentalTest -R smack_ --timeout 200']
    print("[SMACK] Start making SMACK results...")
    make_build_path(["--smack"])
    # chain the commands so they all run in one bash session inside the
    # build directory
    cddir = "cd " + BUILDABSPATH
    for strcmd in command_lst:
        cddir += " ; " + strcmd
    if args.debug:
        print(f'[Command] {cddir}')
    process = subprocess.Popen(
        '/bin/bash',
        stdin=subprocess.PIPE,
        stdout=get_output_level())
    _ = process.communicate(cddir.encode())
    mem_split = 'mem_no_split' if args.mem_no_split else 'mem_split'
    collect_res_from_ctest(extra_to_filename(
        [str(args.precise), str(args.checks), mem_split]))


def main():
os.makedirs(DATAABSPATH, exist_ok=True)
Expand All @@ -177,15 +195,25 @@ def main():
parser.add_argument('--bleed_edge', action='store_true', default=False)
parser.add_argument('--debug', action='store_true', default=False)
parser.add_argument('--timeout', type=int, default=2000,
help='Seconds before timeout for each test')
subparsers = parser.add_subparsers(help='sub-commands help', dest="smack_parser")
help='Seconds before timeout for each test')
subparsers = parser.add_subparsers(
help='sub-commands help', dest="smack_parser")
smack_parser = subparsers.add_parser('smack', help="smack help")
smack_parser.add_argument('--smack', action='store_true', default=False)
smack_parser.add_argument('--precise', type=str, default='unbounded-integer',
smack_parser.add_argument(
'--precise',
type=str,
default='unbounded-integer',
help='Set Smack precise: unbounded-integer or bit vector')
smack_parser.add_argument('--checks', type=str, default='assertions',
smack_parser.add_argument(
'--checks',
type=str,
default='assertions',
help='Set Smack property checks: assertions or valid-deref')
smack_parser.add_argument('--mem_no_split', action='store_true', default=False,
smack_parser.add_argument(
'--mem_no_split',
action='store_true',
default=False,
help='Set Smack uses no memory split')
args, extra = parser.parse_known_args()
if args.klee:
Expand Down

0 comments on commit 7feee1a

Please sign in to comment.