From 29d60dbadf441376f8869c500b16eb8c9fe9d480 Mon Sep 17 00:00:00 2001
From: yusen
Date: Wed, 22 Sep 2021 04:35:47 +0000
Subject: [PATCH 01/24] feat(yaml): add yaml file for running seahorn with crab

---
 seahorn/sea_crab_base.yaml | 47 ++++++++++++++++++++++++++++++++++++++
 1 file changed, 47 insertions(+)
 create mode 100644 seahorn/sea_crab_base.yaml

diff --git a/seahorn/sea_crab_base.yaml b/seahorn/sea_crab_base.yaml
new file mode 100644
index 00000000..a422e727
--- /dev/null
+++ b/seahorn/sea_crab_base.yaml
@@ -0,0 +1,47 @@
+verify_options:
+# Optimization level
+  '-O3': ''
+#
+# PREPROCESSING
+#
+  inline: ''
+  enable-loop-idiom: ''
+  enable-indvar: ''
+  no-lower-gv-init-struct: ''
+  externalize-addr-taken-functions: ''
+  no-kill-vaarg: ''
+  with-arith-overflow: true
+  horn-unify-assumes: false
+# do not instrument library functions with memory assertions
+  no-fat-fns: 'bcmp,memcpy,assert_bytes_match,ensure_linked_list_is_allocated,sea_aws_linked_list_is_valid'
+  horn-gsa: false
+# context-sensitive type-aware alias analysis
+  dsa: sea-cs-t
+# weak support for function pointers. sea-dsa is better but less stable
+  devirt-functions: 'types'
+# bit-precise memory-precise operational semantics
+  bmc: opsem
+  horn-vcgen-use-ite: ''
+# ignore control flow
+  horn-vcgen-only-dataflow: false
+# reduce by data dependence
+  horn-bmc-coi: false
+# static allocator supports symbolic allocation size
+  sea-opsem-allocator: static
+# stack allocation from a symbolic starting point
+  horn-explicit-sp0: false
+# lambdas for memory
+  horn-bv2-lambdas: false
+# use z3 simplifier during vcgen
+  horn-bv2-simplify: true
+# wide memory manager to track pointer sizes
+  horn-bv2-extra-widemem: ''
+# intermediate results in human readable form for debugging
+  '-S': ''
+# keep intermediate results for debugging
+  keep-temps: ''
+  temp-dir: /tmp/verify-c-common
+# time and result stats
+  horn-stats: true
+  horn-bv2-crab-lower-is-deref: ''
+  horn-bmc-crab-dom: 'zones'

From 1397518a5d5e9034ff4fc356af1f9e315053d609 Mon Sep 17 00:00:00 2001
From: yusen
Date: Wed, 22 Sep 2021 04:36:29 +0000
Subject: [PATCH 02/24] feat(lib): add nd_xx function for clam

---
 seahorn/lib/nd_clam.c | 51 +++++++++++++++++++++++++++++++++++++++
 1 file changed, 51 insertions(+)
 create mode 100644 seahorn/lib/nd_clam.c

diff --git a/seahorn/lib/nd_clam.c b/seahorn/lib/nd_clam.c
new file mode 100644
index 00000000..473d3489
--- /dev/null
+++ b/seahorn/lib/nd_clam.c
@@ -0,0 +1,51 @@
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include
+#include "nondet.h"
+
+extern NONDET_FN_ATTR int64_t nd_int64_t(void);
+extern NONDET_FN_ATTR int8_t nd_int8_t(void);
+extern NONDET_FN_ATTR int16_t nd_int16_t(void);
+extern NONDET_FN_ATTR int32_t nd_int32_t(void);
+
+bool nd_malloc_is_fail(void) {
+  // make assumption for crab
+  // malloc always safe
+  return false;
+}
+
+size_t nd_size_t(void) {
+  int64_t res = nd_int64_t();
+  __VERIFIER_assume(res >= 0);
+  return (size_t)res;
+}
+
+uint8_t nd_uint8_t(void) {
+  int8_t res = nd_int8_t();
+  __VERIFIER_assume(res >= 0);
+  return (uint8_t)res;
+}
+
+uint16_t nd_uint16_t(void) {
+  int16_t res = nd_int16_t();
+  __VERIFIER_assume(res >= 0);
+  return (uint16_t)res;
+}
+
+uint32_t nd_uint32_t(void) {
+  int32_t res = nd_int32_t();
+  __VERIFIER_assume(res >= 0);
+  return (uint32_t)res;
+}
+
+uint64_t nd_uint64_t(void) {
+  int64_t res = nd_int64_t();
+  __VERIFIER_assume(res >= 0);
+  return (uint64_t)res;
+}
\ No newline at end of file
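For context, the nd_* wrappers in nd_clam.c return nondeterministic values that are constrained to be non-negative before they are cast to the corresponding unsigned type. A proof harness would typically consume them along the lines of the sketch below; the harness name, the assume() prototype and the concrete bound are illustrative assumptions, not code taken from this patch series:

    #include <stddef.h>

    extern size_t nd_size_t(void); /* provided by nd_clam.c above */
    extern void assume(int);       /* SeaHorn-style assumption, as used elsewhere in these patches */

    void example_harness(void) {
      size_t len = nd_size_t(); /* nondeterministic, non-negative by construction */
      assume(len < 4096);       /* hypothetical bound so Crab sees a finite range */
      /* ... allocate len bytes and exercise the function under verification ... */
    }
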
From f9ed072e7b28faeac93f4a636d5020e104c8a455 Mon Sep 17 00:00:00 2001
From: yusen
Date: Wed, 22 Sep 2021 04:40:31 +0000
Subject: [PATCH 03/24] feat(cmake): modify cmake to allow crab infer sea.is_dereferenceable

---
 CMakeLists.txt             | 1 +
 seahorn/CMakeLists.txt     | 7 +++++++
 seahorn/lib/CMakeLists.txt | 6 ++++++
 verify.py.in               | 7 ++++++-
 4 files changed, 20 insertions(+), 1 deletion(-)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index c6af4fef..8220036d 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -14,6 +14,7 @@ option(SEA_LLVM_MEM_BUILTINS "Use LLVM builtins memcpy/memmove without memory sa
 option(SEA_ENABLE_FUZZ "Enable fuzzing" OFF)
+option(SEA_ENABLE_CRAB "Enable Seahorn using Clam/Crab" OFF)
 option(SEA_WITH_BLEEDING_EDGE "Enable bleeding edge proofs" OFF)
 option(SEA_ENABLE_KLEE "Enable klee" OFF)
diff --git a/seahorn/CMakeLists.txt b/seahorn/CMakeLists.txt
index b737edb9..4d2b8db6 100644
--- a/seahorn/CMakeLists.txt
+++ b/seahorn/CMakeLists.txt
@@ -186,6 +186,9 @@ function(sea_attach_bc name)
   set_property(TARGET ${SOURCE_EXE} PROPERTY EXCLUDE_FROM_ALL ON)
   target_compile_definitions(${SOURCE_EXE} PRIVATE __SEAHORN__)
+  if(SEA_ENABLE_CRAB)
+    target_compile_definitions(${SOURCE_EXE} PRIVATE __CRAB__)
+  endif()
   # when defined, check AWS_ASSERT as well as sassert
   target_compile_definitions(${SOURCE_EXE} PRIVATE __SEAHORN_ASSERT__)
   target_compile_definitions(${SOURCE_EXE} PRIVATE VACCHECK)
@@ -245,6 +248,9 @@ endmacro()
 function(sea_attach_bc_link name)
   sea_link_libraries(${name} sea_proofs.ir sea_bounds.ir str_proofs.ir)
+  if(SEA_ENABLE_CRAB)
+    sea_link_libraries(${name} crab_proofs.ir)
+  endif()
   sea_attach_bc(${name})
 endfunction()
@@ -424,6 +430,7 @@ add_definitions(
 configure_file(include/config.h.in ${PROJECT_BINARY_DIR}/include/config.h)
 configure_file(sea.yaml sea.yaml @ONLY)
+configure_file(sea_crab_base.yaml sea.crab.yaml @ONLY)
 configure_file(sea_cex_base.yaml sea.cex.yaml @ONLY)
 configure_file(sea_vac.yaml sea.vac.yaml @ONLY)
 configure_file(sea_pcond.yaml sea.pcond.yaml @ONLY)
diff --git a/seahorn/lib/CMakeLists.txt b/seahorn/lib/CMakeLists.txt
index 2bda36e7..f1b80649 100644
--- a/seahorn/lib/CMakeLists.txt
+++ b/seahorn/lib/CMakeLists.txt
@@ -24,6 +24,12 @@ if(SEA_ALLOCATOR_CAN_FAIL)
 endif()
 sea_attach_bc(sea_proofs)
+# crab proof helpers
+if(SEA_ENABLE_CRAB)
+  add_library(crab_proofs nd_clam.c)
+  sea_attach_bc(crab_proofs)
+endif()
+
 # string proof helpers
 add_library(str_proofs sea_string.cc)
diff --git a/verify.py.in b/verify.py.in
index 9c33fa34..96431aae 100755
--- a/verify.py.in
+++ b/verify.py.in
@@ -59,6 +59,8 @@ def main(argv):
                       help='Vacuity mode')
     argp.add_argument('--pcond', action='store_true', default=False,
                       help='Path condition mode')
+    argp.add_argument('--crab', action='store_true', default=False,
+                      help='Using crab')
     argp.add_argument('input_file', nargs=1)
     argp.add_argument('--dry-run', dest='dry_run', action='store_true',
                       default=False,
@@ -90,7 +92,10 @@ def main(argv):
            'yama', '--yforce']
     # base config
-    base_config = os.path.join(script_dir, 'seahorn', 'sea.yaml')
+    if args.crab:
+        base_config = os.path.join(script_dir, 'seahorn', 'sea.crab.yaml')
+    else:
+        base_config = os.path.join(script_dir, 'seahorn', 'sea.yaml')
     if args.cex:
         base_config = os.path.join(script_dir, 'seahorn', 'sea.cex.yaml')
From 1ff789a4aa2fbc86b5bd663d3cc14552cdb3f675 Mon Sep 17 00:00:00 2001
From: yusen
Date: Wed, 22 Sep 2021 04:41:35 +0000
Subject: [PATCH 04/24] feat(lib): add assumptions for string not empty and malloc cannot fail when uses crab

---
 seahorn/include/utils.h        |  6 ++++++
 seahorn/lib/proof_allocators.c | 10 ++++++++--
 seahorn/lib/string_helper.c    |  7 +++++++
 3 files changed, 21 insertions(+), 2 deletions(-)

diff --git a/seahorn/include/utils.h b/seahorn/include/utils.h
index 116468ac..23302e84 100644
--- a/seahorn/include/utils.h
+++ b/seahorn/include/utils.h
@@ -29,6 +29,12 @@ extern void klee_assume(uintptr_t condition);
 #define FUZZ_ASSUME(X)
 #endif
+#ifdef __CRAB__
+#define CRAB_ASSUME(X) assume(X)
+#else
+#define CRAB_ASSUME(X)
+#endif
+
 #ifdef __FUZZ__ // set upper bound of X to Y during fuzzing
 #define FUZZ_ASSUME_LT(X, Y) X %= Y
 #else
diff --git a/seahorn/lib/proof_allocators.c b/seahorn/lib/proof_allocators.c
index 6b384a61..1f1512ac 100644
--- a/seahorn/lib/proof_allocators.c
+++ b/seahorn/lib/proof_allocators.c
@@ -13,11 +13,17 @@ void *realloc(void *ptr, size_t new_size) {
   return sea_realloc(ptr, new_size);
 }
-void *bounded_malloc_havoc(size_t size) {
+void *bounded_malloc(size_t size) {
+  #ifdef __CRAB__
+  assume(size > 1);
+  #endif
   return size == 0 ? NULL : sea_malloc_havoc_safe(size);
 }
-void *can_fail_malloc_havoc(size_t size) {
+void *can_fail_malloc(size_t size) {
+  #ifdef __CRAB__
+  assume(size > 1);
+  #endif
   return size == 0 ? NULL : sea_malloc_havoc(size);
 }
diff --git a/seahorn/lib/string_helper.c b/seahorn/lib/string_helper.c
index c9dbf141..fc7bb87b 100644
--- a/seahorn/lib/string_helper.c
+++ b/seahorn/lib/string_helper.c
@@ -12,8 +12,12 @@ struct aws_string *ensure_string_is_allocated(size_t len) {
   if (str) {
     /* Fields are declared const, so we need to copy them in like this */
+    #ifdef __CRAB__
+    *(struct aws_allocator **)(&str->allocator) = sea_allocator();
+    #else
     *(struct aws_allocator **)(&str->allocator) =
         nd_bool() ? sea_allocator() : NULL;
+    #endif
     *(size_t *)(&str->len) = len;
     *(uint8_t *)&str->bytes[len] = '\0';
   }
@@ -22,6 +26,9 @@ struct aws_string *ensure_string_is_allocated_bounded_length(size_t max_size) {
   size_t len = nd_size_t();
+  #ifdef __CRAB__
+  assume(len > 1);
+  #endif
   assume(len < max_size);
   return ensure_string_is_allocated(len);
 }
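The CRAB_ASSUME macro added to utils.h mirrors the existing FUZZ_ASSUME pattern: it expands to a real assumption only when the sources are compiled with __CRAB__ (which the new SEA_ENABLE_CRAB CMake option turns on), and to nothing otherwise. A hypothetical use site, not taken from these patches, could look like this:

    #include <stddef.h>
    #include "utils.h" /* assumed include path for the CRAB_ASSUME/assume macros */

    extern size_t nd_size_t(void);

    void harness_fragment(void) {
      size_t len = nd_size_t();
      CRAB_ASSUME(len > 0); /* only emitted for Crab builds; a no-op elsewhere */
      assume(len < 64);     /* 64 is a placeholder bound for illustration */
      /* ... continue with an allocation of len bytes ... */
    }
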
From a5449e2902656faa9e085654494db94611d591fb Mon Sep 17 00:00:00 2001
From: yusen
Date: Wed, 22 Sep 2021 04:42:39 +0000
Subject: [PATCH 05/24] feat(jobs): adapt crab interpret predicate function

---
 .../aws_byte_cursor_left_trim_pred_harness.c  | 2 +-
 .../aws_byte_cursor_right_trim_pred_harness.c | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/seahorn/jobs/byte_cursor_left_trim_pred/aws_byte_cursor_left_trim_pred_harness.c b/seahorn/jobs/byte_cursor_left_trim_pred/aws_byte_cursor_left_trim_pred_harness.c
index 5263cde2..97d1b0f4 100644
--- a/seahorn/jobs/byte_cursor_left_trim_pred/aws_byte_cursor_left_trim_pred_harness.c
+++ b/seahorn/jobs/byte_cursor_left_trim_pred/aws_byte_cursor_left_trim_pred_harness.c
@@ -6,7 +6,7 @@
 #include
 #include
-#if defined(__KLEE__) || defined(__FUZZ__) || defined(__SYMBIOTIC__)
+#if defined(__KLEE__) || defined(__FUZZ__) || defined(__SYMBIOTIC__) || defined(__CRAB__)
 bool uninterpreted_predicate_fn(uint8_t value) { return nd_bool(); }
diff --git a/seahorn/jobs/byte_cursor_right_trim_pred/aws_byte_cursor_right_trim_pred_harness.c b/seahorn/jobs/byte_cursor_right_trim_pred/aws_byte_cursor_right_trim_pred_harness.c
index 8c79c15e..dfe20921 100644
--- a/seahorn/jobs/byte_cursor_right_trim_pred/aws_byte_cursor_right_trim_pred_harness.c
+++ b/seahorn/jobs/byte_cursor_right_trim_pred/aws_byte_cursor_right_trim_pred_harness.c
@@ -8,7 +8,7 @@
 #include
 #include
-#if defined(__KLEE__) || defined(__FUZZ__) || defined(__SYMBIOTIC__)
+#if defined(__KLEE__) || defined(__FUZZ__) || defined(__SYMBIOTIC__) || defined(__CRAB__)
 bool uninterpreted_predicate_fn(uint8_t value) { return nd_bool(); }

From 65cf58d6dd68546199312b3b1b63f2a069f8a45e Mon Sep 17 00:00:00 2001
From: yusen
Date: Thu, 23 Sep 2021 04:01:21 +0000
Subject: [PATCH 06/24] fix(py): update script to collect crab's results

---
 .gitignore                       |  2 +-
 scripts/get_exper_brunch_stat.py | 28 ++++++++++--
 scripts/get_exper_res.py         | 78 ++++++++++++++++++++++++--------
 3 files changed, 83 insertions(+), 25 deletions(-)

diff --git a/.gitignore b/.gitignore
index 5bacdbd2..2af8fcfc 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,4 @@
-build
+build*
 build_fuzz
 debug
 cex
diff --git a/scripts/get_exper_brunch_stat.py b/scripts/get_exper_brunch_stat.py
index d8043f24..43e036e4 100644
--- a/scripts/get_exper_brunch_stat.py
+++ b/scripts/get_exper_brunch_stat.py
@@ -15,6 +15,8 @@ def read_brunchstat_from_log(log_file_name):
     line = log_file.readline()
     data = list()  # list of metric_name->metric
     cur_test = None
+    first = False
+    second = False
     while line:
         # look for next test
         new_test = re.search("Test: ", line)
@@ -30,8 +32,22 @@ def read_brunchstat_from_log(log_file_name):
            stat = line.split()
            stat_name = " ".join(stat[1:-1])
            if stat_name in metrics:
-               stat_num = stat[-1]
-               data[-1][stat_name] = stat_num
+               if stat_name == "crab.isderef.not.solve":
+                   first = True
+               if stat_name == "crab.isderef.solve":
+                   if not first:
+                       data[cur_test].append(0)
+                   first = True
+                   second = True
+               if stat_name == "BMC":
+                   if not first:
+                       data[-1][stat_name] = 0
+                   if not second:
+                       data[-1][stat_name] = 0
+                   first = False
+                   second = False
+               stat_num = stat[-1]
+               data[-1][stat_name] = stat_num
         line = log_file.readline()
     log_file.close()
@@ -120,8 +136,12 @@ def write_brunchstat_into_csv(data, out_file):
     parser = argparse.ArgumentParser(
         description='in and out files')
     parser.add_argument('logfile', type=str, help=".log file to read from")
-    parser.add_argument('--outfile', '-o', type=str,
-                        default="brunch_stat.csv", help="ouput csv filename")
+    parser.add_argument(
+        '--outfile',
+        '-o',
+        type=str,
+        default="brunch_stat.csv",
+        help="ouput csv filename")
     args = parser.parse_args()
     data = read_brunchstat_from_log(args.logfile)
     write_brunchstat_into_csv(data, args.outfile)
diff --git a/scripts/get_exper_res.py b/scripts/get_exper_res.py
index 67be12ed..76e80090 100644
--- a/scripts/get_exper_res.py
+++ b/scripts/get_exper_res.py
@@ -19,6 +19,27 @@
     "klee": "klee.csv", "symbiotic": "symbiotic.csv"}
+def extra_to_filename(extra, suffix='.csv', prefix=''):
+    '''extra: --a=b --c=d to filename: a.b.c.d.csv'''
+    if (len(extra) == 0):
+        return f'base{suffix}'
+    parts = []
+    for flag in extra:
+        if flag.startswith('--'):
+            flag = flag[2:]
+        parts.extend(flag.split('='))
+    return f'{prefix}{"_".join(parts)}{suffix}'
+
+
+def make_build_path(extra, build_path='../build'):
+    build_path += extra_to_filename(extra, '', '_') if extra else '_base'
+    print(f'[Build Path] {build_path}')
+    if not os.path.exists(build_path):
+        os.mkdir(build_path)
+    global BUILDABSPATH
+    BUILDABSPATH = os.path.abspath(build_path)
+
+
 def get_output_level():
     if args.debug:
         output_level = sys.stdout
@@ -31,6 +52,7 @@ def make_new_cmake_conf():
     use_klee = "ON" if args.klee else "OFF"
     use_symbiotic = "ON" if args.symbiotic else "OFF"
     use_bleeding_edge = "ON" if args.bleed_edge else "OFF"
+    use_crab = "ON" if "--crab" in extra else "OFF"
     if args.smack_parser:
         use_smack = "ON" if args.smack else "OFF"
smack_enable_no_mem_split = "ON" if args.mem_no_split else "OFF" @@ -40,8 +62,8 @@ def make_new_cmake_conf(): smack_args = "" return f'cmake -DSEA_LINK=llvm-link-10 -DCMAKE_C_COMPILER=clang-10\ -DCMAKE_CXX_COMPILER=clang++-10 -DSEA_ENABLE_KLEE={use_klee} {smack_args}\ - -DSEA_WITH_BLEEDING_EDGE={use_bleeding_edge} -DSEA_ENABLE_SYMBIOTIC={use_symbiotic}\ - -DSEAHORN_ROOT={SEAHORN_ROOT} ../ -GNinja' + -DSEA_WITH_BLEEDING_EDGE={use_bleeding_edge} -DSEA_ENABLE_CRAB={use_crab}\ + -DSEA_ENABLE_SYMBIOTIC={use_symbiotic} -DSEAHORN_ROOT={SEAHORN_ROOT} ../ -GNinja' def read_data_from_xml(res_data): @@ -74,6 +96,7 @@ def collect_res_from_ctest(file_name): read_data_from_xml(res_data) write_data_into_csv( "{dir}/{file}".format(dir="../data", file=file_name), res_data) +<<<<<<< HEAD print("Done, find result csv file at: %s" % file_name) @@ -87,10 +110,13 @@ def extra_to_filename(extra, suffix='csv'): flag = flag[2:] parts.extend(flag.split('=')) return f'{"_".join(parts)}.{suffix}' +======= + print("[Results] Please find result csv file at: ../data/%s" % file_name) +>>>>>>> a12af01 (fix(py): update script to collect crab's results) def run_ctest_for_seahorn(): - print("Start making SeaHorn results...") + print("[SeaHorn] Start making SeaHorn results...") set_env = '' if extra and len(extra) > 0: verify_flags = " ".join(extra) @@ -99,11 +125,12 @@ def run_ctest_for_seahorn(): cmake_conf = make_new_cmake_conf() command_lst = ["rm -rf *", cmake_conf, "ninja", f'{set_env} ctest -j{os.cpu_count()} -D ExperimentalTest -R . --timeout {args.timeout}'] + make_build_path(extra) cddir = "cd " + BUILDABSPATH for strcmd in command_lst: cddir += " ; " + strcmd if args.debug: - print(cddir) + print(f'[Command] {cddir}') process = subprocess.Popen( '/bin/bash', shell=True, @@ -111,13 +138,14 @@ def run_ctest_for_seahorn(): stdout=get_output_level()) _ = process.communicate(cddir.encode()) collect_res_from_ctest(extra_to_filename(extra)) - collect_stat_from_ctest_log(extra_to_filename(extra, suffix='brunch.csv')) + collect_stat_from_ctest_log(extra_to_filename(extra, suffix='.brunch.csv')) + def collect_stat_from_ctest_log(outfile): test_tmpdir = os.path.join(BUILDABSPATH, 'Testing', 'Temporary') - logfiles = [i for i in os.listdir( - test_tmpdir)if os.path.isfile(os.path.join(test_tmpdir, i)) and i.startswith("LastTest_")] + logfiles = [i for i in os.listdir(test_tmpdir)if os.path.isfile( + os.path.join(test_tmpdir, i)) and i.startswith("LastTest_")] latest_log = logfiles[0] print("collecting brunch stat from " + logfiles[0]) if args.seahorn: @@ -130,17 +158,17 @@ def collect_stat_from_ctest_log(outfile): outpath = os.path.join(DATAABSPATH, outfile) write_symbiotic_bruchstat_into_csv(data, outpath) - def run_ctest_for_klee(): cmake_conf = make_new_cmake_conf() command_lst = ["rm -rf *", cmake_conf, "ninja", f'ctest -j{os.cpu_count()} -D ExperimentalTest -R klee_ --timeout {args.timeout}'] - print("Start making KLEE results...") + print("[KLEE] Start making KLEE results...") + make_build_path(["--klee"]) cddir = "cd " + BUILDABSPATH for strcmd in command_lst: cddir += " ; " + strcmd if args.debug: - print(cddir) + print(f'[Command] {cddir}') process = subprocess.Popen( '/bin/bash', stdin=subprocess.PIPE, @@ -152,13 +180,14 @@ def run_ctest_for_klee(): def run_ctest_for_smack(): cmake_conf = make_new_cmake_conf() command_lst = ["rm -rf *", cmake_conf, "ninja", - f'ctest -j{os.cpu_count()} -D ExperimentalTest -R smack_ --timeout {args.timeout}'] - print("Start making SMACK results...") + f'ctest -j{os.cpu_count()} -D 
ExperimentalTest -R smack_ --timeout 200'] + print("[SMACK] Start making SMACK results...") + make_build_path(["--smack"]) cddir = "cd " + BUILDABSPATH for strcmd in command_lst: cddir += " ; " + strcmd if args.debug: - print(cddir) + print(f'[Command] {cddir}') process = subprocess.Popen( '/bin/bash', stdin=subprocess.PIPE, @@ -209,18 +238,27 @@ def main(): parser.add_argument('--symbiotic', action='store_true', default=False) parser.add_argument('--bleed_edge', action='store_true', default=False) parser.add_argument('--debug', action='store_true', default=False) - parser.add_argument('--timeout', type=int, default=200, + parser.add_argument('--timeout', type=int, default=2000, help='Seconds before timeout for each test') subparsers = parser.add_subparsers( help='sub-commands help', dest="smack_parser") smack_parser = subparsers.add_parser('smack', help="smack help") smack_parser.add_argument('--smack', action='store_true', default=False) - smack_parser.add_argument('--precise', type=str, default='unbounded-integer', - help='Set Smack precise: unbounded-integer or bit vector') - smack_parser.add_argument('--checks', type=str, default='assertions', - help='Set Smack property checks: assertions or valid-deref') - smack_parser.add_argument('--mem_no_split', action='store_true', default=False, - help='Set Smack uses no memory split') + smack_parser.add_argument( + '--precise', + type=str, + default='unbounded-integer', + help='Set Smack precise: unbounded-integer or bit vector') + smack_parser.add_argument( + '--checks', + type=str, + default='assertions', + help='Set Smack property checks: assertions or valid-deref') + smack_parser.add_argument( + '--mem_no_split', + action='store_true', + default=False, + help='Set Smack uses no memory split') args, extra = parser.parse_known_args() if args.klee: args.seahorn = False From df96eaa14bccd0422ff4af56152ea59eba75800a Mon Sep 17 00:00:00 2001 From: yusen Date: Thu, 23 Sep 2021 04:04:14 +0000 Subject: [PATCH 07/24] fix(yaml): extend crab yaml by using sea_base.yaml --- seahorn/sea_crab_base.yaml | 38 -------------------------------------- verify.py.in | 11 +++++++---- 2 files changed, 7 insertions(+), 42 deletions(-) diff --git a/seahorn/sea_crab_base.yaml b/seahorn/sea_crab_base.yaml index a422e727..3e7441d0 100644 --- a/seahorn/sea_crab_base.yaml +++ b/seahorn/sea_crab_base.yaml @@ -1,47 +1,9 @@ verify_options: -# Optimization level - '-O3': '' -# -# PREPROCESSING -# - inline: '' - enable-loop-idiom: '' - enable-indvar: '' - no-lower-gv-init-struct: '' - externalize-addr-taken-functions: '' - no-kill-vaarg: '' - with-arith-overflow: true horn-unify-assumes: false -# do not instrument library functions with memory assertions - no-fat-fns: 'bcmp,memcpy,assert_bytes_match,ensure_linked_list_is_allocated,sea_aws_linked_list_is_valid' horn-gsa: false -# context-sensitive type-aware alias analysis - dsa: sea-cs-t -# weak support for function pointers. 
sea-dsa is better but less stable - devirt-functions: 'types' -# bit-precise memory-precise operational semantics - bmc: opsem - horn-vcgen-use-ite: '' # ignore control flow horn-vcgen-only-dataflow: false # reduce by data dependence horn-bmc-coi: false -# static allocator supports symbolic allocation size - sea-opsem-allocator: static -# stack allocation from a symbolic starting point - horn-explicit-sp0: false -# lambdas for memory - horn-bv2-lambdas: false -# use z3 simplifier during vcgen - horn-bv2-simplify: true -# wide memory manager to track pointer sizes - horn-bv2-extra-widemem: '' -# intermediate results in human readable form for debugging - '-S': '' -# keep intermediate results for debugging - keep-temps: '' - temp-dir: /tmp/verify-c-common -# time and result stats - horn-stats: true horn-bv2-crab-lower-is-deref: '' horn-bmc-crab-dom: 'zones' diff --git a/verify.py.in b/verify.py.in index 96431aae..3a61bc8f 100755 --- a/verify.py.in +++ b/verify.py.in @@ -92,10 +92,7 @@ def main(argv): 'yama', '--yforce'] # base config - if args.crab: - base_config = os.path.join(script_dir, 'seahorn', 'sea.crab.yaml') - else: - base_config = os.path.join(script_dir, 'seahorn', 'sea.yaml') + base_config = os.path.join(script_dir, 'seahorn', 'sea.yaml') if args.cex: base_config = os.path.join(script_dir, 'seahorn', 'sea.cex.yaml') @@ -106,6 +103,12 @@ def main(argv): vac_config = os.path.join(script_dir, 'seahorn', 'sea.vac.yaml') cmd.extend(['-y', vac_config]) + + # Crab in Opsem config + if args.crab: + crab_config = os.path.join(script_dir, 'seahorn', + 'sea.crab.yaml') + cmd.extend(['-y', crab_config]) # pcond config if args.pcond: From 779d5f6ef6d8a8a51b2543536266a28e565969b7 Mon Sep 17 00:00:00 2001 From: yusen Date: Tue, 28 Sep 2021 16:43:37 +0000 Subject: [PATCH 08/24] fix(py): update scripts to collect listing statistics --- scripts/get_exper_brunch_stat.py | 26 ++++++-------------------- scripts/get_exper_res.py | 7 ++----- scripts/metrics.yaml | 2 ++ 3 files changed, 10 insertions(+), 25 deletions(-) diff --git a/scripts/get_exper_brunch_stat.py b/scripts/get_exper_brunch_stat.py index 43e036e4..841c08b3 100644 --- a/scripts/get_exper_brunch_stat.py +++ b/scripts/get_exper_brunch_stat.py @@ -15,11 +15,12 @@ def read_brunchstat_from_log(log_file_name): line = log_file.readline() data = list() # list of metric_name->metric cur_test = None - first = False - second = False + if not use_crab: + del BRUNCH_DICT["crab.isderef.not.solve"] + del BRUNCH_DICT["crab.isderef.solve"] while line: # look for next test - new_test = re.search("Test: ", line) + new_test = re.search(BRUNCH_DICT["job_name"], line) if new_test: new_test_data = defaultdict(lambda: 'n/a') span = new_test.span() @@ -32,24 +33,9 @@ def read_brunchstat_from_log(log_file_name): stat = line.split() stat_name = " ".join(stat[1:-1]) if stat_name in metrics: - if stat_name == "crab.isderef.not.solve": - first = True - if stat_name == "crab.isderef.solve": - if not first: - data[cur_test].append(0) - first = True - second = True - if stat_name == "BMC": - if not first: - data[-1][stat_name] = 0 - if not second: - data[-1][stat_name] = 0 - first = False - second = False - stat_num = stat[-1] - data[-1][stat_name] = stat_num + stat_num = stat[-1] + data[-1][stat_name] = stat_num line = log_file.readline() - log_file.close() return data diff --git a/scripts/get_exper_res.py b/scripts/get_exper_res.py index 76e80090..bc758125 100644 --- a/scripts/get_exper_res.py +++ b/scripts/get_exper_res.py @@ -96,7 +96,6 @@ def 
collect_res_from_ctest(file_name): read_data_from_xml(res_data) write_data_into_csv( "{dir}/{file}".format(dir="../data", file=file_name), res_data) -<<<<<<< HEAD print("Done, find result csv file at: %s" % file_name) @@ -110,9 +109,6 @@ def extra_to_filename(extra, suffix='csv'): flag = flag[2:] parts.extend(flag.split('=')) return f'{"_".join(parts)}.{suffix}' -======= - print("[Results] Please find result csv file at: ../data/%s" % file_name) ->>>>>>> a12af01 (fix(py): update script to collect crab's results) def run_ctest_for_seahorn(): @@ -138,7 +134,8 @@ def run_ctest_for_seahorn(): stdout=get_output_level()) _ = process.communicate(cddir.encode()) collect_res_from_ctest(extra_to_filename(extra)) - collect_stat_from_ctest_log(extra_to_filename(extra, suffix='.brunch.csv')) + collect_stat_from_ctest_log(extra_to_filename(extra, suffix='.brunch.csv'), + True if "--crab" in extra else False) diff --git a/scripts/metrics.yaml b/scripts/metrics.yaml index 4563506f..ecca9169 100644 --- a/scripts/metrics.yaml +++ b/scripts/metrics.yaml @@ -2,6 +2,8 @@ job_name: "job_name" Result: "result" bmc.circ_sz: "bmc_circuit_size" bmc.dag_sz: "bmc_dag_size" +crab.isderef.not.solve: "crab.isderef.not.solve" # for crab +crab.isderef.solve: "crab.isderef.solve" # for crab BMC: "bmc_time" BMC.solve: "bmc_solve_time" Control dependence analysis: "cda_time" From 58fdd0d2bb7ae55d540751bf33d3b03ef45fedba Mon Sep 17 00:00:00 2001 From: Yusen Su Date: Wed, 20 Apr 2022 11:52:10 -0400 Subject: [PATCH 09/24] feat(py): introduce new statistic to collect crab running time --- scripts/get_exper_brunch_stat.py | 5 ++--- scripts/get_exper_res.py | 3 +-- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/scripts/get_exper_brunch_stat.py b/scripts/get_exper_brunch_stat.py index 841c08b3..5dd69135 100644 --- a/scripts/get_exper_brunch_stat.py +++ b/scripts/get_exper_brunch_stat.py @@ -15,9 +15,6 @@ def read_brunchstat_from_log(log_file_name): line = log_file.readline() data = list() # list of metric_name->metric cur_test = None - if not use_crab: - del BRUNCH_DICT["crab.isderef.not.solve"] - del BRUNCH_DICT["crab.isderef.solve"] while line: # look for next test new_test = re.search(BRUNCH_DICT["job_name"], line) @@ -36,6 +33,8 @@ def read_brunchstat_from_log(log_file_name): stat_num = stat[-1] data[-1][stat_name] = stat_num line = log_file.readline() + if cur_test: + data[cur_test] = move_dict_items_to_lst(row_dict) log_file.close() return data diff --git a/scripts/get_exper_res.py b/scripts/get_exper_res.py index bc758125..d05b56f9 100644 --- a/scripts/get_exper_res.py +++ b/scripts/get_exper_res.py @@ -138,8 +138,7 @@ def run_ctest_for_seahorn(): True if "--crab" in extra else False) - -def collect_stat_from_ctest_log(outfile): +def collect_stat_from_ctest_log(outfile, use_crab): test_tmpdir = os.path.join(BUILDABSPATH, 'Testing', 'Temporary') logfiles = [i for i in os.listdir(test_tmpdir)if os.path.isfile( os.path.join(test_tmpdir, i)) and i.startswith("LastTest_")] From aba9c219f9d5e244e27dff733af5cf1ee66d4c8d Mon Sep 17 00:00:00 2001 From: Yusen Su Date: Mon, 9 May 2022 21:18:15 -0400 Subject: [PATCH 10/24] feat(crab): use macro to avoid crab infer 0 size allocation --- seahorn/lib/proof_allocators.c | 8 ++++---- seahorn/lib/sea_allocators.c | 3 +++ 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/seahorn/lib/proof_allocators.c b/seahorn/lib/proof_allocators.c index 1f1512ac..d94d7df3 100644 --- a/seahorn/lib/proof_allocators.c +++ b/seahorn/lib/proof_allocators.c @@ -13,16 +13,16 @@ void 
*realloc(void *ptr, size_t new_size) { return sea_realloc(ptr, new_size); } -void *bounded_malloc(size_t size) { +void *bounded_malloc_havoc(size_t size) { #ifdef __CRAB__ - assume(size > 1); + assume(size > 0); #endif return size == 0 ? NULL : sea_malloc_havoc_safe(size); } -void *can_fail_malloc(size_t size) { +void *can_fail_malloc_havoc(size_t size) { #ifdef __CRAB__ - assume(size > 1); + assume(size > 0); #endif return size == 0 ? NULL : sea_malloc_havoc(size); } diff --git a/seahorn/lib/sea_allocators.c b/seahorn/lib/sea_allocators.c index 828ddac5..5328e986 100644 --- a/seahorn/lib/sea_allocators.c +++ b/seahorn/lib/sea_allocators.c @@ -8,6 +8,9 @@ extern void memhavoc(void *, size_t); extern NONDET_FN_ATTR bool nd_malloc_is_fail(void); INLINE void *sea_malloc(size_t sz) { + #ifdef __CRAB__ + assume(sz > 0); + #endif return nd_malloc_is_fail() ? NULL : malloc(sz); } From 20639a1180bcb11f09151b542c0b176d63752e90 Mon Sep 17 00:00:00 2001 From: Yusen Su Date: Mon, 9 May 2022 21:19:31 -0400 Subject: [PATCH 11/24] feat(yaml): adapt yaml config with new sea-crab options --- seahorn/sea_crab_base.yaml | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/seahorn/sea_crab_base.yaml b/seahorn/sea_crab_base.yaml index 3e7441d0..15b6b3c5 100644 --- a/seahorn/sea_crab_base.yaml +++ b/seahorn/sea_crab_base.yaml @@ -5,5 +5,12 @@ verify_options: horn-vcgen-only-dataflow: false # reduce by data dependence horn-bmc-coi: false - horn-bv2-crab-lower-is-deref: '' +# Use crab solve sea.is_deref + horn-bv2-crab-check-is-deref: '' +# reduce used by object domain + horn-bv2-crab-reduce: '' +# Use crab infer allocation range + # horn-bv2-crab-rng: '' horn-bmc-crab-dom: 'zones' +# Statistics + # horn-bv2-crab-stats: '' \ No newline at end of file From a920e961e3c1b30820694f3b3f579fa01d179844 Mon Sep 17 00:00:00 2001 From: Yusen Su Date: Fri, 13 May 2022 08:50:22 -0400 Subject: [PATCH 12/24] feat(crab): adpat malloc not fail for crab --- seahorn/lib/sea_allocators.c | 1 + 1 file changed, 1 insertion(+) diff --git a/seahorn/lib/sea_allocators.c b/seahorn/lib/sea_allocators.c index 5328e986..0480120a 100644 --- a/seahorn/lib/sea_allocators.c +++ b/seahorn/lib/sea_allocators.c @@ -10,6 +10,7 @@ extern NONDET_FN_ATTR bool nd_malloc_is_fail(void); INLINE void *sea_malloc(size_t sz) { #ifdef __CRAB__ assume(sz > 0); + return malloc(sz); #endif return nd_malloc_is_fail() ? 
NULL : malloc(sz); } From 491e332496792aaacfd6ee4b6e4d2ef62e2c2e18 Mon Sep 17 00:00:00 2001 From: Yusen Su Date: Fri, 13 May 2022 08:51:17 -0400 Subject: [PATCH 13/24] feat(jobs): add hard work for crab --- seahorn/CMakeLists.txt | 5 ++ .../byte_buf_advance_crab/CMakeLists.txt | 5 ++ .../aws_byte_buf_advance_crab_harness.c | 57 +++++++++++++++++++ .../byte_buf_append_crab/CMakeLists.txt | 5 ++ .../aws_byte_buf_append_crab_harness.c | 55 ++++++++++++++++++ 5 files changed, 127 insertions(+) create mode 100644 seahorn/jobs_crab/byte_buf_advance_crab/CMakeLists.txt create mode 100644 seahorn/jobs_crab/byte_buf_advance_crab/aws_byte_buf_advance_crab_harness.c create mode 100644 seahorn/jobs_crab/byte_buf_append_crab/CMakeLists.txt create mode 100644 seahorn/jobs_crab/byte_buf_append_crab/aws_byte_buf_append_crab_harness.c diff --git a/seahorn/CMakeLists.txt b/seahorn/CMakeLists.txt index 4d2b8db6..0cc9973b 100644 --- a/seahorn/CMakeLists.txt +++ b/seahorn/CMakeLists.txt @@ -697,3 +697,8 @@ if(${SEA_WITH_BLEEDING_EDGE}) add_subdirectory(jobs/hash_table_foreach_deep_precise) endif() + +if(SEA_ENABLE_CRAB_JOBS) + add_subdirectory(jobs_crab/byte_buf_advance_crab) + add_subdirectory(jobs_crab/byte_buf_append_crab) +endif() diff --git a/seahorn/jobs_crab/byte_buf_advance_crab/CMakeLists.txt b/seahorn/jobs_crab/byte_buf_advance_crab/CMakeLists.txt new file mode 100644 index 00000000..4aa0d47a --- /dev/null +++ b/seahorn/jobs_crab/byte_buf_advance_crab/CMakeLists.txt @@ -0,0 +1,5 @@ +add_executable(byte_buf_advance_crab + ${AWS_C_COMMON_ROOT}/source/byte_buf.c + aws_byte_buf_advance_crab_harness.c) +sea_attach_bc_link(byte_buf_advance_crab) +sea_add_unsat_test(byte_buf_advance_crab) \ No newline at end of file diff --git a/seahorn/jobs_crab/byte_buf_advance_crab/aws_byte_buf_advance_crab_harness.c b/seahorn/jobs_crab/byte_buf_advance_crab/aws_byte_buf_advance_crab_harness.c new file mode 100644 index 00000000..a0fb157c --- /dev/null +++ b/seahorn/jobs_crab/byte_buf_advance_crab/aws_byte_buf_advance_crab_harness.c @@ -0,0 +1,57 @@ +/* + * + */ + +#include +#include +#include +#include + +extern void sea_dsa_alias(const void *p, ...); + +int main() { + /* parameters */ + struct aws_byte_buf buf; + initialize_byte_buf(&buf); + struct aws_byte_buf output; + size_t len = nd_size_t(); + + /* assumptions */ + assume(aws_byte_buf_is_valid(&buf)); + if (nd_bool()) { + output = buf; + } else { + initialize_byte_buf(&output); + assume(aws_byte_buf_is_valid(&output)); + } + + /* save current state of the parameters */ + struct aws_byte_buf old = buf; + struct store_byte_from_buffer old_byte_from_buf; + save_byte_from_array(buf.buffer, buf.len, &old_byte_from_buf); + + sea_dsa_alias(&old,&buf,&output); + + /* operation under verification */ + if (aws_byte_buf_advance(&buf, &output, len)) { + sassert(buf.len == old.len + len); + sassert(buf.capacity == old.capacity); + sassert(buf.allocator == old.allocator); + if (old.len > 0) { + assert_byte_from_buffer_matches(buf.buffer, &old_byte_from_buf); + } + sassert(output.len == 0); + sassert(output.capacity == len); + sassert(output.allocator == NULL); + } else { + assert_byte_buf_equivalence(&buf, &old, &old_byte_from_buf); + sassert(output.len == 0); + sassert(output.capacity == 0); + sassert(output.allocator == NULL); + sassert(output.buffer == NULL); + } + sassert(aws_byte_buf_is_valid(&buf)); + sassert(aws_byte_buf_is_valid(&output)); + + return 0; +} diff --git a/seahorn/jobs_crab/byte_buf_append_crab/CMakeLists.txt 
b/seahorn/jobs_crab/byte_buf_append_crab/CMakeLists.txt new file mode 100644 index 00000000..b8b5d8fa --- /dev/null +++ b/seahorn/jobs_crab/byte_buf_append_crab/CMakeLists.txt @@ -0,0 +1,5 @@ +add_executable(byte_buf_append_crab + ${AWS_C_COMMON_ROOT}/source/byte_buf.c + aws_byte_buf_append_crab_harness.c) +sea_attach_bc_link(byte_buf_append_crab) +sea_add_unsat_test(byte_buf_append_crab) diff --git a/seahorn/jobs_crab/byte_buf_append_crab/aws_byte_buf_append_crab_harness.c b/seahorn/jobs_crab/byte_buf_append_crab/aws_byte_buf_append_crab_harness.c new file mode 100644 index 00000000..90570049 --- /dev/null +++ b/seahorn/jobs_crab/byte_buf_append_crab/aws_byte_buf_append_crab_harness.c @@ -0,0 +1,55 @@ +/* + * + */ + +#include +#include +#include +#include + +extern void sea_dsa_alias(const void *p, ...); + +#define ARRAY_SIZE 5 + +int main() { + size_t nd_ary_sz = nd_size_t(); + assume(nd_ary_sz <= ARRAY_SIZE); + struct aws_byte_buf tos[nd_ary_sz]; + for (size_t i = 0; i < nd_ary_sz; i++) { + initialize_byte_buf(&tos[i]); + assume(aws_byte_buf_is_valid(&tos[i])); + } + + /* save current state of the data structure */ + struct aws_byte_buf tos_old[nd_ary_sz]; + memcpy(&tos_old, &tos, sizeof(struct aws_byte_buf) * nd_ary_sz); + + struct aws_byte_cursor froms[nd_ary_sz]; + for (size_t i = 0; i < nd_ary_sz; i++) { + initialize_byte_cursor(&froms[i]); + assume(aws_byte_cursor_is_valid(&froms[i])); + } + + /* save current state of the data structure */ + struct aws_byte_cursor froms_old[nd_ary_sz]; + memcpy(&froms_old, &froms, sizeof(struct aws_byte_cursor) * nd_ary_sz); + + for (size_t i = 0; i < nd_ary_sz; i++) { + if (aws_byte_buf_append(&tos[i], &froms[i]) == AWS_OP_SUCCESS) { + sassert(tos[i].len == tos_old[i].len + froms[i].len); + } else { + /* if the operation return an error, to must not change */ + // assert_bytes_match(tos_old[i].buffer, tos[i].buffer, tos[i].len); + sassert(tos_old[i].len == tos[i].len); + } + + sassert(aws_byte_buf_is_valid(&tos[i])); + sassert(aws_byte_cursor_is_valid(&froms[i])); + sassert(tos_old[i].allocator == tos[i].allocator); + sassert(tos_old[i].capacity == tos[i].capacity); + // assert_bytes_match(froms_old[i].ptr, froms[i].ptr, froms[i].len); + sassert(froms_old[i].len == froms[i].len); + } + + return 0; +} From aa898d354b98f194bece0b6c8ece5d5cfddd0209 Mon Sep 17 00:00:00 2001 From: Yusen Date: Tue, 7 Jun 2022 11:58:39 -0400 Subject: [PATCH 14/24] feat(crab): adpat assumption to no-disjunctive --- seahorn/lib/utils.c | 3 +++ 1 file changed, 3 insertions(+) diff --git a/seahorn/lib/utils.c b/seahorn/lib/utils.c index abbf6e26..20b31778 100644 --- a/seahorn/lib/utils.c +++ b/seahorn/lib/utils.c @@ -44,6 +44,9 @@ void assert_byte_from_buffer_matches( void save_byte_from_array(const uint8_t *const array, const size_t size, struct store_byte_from_buffer *const storage) { storage->index = nd_size_t(); + #ifdef __CRAB__ + assume(storage->index < size); + #endif if (size > 0 && array && storage) { assume(storage->index < size); storage->byte = array[storage->index]; From 0993691777ad49520e4ae9e99958b8a85dcfd4a5 Mon Sep 17 00:00:00 2001 From: Yusen Date: Tue, 7 Jun 2022 11:59:53 -0400 Subject: [PATCH 15/24] feat(scripts): update scripts to collect statistics for seapp --- scripts/metrics.yaml | 2 ++ seahorn/sea_crab_base.yaml | 9 ++++++--- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/scripts/metrics.yaml b/scripts/metrics.yaml index ecca9169..92c30124 100644 --- a/scripts/metrics.yaml +++ b/scripts/metrics.yaml @@ -4,6 +4,8 @@ bmc.circ_sz: 
"bmc_circuit_size" bmc.dag_sz: "bmc_dag_size" crab.isderef.not.solve: "crab.isderef.not.solve" # for crab crab.isderef.solve: "crab.isderef.solve" # for crab +crab.pp.isderef.not.solve: "pp.isderef.not.solve" +crab.pp.isderef.solve: "pp.isderef.solve" BMC: "bmc_time" BMC.solve: "bmc_solve_time" Control dependence analysis: "cda_time" diff --git a/seahorn/sea_crab_base.yaml b/seahorn/sea_crab_base.yaml index 15b6b3c5..9b0bc068 100644 --- a/seahorn/sea_crab_base.yaml +++ b/seahorn/sea_crab_base.yaml @@ -8,9 +8,12 @@ verify_options: # Use crab solve sea.is_deref horn-bv2-crab-check-is-deref: '' # reduce used by object domain - horn-bv2-crab-reduce: '' + # horn-bv2-crab-reduce: '' # Use crab infer allocation range # horn-bv2-crab-rng: '' - horn-bmc-crab-dom: 'zones' + horn-bmc-crab-dom: 'pk' # Statistics - # horn-bv2-crab-stats: '' \ No newline at end of file + # horn-bv2-crab-stats: '' + seapp-crab-lower-is-deref: true + seapp-crab-dom: 'zones' + seapp-crab-stats: true \ No newline at end of file From a88f47615ef1f4799bdaf14403a560c5a3e94f0f Mon Sep 17 00:00:00 2001 From: Yusen Su Date: Fri, 17 Mar 2023 20:52:18 -0400 Subject: [PATCH 16/24] test(clam): add configuration for clam --- scripts/get_exper_res.py | 6 +++--- seahorn/CMakeLists.txt | 3 +++ seahorn/clam_base.yaml | 43 ++++++++++++++++++++++++++++++++++++++++ verify.py.in | 9 +++++++++ 4 files changed, 58 insertions(+), 3 deletions(-) create mode 100644 seahorn/clam_base.yaml diff --git a/scripts/get_exper_res.py b/scripts/get_exper_res.py index d05b56f9..5187887e 100644 --- a/scripts/get_exper_res.py +++ b/scripts/get_exper_res.py @@ -17,7 +17,7 @@ "--cex": "seahorn(cex).csv", "--cex --horn-bmc-solver=smt-y2": "seahorn(cex, smt-y2).csv", "klee": "klee.csv", "symbiotic": "symbiotic.csv"} - +LLVM_VERSION=14 def extra_to_filename(extra, suffix='.csv', prefix=''): '''extra: --a=b --c=d to filename: a.b.c.d.csv''' @@ -60,8 +60,8 @@ def make_new_cmake_conf(): -DSMACK_ENABLE_NO_MEM_SPLIT={smack_enable_no_mem_split}' else: smack_args = "" - return f'cmake -DSEA_LINK=llvm-link-10 -DCMAKE_C_COMPILER=clang-10\ - -DCMAKE_CXX_COMPILER=clang++-10 -DSEA_ENABLE_KLEE={use_klee} {smack_args}\ + return f'cmake -DSEA_LINK=llvm-link-{LLVM_VERSION} -DCMAKE_C_COMPILER=clang-{LLVM_VERSION}\ + -DCMAKE_CXX_COMPILER=clang++-{LLVM_VERSION} -DSEA_ENABLE_KLEE={use_klee} {smack_args}\ -DSEA_WITH_BLEEDING_EDGE={use_bleeding_edge} -DSEA_ENABLE_CRAB={use_crab}\ -DSEA_ENABLE_SYMBIOTIC={use_symbiotic} -DSEAHORN_ROOT={SEAHORN_ROOT} ../ -GNinja' diff --git a/seahorn/CMakeLists.txt b/seahorn/CMakeLists.txt index 0cc9973b..1d07ecd9 100644 --- a/seahorn/CMakeLists.txt +++ b/seahorn/CMakeLists.txt @@ -377,6 +377,7 @@ separate_arguments(VERIFY_FLAGS) # Unit test for testing unsat function(sea_add_unsat_test TARGET) sea_get_file_name(BC ${TARGET}.ir) + add_test(NAME "${TARGET}_clam_test" COMMAND ${VERIFY_CMD} --clam ${BC}) add_test(NAME "${TARGET}_unsat_test" COMMAND ${VERIFY_CMD} ${VERIFY_FLAGS} --expect=unsat ${BC}) endfunction() @@ -434,6 +435,7 @@ configure_file(sea_crab_base.yaml sea.crab.yaml @ONLY) configure_file(sea_cex_base.yaml sea.cex.yaml @ONLY) configure_file(sea_vac.yaml sea.vac.yaml @ONLY) configure_file(sea_pcond.yaml sea.pcond.yaml @ONLY) +configure_file(clam_base.yaml clam.base.yaml @ONLY) # compilation recipes for aws-c-common library add_subdirectory(aws-c-common-stubs) @@ -702,3 +704,4 @@ if(SEA_ENABLE_CRAB_JOBS) add_subdirectory(jobs_crab/byte_buf_advance_crab) add_subdirectory(jobs_crab/byte_buf_append_crab) endif() +# 
add_subdirectory(jobs_unsafe/hash_table_mem_leak) diff --git a/seahorn/clam_base.yaml b/seahorn/clam_base.yaml new file mode 100644 index 00000000..d6c7270d --- /dev/null +++ b/seahorn/clam_base.yaml @@ -0,0 +1,43 @@ +clam_options: +# CLANG +# + '-g' : '' +# Optimization level for seaopt + '-O3': '' + save-temps: '' + temp-dir: /tmp/vc-crab +# +# PREPROCESSING +# + lower-unsigned-icmp : '' + llvm-peel-loops: '1' + devirt-functions: 'sea-dsa' + crab-name-values: false +# +# ANALYSIS +# + crab-dom : 'int' + crab-inter : '' + crab-inter-recursive-functions : true + crab-inter-exact-summary-reuse : false + crab-inter-max-summaries : '9999999' + crab-inter-entry-main : true + crab-track : 'mem' + crab-singleton-aliases : '' + crab-heap-analysis : 'cs-sea-dsa-types' + crab-narrowing-iterations: '1' + crab-widening-delay: '2' + crab-widening-jump-set: '0' + crab-preserve-invariants: false + crab-dom-param: "region.is_dereferenceable=true:object.reduce_everywhere=true" + crab-check: 'bounds' +# +# OUTPUT +# +# crab-sanity-checks : '' + crab-disable-warnings: '' +# crab-print-invariants: false +# crab-stats: '' + # crab-verbose: 5 + # log: 'object-print:inter:object-forget:object-project:object-leq:object-meet:inter:object-join:object-copy:inter-group' + # ocrab: 'clam.crabir' \ No newline at end of file diff --git a/verify.py.in b/verify.py.in index 3a61bc8f..1ea9dc8d 100755 --- a/verify.py.in +++ b/verify.py.in @@ -61,6 +61,8 @@ def main(argv): help='Path condition mode') argp.add_argument('--crab', action='store_true', default=False, help='Using crab') + argp.add_argument('--clam', action='store_true', default=False, + help='Using clam as front end') argp.add_argument('input_file', nargs=1) argp.add_argument('--dry-run', dest='dry_run', action='store_true', default=False, @@ -125,6 +127,13 @@ def main(argv): cmd.append('--dry-run') cmd.append(args.command) + # Clam config + if args.clam: + cmd = ['/home/ljgy/Works/seahorn/clam/build/run/bin/clam-yaml.py'] + clam_config = os.path.join(script_dir, 'seahorn', + 'clam.base.yaml') + cmd.extend(['-y', clam_config]) + cmd.extend(extra) cmd.append(input_file) From 01b87892f431757456072a924094d84c74fdd817 Mon Sep 17 00:00:00 2001 From: Yusen Su Date: Wed, 5 Jan 2022 01:27:53 -0500 Subject: [PATCH 17/24] feat(jobs): add symbiotic to cover all workable unit proofs --- seahorn/jobs/byte_buf_secure_zero/CMakeLists.txt | 2 +- seahorn/jobs/hash_byte_cursor_ptr/CMakeLists.txt | 8 ++++---- .../CMakeLists.txt | 8 ++++---- seahorn/jobs/hash_callback_c_str_eq/CMakeLists.txt | 12 ++++++------ .../jobs/hash_callback_string_eq/CMakeLists.txt | 12 ++++++------ seahorn/jobs/ptr_eq/CMakeLists.txt | 6 +++--- seahorn/jobs/string_new_from_array/CMakeLists.txt | 14 +++++++------- 7 files changed, 31 insertions(+), 31 deletions(-) diff --git a/seahorn/jobs/byte_buf_secure_zero/CMakeLists.txt b/seahorn/jobs/byte_buf_secure_zero/CMakeLists.txt index 1a96fde7..9bc0beeb 100644 --- a/seahorn/jobs/byte_buf_secure_zero/CMakeLists.txt +++ b/seahorn/jobs/byte_buf_secure_zero/CMakeLists.txt @@ -22,6 +22,6 @@ sea_add_smack(byte_buf_secure_zero # symbiotic sea_add_symbiotic(byte_buf_secure_zero - ${AWS_C_COMMON_ROOT}/source/byte_buf.c ${AWS_C_COMMON_ROOT}/source/common.c + ${AWS_C_COMMON_ROOT}/source/byte_buf.c aws_byte_buf_secure_zero_harness.c) \ No newline at end of file diff --git a/seahorn/jobs/hash_byte_cursor_ptr/CMakeLists.txt b/seahorn/jobs/hash_byte_cursor_ptr/CMakeLists.txt index 14069cdb..0d94a5f1 100644 --- a/seahorn/jobs/hash_byte_cursor_ptr/CMakeLists.txt +++ 
b/seahorn/jobs/hash_byte_cursor_ptr/CMakeLists.txt @@ -23,7 +23,7 @@ sea_add_smack(hash_byte_cursor_ptr aws_hash_byte_cursor_ptr_harness.c) # symbiotic -sea_add_symbiotic(hash_byte_cursor_ptr - ${AWS_C_COMMON_ROOT}/source/byte_buf.c - ${AWS_C_COMMON_ROOT}/source/hash_table.c - aws_hash_byte_cursor_ptr_harness.c) \ No newline at end of file +# sea_add_symbiotic(hash_byte_cursor_ptr +# ${AWS_C_COMMON_ROOT}/source/byte_buf.c +# ${AWS_C_COMMON_ROOT}/source/hash_table.c +# aws_hash_byte_cursor_ptr_harness.c) diff --git a/seahorn/jobs/hash_byte_cursor_ptr_ignore_case/CMakeLists.txt b/seahorn/jobs/hash_byte_cursor_ptr_ignore_case/CMakeLists.txt index 9461bc75..c18012f3 100644 --- a/seahorn/jobs/hash_byte_cursor_ptr_ignore_case/CMakeLists.txt +++ b/seahorn/jobs/hash_byte_cursor_ptr_ignore_case/CMakeLists.txt @@ -23,7 +23,7 @@ sea_add_smack(hash_byte_cursor_ptr_ignore_case aws_hash_byte_cursor_ptr_ignore_case_harness.c) # symbiotic -sea_add_symbiotic(hash_byte_cursor_ptr_ignore_case - ${AWS_C_COMMON_ROOT}/source/byte_buf.c - ${AWS_C_COMMON_ROOT}/source/hash_table.c - aws_hash_byte_cursor_ptr_ignore_case_harness.c) \ No newline at end of file +# sea_add_symbiotic(hash_byte_cursor_ptr_ignore_case +# ${AWS_C_COMMON_ROOT}/source/byte_buf.c +# ${AWS_C_COMMON_ROOT}/source/hash_table.c +# aws_hash_byte_cursor_ptr_ignore_case_harness.c) diff --git a/seahorn/jobs/hash_callback_c_str_eq/CMakeLists.txt b/seahorn/jobs/hash_callback_c_str_eq/CMakeLists.txt index 6b8fdccc..2a3db9f3 100644 --- a/seahorn/jobs/hash_callback_c_str_eq/CMakeLists.txt +++ b/seahorn/jobs/hash_callback_c_str_eq/CMakeLists.txt @@ -29,9 +29,9 @@ sea_add_smack(hash_callback_c_str_eq aws_hash_callback_c_str_eq_harness.c) # symbiotic -sea_add_symbiotic(hash_callback_c_str_eq - ${AWS_C_COMMON_ROOT}/source/byte_buf.c - ${AWS_C_COMMON_ROOT}/source/string.c - ${AWS_C_COMMON_ROOT}/source/common.c - ${AWS_C_COMMON_ROOT}/source/hash_table.c - aws_hash_callback_c_str_eq_harness.c) \ No newline at end of file +# sea_add_symbiotic(hash_callback_c_str_eq +# ${AWS_C_COMMON_ROOT}/source/byte_buf.c +# ${AWS_C_COMMON_ROOT}/source/string.c +# ${AWS_C_COMMON_ROOT}/source/common.c +# ${AWS_C_COMMON_ROOT}/source/hash_table.c +# aws_hash_callback_c_str_eq_harness.c) diff --git a/seahorn/jobs/hash_callback_string_eq/CMakeLists.txt b/seahorn/jobs/hash_callback_string_eq/CMakeLists.txt index 2c42dee8..4b16e3a9 100644 --- a/seahorn/jobs/hash_callback_string_eq/CMakeLists.txt +++ b/seahorn/jobs/hash_callback_string_eq/CMakeLists.txt @@ -32,9 +32,9 @@ if(SEA_ENABLE_SMACK) endif() # symbiotic -sea_add_symbiotic(hash_callback_string_eq - ${AWS_C_COMMON_ROOT}/source/byte_buf.c - ${AWS_C_COMMON_ROOT}/source/string.c - ${AWS_C_COMMON_ROOT}/source/common.c - ${AWS_C_COMMON_ROOT}/source/hash_table.c - aws_hash_callback_string_eq_harness.c) \ No newline at end of file +# sea_add_symbiotic(hash_callback_string_eq +# ${AWS_C_COMMON_ROOT}/source/byte_buf.c +# ${AWS_C_COMMON_ROOT}/source/string.c +# ${AWS_C_COMMON_ROOT}/source/common.c +# ${AWS_C_COMMON_ROOT}/source/hash_table.c +# aws_hash_callback_string_eq_harness.c) diff --git a/seahorn/jobs/ptr_eq/CMakeLists.txt b/seahorn/jobs/ptr_eq/CMakeLists.txt index 67bbc07d..96921f1d 100644 --- a/seahorn/jobs/ptr_eq/CMakeLists.txt +++ b/seahorn/jobs/ptr_eq/CMakeLists.txt @@ -15,6 +15,6 @@ sea_add_smack(ptr_eq aws_ptr_eq_harness.c) # symbiotic -sea_add_symbiotic(ptr_eq - ${AWS_C_COMMON_ROOT}/source/hash_table.c - aws_ptr_eq_harness.c) \ No newline at end of file +# sea_add_symbiotic(ptr_eq +# ${AWS_C_COMMON_ROOT}/source/hash_table.c +# 
aws_ptr_eq_harness.c) diff --git a/seahorn/jobs/string_new_from_array/CMakeLists.txt b/seahorn/jobs/string_new_from_array/CMakeLists.txt index d6e8a0f0..498c3340 100644 --- a/seahorn/jobs/string_new_from_array/CMakeLists.txt +++ b/seahorn/jobs/string_new_from_array/CMakeLists.txt @@ -35,10 +35,10 @@ sea_add_smack( ) # symbiotic -sea_add_symbiotic( - string_new_from_array - ${AWS_C_COMMON_ROOT}/source/byte_buf.c - ${AWS_C_COMMON_ROOT}/source/string.c - ${AWS_C_COMMON_ROOT}/source/common.c - aws_string_new_from_array_harness.c -) \ No newline at end of file +# sea_add_symbiotic( +# string_new_from_array +# ${AWS_C_COMMON_ROOT}/source/byte_buf.c +# ${AWS_C_COMMON_ROOT}/source/string.c +# ${AWS_C_COMMON_ROOT}/source/common.c +# aws_string_new_from_array_harness.c +# ) From 8473f4a9b4a1e0326e86aa5d3d38fb647cf9ebcc Mon Sep 17 00:00:00 2001 From: Yusen Su Date: Mon, 10 Jan 2022 15:32:10 -0500 Subject: [PATCH 18/24] feat(scripts): add symbiotic into experimental script --- scripts/get_exper_res.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/scripts/get_exper_res.py b/scripts/get_exper_res.py index 5187887e..e84d998c 100644 --- a/scripts/get_exper_res.py +++ b/scripts/get_exper_res.py @@ -176,9 +176,8 @@ def run_ctest_for_klee(): def run_ctest_for_smack(): cmake_conf = make_new_cmake_conf() command_lst = ["rm -rf *", cmake_conf, "ninja", - f'ctest -j{os.cpu_count()} -D ExperimentalTest -R smack_ --timeout 200'] - print("[SMACK] Start making SMACK results...") - make_build_path(["--smack"]) + f'ctest -j{os.cpu_count()} -D ExperimentalTest -R smack_ --timeout {args.timeout}'] + print("Start making SMACK results...") cddir = "cd " + BUILDABSPATH for strcmd in command_lst: cddir += " ; " + strcmd From d8d29804209396a70ce7a4fa25c64e1c78766c13 Mon Sep 17 00:00:00 2001 From: yusen Date: Thu, 23 Sep 2021 04:01:21 +0000 Subject: [PATCH 19/24] fix(py): update script to collect crab's results --- scripts/get_exper_res.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/scripts/get_exper_res.py b/scripts/get_exper_res.py index e84d998c..8acaf5fc 100644 --- a/scripts/get_exper_res.py +++ b/scripts/get_exper_res.py @@ -40,6 +40,27 @@ def make_build_path(extra, build_path='../build'): BUILDABSPATH = os.path.abspath(build_path) +def extra_to_filename(extra, suffix='.csv', prefix=''): + '''extra: --a=b --c=d to filename: a.b.c.d.csv''' + if (len(extra) == 0): + return f'base{suffix}' + parts = [] + for flag in extra: + if flag.startswith('--'): + flag = flag[2:] + parts.extend(flag.split('=')) + return f'{prefix}{"_".join(parts)}{suffix}' + + +def make_build_path(extra, build_path='../build'): + build_path += extra_to_filename(extra, '', '_') if extra else '_base' + print(f'[Build Path] {build_path}') + if not os.path.exists(build_path): + os.mkdir(build_path) + global BUILDABSPATH + BUILDABSPATH = os.path.abspath(build_path) + + def get_output_level(): if args.debug: output_level = sys.stdout @@ -178,6 +199,7 @@ def run_ctest_for_smack(): command_lst = ["rm -rf *", cmake_conf, "ninja", f'ctest -j{os.cpu_count()} -D ExperimentalTest -R smack_ --timeout {args.timeout}'] print("Start making SMACK results...") + make_build_path(["--smack"]) cddir = "cd " + BUILDABSPATH for strcmd in command_lst: cddir += " ; " + strcmd From eebe6caed639fbb4254e4e608e2bc30e933717ee Mon Sep 17 00:00:00 2001 From: Yusen Su Date: Fri, 17 Mar 2023 20:52:18 -0400 Subject: [PATCH 20/24] test(clam): add configuration for clam --- scripts/get_exper_res.py | 21 --------------------- 1 file 
changed, 21 deletions(-) diff --git a/scripts/get_exper_res.py b/scripts/get_exper_res.py index 8acaf5fc..d1262566 100644 --- a/scripts/get_exper_res.py +++ b/scripts/get_exper_res.py @@ -40,27 +40,6 @@ def make_build_path(extra, build_path='../build'): BUILDABSPATH = os.path.abspath(build_path) -def extra_to_filename(extra, suffix='.csv', prefix=''): - '''extra: --a=b --c=d to filename: a.b.c.d.csv''' - if (len(extra) == 0): - return f'base{suffix}' - parts = [] - for flag in extra: - if flag.startswith('--'): - flag = flag[2:] - parts.extend(flag.split('=')) - return f'{prefix}{"_".join(parts)}{suffix}' - - -def make_build_path(extra, build_path='../build'): - build_path += extra_to_filename(extra, '', '_') if extra else '_base' - print(f'[Build Path] {build_path}') - if not os.path.exists(build_path): - os.mkdir(build_path) - global BUILDABSPATH - BUILDABSPATH = os.path.abspath(build_path) - - def get_output_level(): if args.debug: output_level = sys.stdout From 847024bb6892f0b66852426db72ec371325e4d6d Mon Sep 17 00:00:00 2001 From: Yusen Su Date: Wed, 21 Feb 2024 14:42:45 -0500 Subject: [PATCH 21/24] feat(jobs): add more crab settings --- CMakeLists.txt | 11 +- scripts/get_exper_brunch_stat.py | 31 ++- scripts/get_exper_res.py | 53 ++-- scripts/metrics.yaml | 6 +- seahorn/CMakeLists.txt | 253 ++++++++++++------ seahorn/aws-c-common-stubs/CMakeLists.txt | 20 +- seahorn/clam_base.yaml | 13 +- seahorn/include/bounds.h | 2 + seahorn/include/config.h.in | 4 + .../byte_cursor_advance_nospec/CMakeLists.txt | 2 + seahorn/jobs/hash_iter_done/CMakeLists.txt | 2 +- .../aws_byte_buf_advance_crab_harness.c | 41 +-- .../aws_byte_buf_append_crab_harness.c | 75 +++--- seahorn/lib/CMakeLists.txt | 1 + seahorn/lib/bcmp.c | 16 +- seahorn/lib/bounds.c | 3 +- seahorn/sea.yaml | 47 +++- seahorn/sea_cex_base.yaml | 2 +- seahorn/sea_crab_base.yaml | 17 +- verify.py.in | 4 +- 20 files changed, 424 insertions(+), 179 deletions(-) mode change 120000 => 100644 seahorn/sea.yaml diff --git a/CMakeLists.txt b/CMakeLists.txt index 8220036d..3d80db44 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -14,7 +14,8 @@ option(SEA_LLVM_MEM_BUILTINS "Use LLVM builtins memcpy/memmove without memory sa option(SEA_ENABLE_FUZZ "Enable fuzzing" OFF) -option(SEA_ENABLE_CRAB "Enable Seahorn using Clam/Crab" OFF) +option(SEA_ENABLE_CRAB "Enable Seahorn using Crab" OFF) +option(SEA_ENABLE_CLAM "Enable Seahorn using Clam" OFF) option(SEA_WITH_BLEEDING_EDGE "Enable bleeding edge proofs" OFF) option(SEA_ENABLE_KLEE "Enable klee" OFF) @@ -58,6 +59,10 @@ set(SEA_PP "${SEAHORN_ROOT}/bin/seapp" CACHE STRING "Path to seapp binary") set(LLVMIR_OPT ${SEA_OPT}) set(AWS_C_COMMON_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/aws-c-common) +set(AWS_C_COMPRESSION_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/aws-c-compression) +set(AWS_C_SDKUTILS_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/aws-c-sdkutils) +set(AWS_C_IO_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/aws-c-io) +set(AWS_C_CAL_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/aws-c-cal) if(SEA_ENABLE_FUZZ) find_package(aws-c-common REQUIRED) @@ -75,5 +80,9 @@ include_directories(BEFORE ${CMAKE_CURRENT_BINARY_DIR}/include) include_directories(seahorn/include) include_directories(aws-c-common/include) +include_directories(aws-c-compression/include) +include_directories(aws-c-sdkutils/include) +include_directories(aws-c-io/include) +include_directories(aws-c-cal/include) include_directories(${SEAHORN_ROOT}/include) add_subdirectory(seahorn) diff --git a/scripts/get_exper_brunch_stat.py b/scripts/get_exper_brunch_stat.py index 5dd69135..f61f2342 100644 
--- a/scripts/get_exper_brunch_stat.py +++ b/scripts/get_exper_brunch_stat.py @@ -17,7 +17,7 @@ def read_brunchstat_from_log(log_file_name): cur_test = None while line: # look for next test - new_test = re.search(BRUNCH_DICT["job_name"], line) + new_test = re.search("Testing: ", line) if new_test: new_test_data = defaultdict(lambda: 'n/a') span = new_test.span() @@ -33,8 +33,7 @@ def read_brunchstat_from_log(log_file_name): stat_num = stat[-1] data[-1][stat_name] = stat_num line = log_file.readline() - if cur_test: - data[cur_test] = move_dict_items_to_lst(row_dict) + log_file.close() return data @@ -117,6 +116,30 @@ def write_brunchstat_into_csv(data, out_file): row = (job_data[k] for k, _ in metric_serlz) writer.writerow(row) + +# def read_timimg_results(result_dir): +# log_file = open(log_file_name, 'r') +# line = log_file.readline() +# data = defaultdict(list) +# row_dict = {} +# while line: +# if "************** ANALYSIS RESULTS ****************" in line: +# result_status = Status.SUCCESS +# if "** OS Error:" in line: +# result_status = Status.TIMEOUT +# if "BRUNCH_STAT" in line: +# parts = line.strip().split() +# name = parts[1] +# if name == "Clam": +# number = float(parts[2]) +# if result_status == Status.CLAM_ERROR: +# pairs.append([dirpath.strip().split('/')[1], "error"]) +# elif result_status == Status.TIMEOUT: +# pairs.append([dirpath.strip().split('/')[1], "timeout"]) +# else: +# pairs.append([dirpath.strip().split('/')[1], number]) +# draw_results_table(pairs, result_dir) + if __name__ == "__main__": parser = argparse.ArgumentParser( description='in and out files') @@ -129,4 +152,4 @@ def write_brunchstat_into_csv(data, out_file): help="ouput csv filename") args = parser.parse_args() data = read_brunchstat_from_log(args.logfile) - write_brunchstat_into_csv(data, args.outfile) + write_brunchstat_into_csv(data, args.outfile) \ No newline at end of file diff --git a/scripts/get_exper_res.py b/scripts/get_exper_res.py index d1262566..c057ca94 100644 --- a/scripts/get_exper_res.py +++ b/scripts/get_exper_res.py @@ -5,18 +5,18 @@ import glob import subprocess import argparse -from get_exper_brunch_stat import * +from get_exper_brunch_stat import read_brunchstat_from_log, write_brunchstat_into_csv -BUILDABSPATH = os.path.abspath('../exper/') +BUILDABSPATH = os.path.abspath('../build/') DATAABSPATH = os.path.abspath('../') + "/data" -SEAHORN_ROOT = '../../build-seahorn' # Put your seahorn root dir here +SEAHORN_ROOT = '/home/yusen/seawork/seahorn/build/run' # Put your seahorn root dir here FILE_DICT = { "": "seahorn.csv", "--vac": "seahorn(vac).csv", "--horn-bmc-solver=smt-y2": "seahorn(smt-y2).csv", "--cex": "seahorn(cex).csv", "--cex --horn-bmc-solver=smt-y2": "seahorn(cex, smt-y2).csv", - "klee": "klee.csv", "symbiotic": "symbiotic.csv"} + "klee": "klee.csv", "symbiotic": "symbiotic.csv", "clam": "clam.csv"} LLVM_VERSION=14 def extra_to_filename(extra, suffix='.csv', prefix=''): @@ -52,7 +52,7 @@ def make_new_cmake_conf(): use_klee = "ON" if args.klee else "OFF" use_symbiotic = "ON" if args.symbiotic else "OFF" use_bleeding_edge = "ON" if args.bleed_edge else "OFF" - use_crab = "ON" if "--crab" in extra else "OFF" + # use_crab = "ON" if "--crab" in extra else "OFF" if args.smack_parser: use_smack = "ON" if args.smack else "OFF" smack_enable_no_mem_split = "ON" if args.mem_no_split else "OFF" @@ -62,7 +62,7 @@ def make_new_cmake_conf(): smack_args = "" return f'cmake -DSEA_LINK=llvm-link-{LLVM_VERSION} -DCMAKE_C_COMPILER=clang-{LLVM_VERSION}\ 
        -DCMAKE_CXX_COMPILER=clang++-{LLVM_VERSION} -DSEA_ENABLE_KLEE={use_klee} {smack_args}\
-        -DSEA_WITH_BLEEDING_EDGE={use_bleeding_edge} -DSEA_ENABLE_CRAB={use_crab}\
+        -DSEA_WITH_BLEEDING_EDGE={use_bleeding_edge} -DSEA_ENABLE_CRAB=OFF\
         -DSEA_ENABLE_SYMBIOTIC={use_symbiotic} -DSEAHORN_ROOT={SEAHORN_ROOT} ../ -GNinja'
@@ -99,18 +99,6 @@ def collect_res_from_ctest(file_name):
     print("Done, find result csv file at: %s" % file_name)
 
 
-def extra_to_filename(extra, suffix='csv'):
-    '''extra: --a=b --c=d to filename: a.b.c.d.csv'''
-    if (len(extra) == 0):
-        return f'base.{suffix}'
-    parts = []
-    for flag in extra:
-        if flag.startswith('--'):
-            flag = flag[2:]
-        parts.extend(flag.split('='))
-    return f'{"_".join(parts)}.{suffix}'
-
-
 def run_ctest_for_seahorn():
     print("[SeaHorn] Start making SeaHorn results...")
     set_env = ''
@@ -120,7 +108,7 @@ def run_ctest_for_seahorn():
         set_env = f'env VERIFY_FLAGS=\"{verify_flags}\"'
     cmake_conf = make_new_cmake_conf()
     command_lst = ["rm -rf *", cmake_conf, "ninja",
-                   f'{set_env} ctest -j{os.cpu_count()} -D ExperimentalTest -R . --timeout {args.timeout}']
+                   f'{set_env} ctest -j 12 -D ExperimentalTest -R . --timeout {args.timeout}']
     make_build_path(extra)
     cddir = "cd " + BUILDABSPATH
     for strcmd in command_lst:
@@ -137,6 +125,22 @@ def run_ctest_for_seahorn():
     collect_stat_from_ctest_log(extra_to_filename(extra, suffix='.brunch.csv'),
                                 True if "--crab" in extra else False)
 
+def run_ctest_for_clam():
+    print("[Clam] Start making Clam results...")
+    cmake_conf = make_new_cmake_conf()
+    command_lst = ["rm -rf *", cmake_conf, "ninja",
+                   f'ctest -j{os.cpu_count()} -D ExperimentalTest -R clam_ --timeout 200']
+    make_build_path(["--clam"])
+    cddir = "cd " + BUILDABSPATH
+    for strcmd in command_lst:
+        cddir += " ; " + strcmd
+    if args.debug:
+        print(f'[Command] {cddir}')
+    process = subprocess.Popen(
+        '/bin/bash',
+        stdin=subprocess.PIPE,
+        stdout=get_output_level())
+    _ = process.communicate(cddir.encode())
+    collect_res_from_ctest(FILE_DICT["clam"])
 
 def collect_stat_from_ctest_log(outfile, use_crab):
     test_tmpdir = os.path.join(BUILDABSPATH, 'Testing', 'Temporary')
@@ -176,8 +180,8 @@ def run_ctest_for_klee():
 def run_ctest_for_smack():
     cmake_conf = make_new_cmake_conf()
     command_lst = ["rm -rf *", cmake_conf, "ninja",
-                   f'ctest -j{os.cpu_count()} -D ExperimentalTest -R smack_ --timeout {args.timeout}']
-    print("Start making SMACK results...")
+                   f'ctest -j{os.cpu_count()} -D ExperimentalTest -R smack_ --timeout 200']
+    print("[SMACK] Start making SMACK results...")
     make_build_path(["--smack"])
     cddir = "cd " + BUILDABSPATH
     for strcmd in command_lst:
@@ -193,7 +197,6 @@ def run_ctest_for_smack():
     collect_res_from_ctest(extra_to_filename(
         [str(args.precise), str(args.checks), mem_split]))
 
-
 def run_ctest_for_symbiotic():
     cmake_conf = make_new_cmake_conf()
     command_lst = ["rm -rf *", cmake_conf, "ninja",
@@ -211,7 +214,6 @@ def run_ctest_for_symbiotic():
     _ = process.communicate(cddir.encode())
     collect_stat_from_ctest_log(FILE_DICT["symbiotic"])
 
-
 def main():
     os.makedirs(DATAABSPATH, exist_ok=True)
     os.makedirs(BUILDABSPATH, exist_ok=True)
@@ -223,6 +225,8 @@ def main():
         run_ctest_for_smack()
     if args.symbiotic:
         run_ctest_for_symbiotic()
+    if args.clam:
+        run_ctest_for_clam()
 
 
 if __name__ == "__main__":
@@ -232,6 +236,7 @@ def main():
     parser.add_argument('--seahorn', action='store_true', default=True)
    parser.add_argument('--klee', action='store_true', default=False)
    parser.add_argument('--symbiotic', action='store_true', default=False)
+    parser.add_argument('--clam', action='store_true', default=False)
parser.add_argument('--bleed_edge', action='store_true', default=False) parser.add_argument('--debug', action='store_true', default=False) parser.add_argument('--timeout', type=int, default=2000, @@ -262,4 +267,4 @@ def main(): args.seahorn = False if args.symbiotic: args.seahorn = False - main() + main() \ No newline at end of file diff --git a/scripts/metrics.yaml b/scripts/metrics.yaml index 92c30124..ca828360 100644 --- a/scripts/metrics.yaml +++ b/scripts/metrics.yaml @@ -2,10 +2,12 @@ job_name: "job_name" Result: "result" bmc.circ_sz: "bmc_circuit_size" bmc.dag_sz: "bmc_dag_size" -crab.isderef.not.solve: "crab.isderef.not.solve" # for crab -crab.isderef.solve: "crab.isderef.solve" # for crab +crab.opsem.isderef.not.solve: "opsem.isderef.not.solve" # for crab +crab.opsem.isderef.solve: "opsem.isderef.solve" # for crab crab.pp.isderef.not.solve: "pp.isderef.not.solve" crab.pp.isderef.solve: "pp.isderef.solve" +opsem.crab: "opsem_crab_time" +seapp.crab: "pp_crab_time" BMC: "bmc_time" BMC.solve: "bmc_solve_time" Control dependence analysis: "cda_time" diff --git a/seahorn/CMakeLists.txt b/seahorn/CMakeLists.txt index 1d07ecd9..1b57021f 100644 --- a/seahorn/CMakeLists.txt +++ b/seahorn/CMakeLists.txt @@ -377,7 +377,9 @@ separate_arguments(VERIFY_FLAGS) # Unit test for testing unsat function(sea_add_unsat_test TARGET) sea_get_file_name(BC ${TARGET}.ir) - add_test(NAME "${TARGET}_clam_test" COMMAND ${VERIFY_CMD} --clam ${BC}) + if(SEA_ENABLE_CLAM) + add_test(NAME "${TARGET}_clam_test" COMMAND ${VERIFY_CMD} --clam ${BC}) + endif() add_test(NAME "${TARGET}_unsat_test" COMMAND ${VERIFY_CMD} ${VERIFY_FLAGS} --expect=unsat ${BC}) endfunction() @@ -406,7 +408,13 @@ set(FUZZ_MAX_STRING_LEN 128) set(KLEE_MAX_SIZE 3) # Temporary set the maximum size of array list is 3 due to KLEE timeout set(MAX_ITEM_SIZE 2) set(MAX_ITEM_SIZE_FUZZ 256) -set(MAX_INITIAL_ITEM_ALLOCATION 9223372036854775808ULL) +if(SEA_ENABLE_CRAB OR SEA_ENABLE_CLAM) + set(MAX_INITIAL_ITEM_ALLOCATION 30) + set(MAX_ARRAY_SIZE 10) +else() + set(MAX_INITIAL_ITEM_ALLOCATION 9223372036854775808ULL) + set(MAX_ARRAY_SIZE 1) +endif() set(MAX_INITIAL_ITEM_ALLOCATION_FUZZ 10) set(MAX_TABLE_SIZE 4) set(MAX_PRIORITY_QUEUE_ITEMS 5) @@ -606,80 +614,78 @@ add_subdirectory(jobs/priority_queue_clean_up) add_subdirectory(jobs/ring_buffer_clean_up) add_subdirectory(jobs/ring_buffer_init) -add_subdirectory(jobs2/array_eq2) -add_subdirectory(jobs2/array_eq_c_str2) -add_subdirectory(jobs2/array_eq_c_str_ignore_case2) -add_subdirectory(jobs2/array_eq_ignore_case2) -add_subdirectory(jobs2/array_list_back2) -add_subdirectory(jobs2/array_list_capacity2) -add_subdirectory(jobs2/array_list_ensure_capacity2) -add_subdirectory(jobs2/array_list_front2) -add_subdirectory(jobs2/array_list_get_at2) -add_subdirectory(jobs2/array_list_get_at_ptr2) -add_subdirectory(jobs2/array_list_init_static2) -add_subdirectory(jobs2/array_list_length2) -add_subdirectory(jobs2/array_list_pop_back2) -add_subdirectory(jobs2/array_list_pop_front2) -add_subdirectory(jobs2/array_list_push_back2) -add_subdirectory(jobs2/array_list_set_at2) -add_subdirectory(jobs2/array_list_shrink_to_fit2) -add_subdirectory(jobs2/array_list_sort2) -add_subdirectory(jobs2/byte_buf_eq2) -add_subdirectory(jobs2/byte_buf_advance2) -add_subdirectory(jobs2/byte_buf_append2) -add_subdirectory(jobs2/byte_buf_append_dynamic2) -add_subdirectory(jobs2/byte_buf_append_with_lookup2) -add_subdirectory(jobs2/byte_buf_cat2) -add_subdirectory(jobs2/byte_buf_eq_c_str2) -add_subdirectory(jobs2/byte_buf_eq_c_str_ignore_case2) 
-add_subdirectory(jobs2/byte_buf_eq_ignore_case2) -add_subdirectory(jobs2/byte_buf_init_copy2) -add_subdirectory(jobs2/array_list_clear2) -add_subdirectory(jobs2/array_list_erase2) -add_subdirectory(jobs2/array_list_pop_front_n2) -add_subdirectory(jobs2/byte_buf_write2) -add_subdirectory(jobs2/byte_buf_write_be162) -add_subdirectory(jobs2/byte_buf_write_be322) -add_subdirectory(jobs2/byte_buf_write_be642) -add_subdirectory(jobs2/byte_buf_write_from_whole_buffer2) -add_subdirectory(jobs2/byte_buf_write_from_whole_cursor2) -add_subdirectory(jobs2/byte_buf_write_from_whole_string2) -add_subdirectory(jobs2/byte_buf_write_u82) -add_subdirectory(jobs2/byte_cursor_advance2) -add_subdirectory(jobs2/byte_cursor_read2) -add_subdirectory(jobs2/byte_cursor_read_u8_2) -add_subdirectory(jobs2/byte_cursor_eq2) -add_subdirectory(jobs2/byte_cursor_from_buf2) -add_subdirectory(jobs2/byte_cursor_read_be32_2) -add_subdirectory(jobs2/byte_cursor_eq_byte_buf_ignore_case2) -add_subdirectory(jobs2/byte_cursor_eq_c_str2) -add_subdirectory(jobs2/byte_cursor_read_be64_2) -add_subdirectory(jobs2/byte_cursor_compare_lookup2) -add_subdirectory(jobs2/byte_cursor_trim_pred2) -add_subdirectory(jobs2/byte_cursor_left_trim_pred2) -add_subdirectory(jobs2/byte_cursor_advance_nospec2) -add_subdirectory(jobs2/byte_cursor_read_be16_2) -add_subdirectory(jobs2/byte_cursor_eq_c_str_ignore_case2) -add_subdirectory(jobs2/byte_cursor_eq_byte_buf2) -add_subdirectory(jobs2/byte_cursor_eq_ignore_case2) -add_subdirectory(jobs2/byte_cursor_right_trim_pred2) -add_subdirectory(jobs2/byte_cursor_read_and_fill_buffer2) -add_subdirectory(jobs2/byte_cursor_satisfies_pred2) -add_subdirectory(jobs2/byte_cursor_compare_lexical2) -add_subdirectory(jobs2/hash_iter_done2) -add_subdirectory(jobs2/hash_table_get_entry_count2) -add_subdirectory(jobs2/hash_iter_begin2) -add_subdirectory(jobs2/hash_table_find2) -add_subdirectory(jobs2/hash_iter_next2) -add_subdirectory(jobs2/hash_iter_begin_done2) -add_subdirectory(jobs2/ring_buffer_acquire2) -add_subdirectory(jobs2/ring_buffer_release2) -add_subdirectory(jobs2/ring_buffer_buf_belongs_to_pool2) -add_subdirectory(jobs2/ring_buffer_acquire_up_to2) - -add_subdirectory(jobs_unsafe/mem_realloc_unsafe) - -if(${SEA_WITH_BLEEDING_EDGE}) +# add_subdirectory(jobs2/array_eq2) +# add_subdirectory(jobs2/array_eq_c_str2) +# add_subdirectory(jobs2/array_eq_c_str_ignore_case2) +# add_subdirectory(jobs2/array_eq_ignore_case2) +# add_subdirectory(jobs2/array_list_back2) +# add_subdirectory(jobs2/array_list_capacity2) +# add_subdirectory(jobs2/array_list_ensure_capacity2) +# add_subdirectory(jobs2/array_list_front2) +# add_subdirectory(jobs2/array_list_get_at2) +# add_subdirectory(jobs2/array_list_get_at_ptr2) +# add_subdirectory(jobs2/array_list_init_static2) +# add_subdirectory(jobs2/array_list_length2) +# add_subdirectory(jobs2/array_list_pop_back2) +# add_subdirectory(jobs2/array_list_pop_front2) +# add_subdirectory(jobs2/array_list_push_back2) +# add_subdirectory(jobs2/array_list_set_at2) +# add_subdirectory(jobs2/array_list_shrink_to_fit2) +# add_subdirectory(jobs2/array_list_sort2) +# add_subdirectory(jobs2/byte_buf_eq2) +# add_subdirectory(jobs2/byte_buf_advance2) +# add_subdirectory(jobs2/byte_buf_append2) +# add_subdirectory(jobs2/byte_buf_append_dynamic2) +# add_subdirectory(jobs2/byte_buf_append_with_lookup2) +# add_subdirectory(jobs2/byte_buf_cat2) +# add_subdirectory(jobs2/byte_buf_eq_c_str2) +# add_subdirectory(jobs2/byte_buf_eq_c_str_ignore_case2) +# add_subdirectory(jobs2/byte_buf_eq_ignore_case2) +# 
add_subdirectory(jobs2/byte_buf_init_copy2) +# add_subdirectory(jobs2/array_list_clear2) +# add_subdirectory(jobs2/array_list_erase2) +# add_subdirectory(jobs2/array_list_pop_front_n2) +# add_subdirectory(jobs2/byte_buf_write2) +# add_subdirectory(jobs2/byte_buf_write_be162) +# add_subdirectory(jobs2/byte_buf_write_be322) +# add_subdirectory(jobs2/byte_buf_write_be642) +# add_subdirectory(jobs2/byte_buf_write_from_whole_buffer2) +# add_subdirectory(jobs2/byte_buf_write_from_whole_cursor2) +# add_subdirectory(jobs2/byte_buf_write_from_whole_string2) +# add_subdirectory(jobs2/byte_buf_write_u82) +# add_subdirectory(jobs2/byte_cursor_advance2) +# add_subdirectory(jobs2/byte_cursor_read2) +# add_subdirectory(jobs2/byte_cursor_read_u8_2) +# add_subdirectory(jobs2/byte_cursor_eq2) +# add_subdirectory(jobs2/byte_cursor_from_buf2) +# add_subdirectory(jobs2/byte_cursor_read_be32_2) +# add_subdirectory(jobs2/byte_cursor_eq_byte_buf_ignore_case2) +# add_subdirectory(jobs2/byte_cursor_eq_c_str2) +# add_subdirectory(jobs2/byte_cursor_read_be64_2) +# add_subdirectory(jobs2/byte_cursor_compare_lookup2) +# add_subdirectory(jobs2/byte_cursor_trim_pred2) +# add_subdirectory(jobs2/byte_cursor_left_trim_pred2) +# add_subdirectory(jobs2/byte_cursor_advance_nospec2) +# add_subdirectory(jobs2/byte_cursor_read_be16_2) +# add_subdirectory(jobs2/byte_cursor_eq_c_str_ignore_case2) +# add_subdirectory(jobs2/byte_cursor_eq_byte_buf2) +# add_subdirectory(jobs2/byte_cursor_eq_ignore_case2) +# add_subdirectory(jobs2/byte_cursor_right_trim_pred2) +# add_subdirectory(jobs2/byte_cursor_read_and_fill_buffer2) +# add_subdirectory(jobs2/byte_cursor_satisfies_pred2) +# add_subdirectory(jobs2/byte_cursor_compare_lexical2) +# add_subdirectory(jobs2/hash_iter_done2) +# add_subdirectory(jobs2/hash_table_get_entry_count2) +# add_subdirectory(jobs2/hash_iter_begin2) +# add_subdirectory(jobs2/hash_table_find2) +# add_subdirectory(jobs2/hash_iter_next2) +# add_subdirectory(jobs2/hash_iter_begin_done2) +# add_subdirectory(jobs2/ring_buffer_acquire2) +# add_subdirectory(jobs2/ring_buffer_release2) +# add_subdirectory(jobs2/ring_buffer_buf_belongs_to_pool2) +# add_subdirectory(jobs2/ring_buffer_acquire_up_to2) + +# if(${SEA_WITH_BLEEDING_EDGE}) add_subdirectory(jobs/priority_queue_s_swap) add_subdirectory(jobs/priority_queue_s_sift_up) add_subdirectory(jobs/priority_queue_s_sift_down) @@ -698,10 +704,93 @@ if(${SEA_WITH_BLEEDING_EDGE}) add_subdirectory(jobs/hash_table_foreach_deep_loose) add_subdirectory(jobs/hash_table_foreach_deep_precise) -endif() - -if(SEA_ENABLE_CRAB_JOBS) - add_subdirectory(jobs_crab/byte_buf_advance_crab) - add_subdirectory(jobs_crab/byte_buf_append_crab) -endif() +# endif() + +# if(SEA_ENABLE_CRAB OR SEA_ENABLE_CLAM) + # add_subdirectory(jobs_crab/array_list_back_crab) + # add_subdirectory(jobs_crab/array_list_capacity_crab) + # add_subdirectory(jobs_crab/array_list_clean_up_crab) + # add_subdirectory(jobs_crab/array_list_clear_crab) + # add_subdirectory(jobs_crab/array_list_comparator_string_crab) + # add_subdirectory(jobs_crab/array_list_copy_crab) + # add_subdirectory(jobs_crab/array_list_ensure_capacity_crab) + # add_subdirectory(jobs_crab/array_list_erase_crab) + # add_subdirectory(jobs_crab/array_list_front_crab) + # add_subdirectory(jobs_crab/array_list_get_at_crab) + # add_subdirectory(jobs_crab/array_list_get_at_ptr_crab) + # add_subdirectory(jobs_crab/array_list_init_dynamic_crab) + # add_subdirectory(jobs_crab/array_list_init_static_crab) + # add_subdirectory(jobs_crab/array_list_length_crab) + # 
add_subdirectory(jobs_crab/array_list_pop_back_crab) + # add_subdirectory(jobs_crab/array_list_pop_front_crab) + # add_subdirectory(jobs_crab/array_list_pop_front_n_crab) + # add_subdirectory(jobs_crab/array_list_push_back_crab) + # add_subdirectory(jobs_crab/array_list_push_front_crab) + # add_subdirectory(jobs_crab/array_list_set_at_crab) + # add_subdirectory(jobs_crab/array_list_shrink_to_fit_crab) + # add_subdirectory(jobs_crab/array_list_sort_crab) + # add_subdirectory(jobs_crab/array_list_swap_contents_crab) + # add_subdirectory(jobs_crab/array_list_swap_crab) + # add_subdirectory(jobs_crab/byte_buf_advance_crab) + # add_subdirectory(jobs_crab/byte_buf_append_crab) + # add_subdirectory(jobs_crab/byte_buf_append_dynamic_crab) + # add_subdirectory(jobs_crab/byte_buf_append_with_lookup_crab) + # add_subdirectory(jobs_crab/byte_buf_cat_crab) + # add_subdirectory(jobs_crab/byte_buf_clean_up_crab) + # add_subdirectory(jobs_crab/byte_buf_clean_up_secure_crab) + # add_subdirectory(jobs_crab/byte_buf_eq_c_str_crab) + # add_subdirectory(jobs_crab/byte_buf_eq_c_str_ignore_case_crab) + # add_subdirectory(jobs_crab/byte_buf_eq_crab) + # add_subdirectory(jobs_crab/byte_buf_eq_ignore_case_crab) + # add_subdirectory(jobs_crab/byte_buf_from_array_crab) + # add_subdirectory(jobs_crab/byte_buf_from_c_str_crab) + # add_subdirectory(jobs_crab/byte_buf_from_empty_array_crab) + # add_subdirectory(jobs_crab/byte_buf_init_copy_crab) + # add_subdirectory(jobs_crab/byte_buf_init_copy_from_cursor_crab) + # add_subdirectory(jobs_crab/byte_buf_init_crab) + # add_subdirectory(jobs_crab/byte_buf_reserve_crab) + # add_subdirectory(jobs_crab/byte_buf_reserve_relative_crab) + # add_subdirectory(jobs_crab/byte_buf_reset_crab) + # add_subdirectory(jobs_crab/byte_buf_secure_zero_crab) + # add_subdirectory(jobs_crab/byte_buf_write_be16_crab) + # add_subdirectory(jobs_crab/byte_buf_write_be32_crab) + # add_subdirectory(jobs_crab/byte_buf_write_be64_crab) + # add_subdirectory(jobs_crab/byte_buf_write_crab) + # add_subdirectory(jobs_crab/byte_buf_1) + # add_subdirectory(jobs_crab/byte_buf_cursor_1) + # add_subdirectory(jobs_crab/byte_buf_write_from_whole_buffer_crab) + # add_subdirectory(jobs_crab/byte_buf_write_from_whole_cursor_crab) + # add_subdirectory(jobs_crab/byte_buf_write_from_whole_string_crab) + # add_subdirectory(jobs_crab/byte_buf_write_u8_crab) + # add_subdirectory(jobs_crab/byte_cursor_1) + # add_subdirectory(jobs_crab/byte_cursor_advance_crab) + # add_subdirectory(jobs_crab/byte_cursor_advance_nospec_crab) + # add_subdirectory(jobs_crab/byte_cursor_compare_lexical_crab) + # add_subdirectory(jobs_crab/byte_cursor_read_crab) + # add_subdirectory(jobs_crab/hash_table_put_crab) + # add_subdirectory(jobs_crab/priority_queue_1) + # add_subdirectory(jobs_crab/priority_queue_scheduler) + # add_subdirectory(jobs_crab/priority_queue_push_crab) + # add_subdirectory(jobs_crab/string_eq_byte_buf_crab) + # add_subdirectory(jobs_crab/string_eq_crab) + add_subdirectory(jobs_compression/huffman_test_transitive) + add_subdirectory(jobs_compression/huffman_test_transitive_chunked) + add_subdirectory(jobs_sdkutils/profile_collection_new_from_buffer) + # add_subdirectory(jobs_sdkutils/profile_collection_new_from_merge) + # add_subdirectory(jobs_sdkutils/endpoints_rule_engine) + add_subdirectory(jobs_io/pem_objects_init_from_file_contents) + add_subdirectory(jobs_io/sanitize_pem) + add_subdirectory(jobs_io/input_stream_from_cursor) + # add_subdirectory(jobs_io/event_loop_group_setup_and_shutdown) + # 
add_subdirectory(jobs_io/event_thread_task_run) + add_subdirectory(jobs_cal/der) + add_subdirectory(jobs_cal/ecc) + add_subdirectory(jobs_cal/rsa) + add_subdirectory(jobs_cal/sha1) + add_subdirectory(jobs_cal/sha256) + add_subdirectory(jobs_cal/sha256_hmac) +# endif() # add_subdirectory(jobs_unsafe/hash_table_mem_leak) +# add_subdirectory(jobs_unsafe/unsat_1) +# add_subdirectory(jobs_unsafe/struct_assign) +# add_subdirectory(jobs_unsafe/ssa_crab) \ No newline at end of file diff --git a/seahorn/aws-c-common-stubs/CMakeLists.txt b/seahorn/aws-c-common-stubs/CMakeLists.txt index b2ed9f53..b3f5b970 100644 --- a/seahorn/aws-c-common-stubs/CMakeLists.txt +++ b/seahorn/aws-c-common-stubs/CMakeLists.txt @@ -51,4 +51,22 @@ add_library(hash_table_state_is_valid_ignore_entry_count_override hash_table_sta sea_attach_bc(hash_table_state_is_valid_ignore_entry_count_override) add_library(aws_hash_iter_overrides aws_hash_iter_overrides.c) -sea_attach_bc(aws_hash_iter_overrides) \ No newline at end of file +sea_attach_bc(aws_hash_iter_overrides) + +add_library(aws_logger_overrides aws_logger_overrides.c) +sea_attach_bc(aws_logger_overrides) + +add_library(aws_hash_string_overrides aws_hash_string_overrides.c) +sea_attach_bc(aws_hash_string_overrides) + +add_library(aws_hash_byte_cursor_ptr_overrides aws_hash_byte_cursor_ptr_overrides.c) +sea_attach_bc(aws_hash_byte_cursor_ptr_overrides) + +add_library(aws_ref_count_overrides aws_ref_count_overrides.c) +sea_attach_bc(aws_ref_count_overrides) + +add_library(snprintf_overrides snprintf_overrides.c) +sea_attach_bc(snprintf_overrides) + +add_library(aws_nospec_mask_overrides aws_nospec_mask_overrides.c) +sea_attach_bc(aws_nospec_mask_overrides) \ No newline at end of file diff --git a/seahorn/clam_base.yaml b/seahorn/clam_base.yaml index d6c7270d..93c53107 100644 --- a/seahorn/clam_base.yaml +++ b/seahorn/clam_base.yaml @@ -2,6 +2,7 @@ clam_options: # CLANG # '-g' : '' + 'inline': '' # Optimization level for seaopt '-O3': '' save-temps: '' @@ -16,7 +17,7 @@ clam_options: # # ANALYSIS # - crab-dom : 'int' + crab-dom : 'zones' crab-inter : '' crab-inter-recursive-functions : true crab-inter-exact-summary-reuse : false @@ -29,15 +30,17 @@ clam_options: crab-widening-delay: '2' crab-widening-jump-set: '0' crab-preserve-invariants: false - crab-dom-param: "region.is_dereferenceable=true:object.reduce_everywhere=true" - crab-check: 'bounds' + crab-dom-param: "region.is_dereferenceable=true:object.singletons_in_base=false:object.reduction_level=NONE" + crab-check: 'is-deref' # bounds assert # # OUTPUT # # crab-sanity-checks : '' crab-disable-warnings: '' -# crab-print-invariants: false + # crab-heap-dot: '' + crab-print-invariants: false # crab-stats: '' # crab-verbose: 5 # log: 'object-print:inter:object-forget:object-project:object-leq:object-meet:inter:object-join:object-copy:inter-group' - # ocrab: 'clam.crabir' \ No newline at end of file + # ocrab: 'obj.crabir' + # crab-heap-dot: '' \ No newline at end of file diff --git a/seahorn/include/bounds.h b/seahorn/include/bounds.h index 00da72b6..721817e3 100644 --- a/seahorn/include/bounds.h +++ b/seahorn/include/bounds.h @@ -40,4 +40,6 @@ size_t fuzz_max_array_list_item_size(void); */ size_t fuzz_max_array_list_len(void); +size_t crab_max_array_size(void); + SEAHORN_EXTERN_C_END diff --git a/seahorn/include/config.h.in b/seahorn/include/config.h.in index a3e915cc..7b332950 100644 --- a/seahorn/include/config.h.in +++ b/seahorn/include/config.h.in @@ -45,4 +45,8 @@ #define MAX_HEAP_HEIGHT @MAX_HEAP_HEIGHT@ #endif 
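+/* Bound on the size of the arrays allocated in the Crab/Clam harnesses;
+ * set from MAX_ARRAY_SIZE in seahorn/CMakeLists.txt (10 when SEA_ENABLE_CRAB
+ * or SEA_ENABLE_CLAM is ON, 1 otherwise). */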
+#ifndef MAX_ARRAY_SIZE +#define MAX_ARRAY_SIZE @MAX_ARRAY_SIZE@ +#endif + /* clang-format on */ diff --git a/seahorn/jobs/byte_cursor_advance_nospec/CMakeLists.txt b/seahorn/jobs/byte_cursor_advance_nospec/CMakeLists.txt index e138c415..0f0eaa8c 100644 --- a/seahorn/jobs/byte_cursor_advance_nospec/CMakeLists.txt +++ b/seahorn/jobs/byte_cursor_advance_nospec/CMakeLists.txt @@ -1,6 +1,8 @@ add_executable(byte_cursor_advance_nospec ${AWS_C_COMMON_ROOT}/source/byte_buf.c aws_byte_cursor_advance_nospec_harness.c) +sea_overlink_libraries(byte_cursor_advance_nospec aws_nospec_mask_overrides.ir) +sea_overlink_libraries(byte_cursor_advance_nospec byte_cursor_advance_nospec_override.ir) sea_attach_bc_link(byte_cursor_advance_nospec) sea_add_unsat_test(byte_cursor_advance_nospec) diff --git a/seahorn/jobs/hash_iter_done/CMakeLists.txt b/seahorn/jobs/hash_iter_done/CMakeLists.txt index 457afdae..54c42246 100644 --- a/seahorn/jobs/hash_iter_done/CMakeLists.txt +++ b/seahorn/jobs/hash_iter_done/CMakeLists.txt @@ -3,7 +3,7 @@ add_executable( ${AWS_C_COMMON_ROOT}/source/common.c aws_hash_iter_done_harness.c ) -target_compile_definitions(hash_iter_done PUBLIC MAX_TABLE_SIZE=SIZE_MAX) +target_compile_definitions(hash_iter_done PUBLIC MAX_TABLE_SIZE=${MAX_TABLE_SIZE}) sea_link_libraries(hash_iter_done hash_table.opt.ir) sea_overlink_libraries(hash_iter_done hash_table_state_is_valid_override.ir) sea_attach_bc_link(hash_iter_done) diff --git a/seahorn/jobs_crab/byte_buf_advance_crab/aws_byte_buf_advance_crab_harness.c b/seahorn/jobs_crab/byte_buf_advance_crab/aws_byte_buf_advance_crab_harness.c index a0fb157c..9f85574c 100644 --- a/seahorn/jobs_crab/byte_buf_advance_crab/aws_byte_buf_advance_crab_harness.c +++ b/seahorn/jobs_crab/byte_buf_advance_crab/aws_byte_buf_advance_crab_harness.c @@ -6,51 +6,60 @@ #include #include #include +#include +#include +#include "proof_allocators.h" -extern void sea_dsa_alias(const void *p, ...); +#define SIZE MAX_ARRAY_SIZE int main() { /* parameters */ - struct aws_byte_buf buf; - initialize_byte_buf(&buf); + struct aws_byte_buf *buf_ary[SIZE]; + + for (unsigned i = 0; i < SIZE; i ++) { + struct aws_byte_buf *tmp = bounded_malloc_havoc(sizeof(struct aws_byte_buf)); + initialize_byte_buf(tmp); + assume(aws_byte_buf_is_valid(tmp)); + buf_ary[i] = tmp; + } + uint8_t idx = nd_uint8_t(); + assume(idx < SIZE); + struct aws_byte_buf *buf = buf_ary[idx]; struct aws_byte_buf output; size_t len = nd_size_t(); /* assumptions */ - assume(aws_byte_buf_is_valid(&buf)); if (nd_bool()) { - output = buf; + output = *buf; } else { initialize_byte_buf(&output); assume(aws_byte_buf_is_valid(&output)); } /* save current state of the parameters */ - struct aws_byte_buf old = buf; + struct aws_byte_buf old = *buf; struct store_byte_from_buffer old_byte_from_buf; - save_byte_from_array(buf.buffer, buf.len, &old_byte_from_buf); - - sea_dsa_alias(&old,&buf,&output); + save_byte_from_array(buf->buffer, buf->len, &old_byte_from_buf); /* operation under verification */ - if (aws_byte_buf_advance(&buf, &output, len)) { - sassert(buf.len == old.len + len); - sassert(buf.capacity == old.capacity); - sassert(buf.allocator == old.allocator); + if (aws_byte_buf_advance(buf, &output, len)) { + sassert(buf->len == old.len + len); + sassert(buf->capacity == old.capacity); + sassert(buf->allocator == old.allocator); if (old.len > 0) { - assert_byte_from_buffer_matches(buf.buffer, &old_byte_from_buf); + assert_byte_from_buffer_matches(buf->buffer, &old_byte_from_buf); } sassert(output.len == 0); 
sassert(output.capacity == len); sassert(output.allocator == NULL); } else { - assert_byte_buf_equivalence(&buf, &old, &old_byte_from_buf); + assert_byte_buf_equivalence(buf, &old, &old_byte_from_buf); sassert(output.len == 0); sassert(output.capacity == 0); sassert(output.allocator == NULL); sassert(output.buffer == NULL); } - sassert(aws_byte_buf_is_valid(&buf)); + sassert(aws_byte_buf_is_valid(buf)); sassert(aws_byte_buf_is_valid(&output)); return 0; diff --git a/seahorn/jobs_crab/byte_buf_append_crab/aws_byte_buf_append_crab_harness.c b/seahorn/jobs_crab/byte_buf_append_crab/aws_byte_buf_append_crab_harness.c index 90570049..4b198a21 100644 --- a/seahorn/jobs_crab/byte_buf_append_crab/aws_byte_buf_append_crab_harness.c +++ b/seahorn/jobs_crab/byte_buf_append_crab/aws_byte_buf_append_crab_harness.c @@ -6,50 +6,57 @@ #include #include #include +#include +#include -extern void sea_dsa_alias(const void *p, ...); - -#define ARRAY_SIZE 5 +#define SIZE MAX_ARRAY_SIZE int main() { - size_t nd_ary_sz = nd_size_t(); - assume(nd_ary_sz <= ARRAY_SIZE); - struct aws_byte_buf tos[nd_ary_sz]; - for (size_t i = 0; i < nd_ary_sz; i++) { - initialize_byte_buf(&tos[i]); - assume(aws_byte_buf_is_valid(&tos[i])); + /* parameters */ + struct aws_byte_buf *buf_ary[SIZE]; + #pragma unroll 1 + for (unsigned i = 0; i < SIZE; ++i) { + struct aws_byte_buf *tmp = malloc(sizeof(struct aws_byte_buf)); + initialize_byte_buf(tmp); + assume(aws_byte_buf_is_valid(tmp)); + buf_ary[i] = tmp; } + uint8_t idx = nd_uint8_t(); + assume(idx < SIZE); + struct aws_byte_buf *to = buf_ary[idx]; /* save current state of the data structure */ - struct aws_byte_buf tos_old[nd_ary_sz]; - memcpy(&tos_old, &tos, sizeof(struct aws_byte_buf) * nd_ary_sz); - - struct aws_byte_cursor froms[nd_ary_sz]; - for (size_t i = 0; i < nd_ary_sz; i++) { - initialize_byte_cursor(&froms[i]); - assume(aws_byte_cursor_is_valid(&froms[i])); + struct aws_byte_buf to_old = *to; + + struct aws_byte_cursor *cursor_ary[SIZE]; + #pragma unroll 1 + for (unsigned i = 0; i < SIZE; ++i) { + struct aws_byte_cursor *tmp = malloc(sizeof(struct aws_byte_cursor)); + initialize_byte_cursor(tmp); + assume(aws_byte_cursor_is_valid(tmp)); + cursor_ary[i] = tmp; } + uint8_t idx2 = nd_uint8_t(); + assume(idx2 < SIZE); + struct aws_byte_cursor *from = cursor_ary[idx2]; /* save current state of the data structure */ - struct aws_byte_cursor froms_old[nd_ary_sz]; - memcpy(&froms_old, &froms, sizeof(struct aws_byte_cursor) * nd_ary_sz); - - for (size_t i = 0; i < nd_ary_sz; i++) { - if (aws_byte_buf_append(&tos[i], &froms[i]) == AWS_OP_SUCCESS) { - sassert(tos[i].len == tos_old[i].len + froms[i].len); - } else { - /* if the operation return an error, to must not change */ - // assert_bytes_match(tos_old[i].buffer, tos[i].buffer, tos[i].len); - sassert(tos_old[i].len == tos[i].len); - } - - sassert(aws_byte_buf_is_valid(&tos[i])); - sassert(aws_byte_cursor_is_valid(&froms[i])); - sassert(tos_old[i].allocator == tos[i].allocator); - sassert(tos_old[i].capacity == tos[i].capacity); - // assert_bytes_match(froms_old[i].ptr, froms[i].ptr, froms[i].len); - sassert(froms_old[i].len == froms[i].len); + struct aws_byte_cursor from_old = *from; + + if (aws_byte_buf_append(to, from) == AWS_OP_SUCCESS) { + sassert(to->len == to_old.len + from->len); + } else { + /* if the operation return an error, to must not change */ + assert_bytes_match(to_old.buffer, to->buffer, to->len); + sassert(to_old.len == to->len); } + sassert(aws_byte_buf_is_valid(to)); + sassert(aws_byte_cursor_is_valid(from)); 
+ sassert(to_old.allocator == to->allocator); + sassert(to_old.capacity == to->capacity); + assert_bytes_match(from_old.ptr, from->ptr, from->len); + sassert(from_old.len == from->len); + return 0; } diff --git a/seahorn/lib/CMakeLists.txt b/seahorn/lib/CMakeLists.txt index f1b80649..8cfecba6 100644 --- a/seahorn/lib/CMakeLists.txt +++ b/seahorn/lib/CMakeLists.txt @@ -18,6 +18,7 @@ add_library( allocator_override.c bcmp.c error_override.c + pthread_helper.c ) if(SEA_ALLOCATOR_CAN_FAIL) target_compile_definitions(sea_proofs PRIVATE SEA_ALLOCATOR_CAN_FAIL) diff --git a/seahorn/lib/bcmp.c b/seahorn/lib/bcmp.c index fc110f37..53eecd7b 100644 --- a/seahorn/lib/bcmp.c +++ b/seahorn/lib/bcmp.c @@ -174,4 +174,18 @@ INLINE void *memset(void * dst, int s, size_t count) { return dst; } #endif -#endif \ No newline at end of file +#endif + +INLINE void *memchr(const void *str, int c, size_t n) { + sassert(sea_is_dereferenceable(str, n)); + return __builtin_memchr(str, c, n); +} + +INLINE int strncmp(const char *str1, const char *str2, size_t n) { + for (size_t i = 0; i < n; i++) { + if (str1[i] == '\0' || str2[i] == '\0') { + return memcmp(str1, str2, i); + } + } + return memcmp(str1, str2, n); +} \ No newline at end of file diff --git a/seahorn/lib/bounds.c b/seahorn/lib/bounds.c index 47e8dc34..bcd47250 100644 --- a/seahorn/lib/bounds.c +++ b/seahorn/lib/bounds.c @@ -8,4 +8,5 @@ size_t sea_max_array_list_item_size(void) { return MAX_ITEM_SIZE; } size_t sea_max_array_list_len(void) { return MAX_INITIAL_ITEM_ALLOCATION; } size_t fuzz_max_array_list_len(void) { return MAX_INITIAL_ITEM_ALLOCATION_FUZZ; } size_t fuzz_max_array_list_item_size(void) { return MAX_ITEM_SIZE_FUZZ; } -size_t sea_max_table_size(void) { return MAX_TABLE_SIZE; } \ No newline at end of file +size_t sea_max_table_size(void) { return MAX_TABLE_SIZE; } +size_t crab_max_array_size(void) { return MAX_ARRAY_SIZE; } \ No newline at end of file diff --git a/seahorn/sea.yaml b/seahorn/sea.yaml deleted file mode 120000 index 6a1de3fd..00000000 --- a/seahorn/sea.yaml +++ /dev/null @@ -1 +0,0 @@ -sea_base.yaml \ No newline at end of file diff --git a/seahorn/sea.yaml b/seahorn/sea.yaml new file mode 100644 index 00000000..1182e03e --- /dev/null +++ b/seahorn/sea.yaml @@ -0,0 +1,46 @@ +verify_options: +# Optimization level + '-O3': '' +# +# PREPROCESSING +# + inline: '' + enable-loop-idiom: '' + enable-indvar: '' + no-lower-gv-init-struct: '' + externalize-addr-taken-functions: '' + no-kill-vaarg: '' + with-arith-overflow: true + horn-unify-assumes: true + horn-gsa: '' +# do not instrument library functions with memory assertions + no-fat-fns: 'bcmp,memcpy,assert_bytes_match,ensure_linked_list_is_allocated,sea_aws_linked_list_is_valid' +# context-sensitive type-aware alias analysis + dsa: sea-cs-t +# weak support for function pointers. 
sea-dsa is better but less stable + devirt-functions: 'types' +# bit-precise memory-precise operational semantics + bmc: opsem + horn-vcgen-use-ite: '' +# ignore control flow + horn-vcgen-only-dataflow: true +# reduce by data dependence + horn-bmc-coi: true +# static allocator supports symbolic allocation size + sea-opsem-allocator: static +# stack allocation from a symbolic starting point + horn-explicit-sp0: false +# lambdas for memory + horn-bv2-lambdas: '' +# use z3 simplifier during vcgen + horn-bv2-simplify: true +# wide memory manager to track pointer sizes + horn-bv2-extra-widemem: '' +# intermediate results in human readable form for debugging + '-S': '' +# keep intermediate results for debugging + keep-temps: '' + temp-dir: /tmp/verify-c-common +# time and result stats + horn-stats: true + # seapp-peel-loops: 1 \ No newline at end of file diff --git a/seahorn/sea_cex_base.yaml b/seahorn/sea_cex_base.yaml index f97a22b6..815f16f7 100644 --- a/seahorn/sea_cex_base.yaml +++ b/seahorn/sea_cex_base.yaml @@ -38,7 +38,7 @@ verify_options: # stack allocation from a symbolic starting point horn-explicit-sp0: false # lambdas for memory - horn-bv2-lambdas: false + horn-bv2-lambdas: true # use z3 simplifier during vcgen horn-bv2-simplify: true # wide memory manager to track pointer sizes diff --git a/seahorn/sea_crab_base.yaml b/seahorn/sea_crab_base.yaml index 9b0bc068..77ace0ac 100644 --- a/seahorn/sea_crab_base.yaml +++ b/seahorn/sea_crab_base.yaml @@ -9,11 +9,22 @@ verify_options: horn-bv2-crab-check-is-deref: '' # reduce used by object domain # horn-bv2-crab-reduce: '' -# Use crab infer allocation range +# loop peeling + seapp-peel-loops: 1 +# Use crab infer bounds / range (after loop unrolling) # horn-bv2-crab-rng: '' - horn-bmc-crab-dom: 'pk' + horn-bmc-crab-dom: 'zones' + horn-bv2-crab-obj-reduction: 'OPT' + # horn-bv2-crab-stats: '' + # log: 'opsem-crab' + # log: 'opsem-crab-ir' + crab-disable-warning: true # Statistics # horn-bv2-crab-stats: '' +# Use crab infer bounds / range (before loop unrolling) seapp-crab-lower-is-deref: true seapp-crab-dom: 'zones' - seapp-crab-stats: true \ No newline at end of file + seapp-crab-stats: true + seapp-crab-disable-warning: true + seapp-crab-obj-reduction: 'OPT' + # log: 'opsem-crab-ir:opsem-crab:seapp-crab-ir:seapp-crab' \ No newline at end of file diff --git a/verify.py.in b/verify.py.in index 1ea9dc8d..d591bcb2 100755 --- a/verify.py.in +++ b/verify.py.in @@ -51,7 +51,7 @@ def main(argv): help='Do not produce any output') argp.add_argument('--expect', type=str, default=None, help='Expected string in the output') - argp.add_argument('--command', type=str, default='fpf', + argp.add_argument('--command', type=str, default='fpcf', help='sea command') argp.add_argument('--cex', action='store_true', default=False, help='Counterexample mode') @@ -129,7 +129,7 @@ def main(argv): cmd.append(args.command) # Clam config if args.clam: - cmd = ['/home/ljgy/Works/seahorn/clam/build/run/bin/clam-yaml.py'] + cmd = ['/home/yusen/clam/build/run/bin/clam-yaml.py'] clam_config = os.path.join(script_dir, 'seahorn', 'clam.base.yaml') cmd.extend(['-y', clam_config]) From faf8f24042789f381d6e13f51072d3f0dc041490 Mon Sep 17 00:00:00 2001 From: Yusen Su Date: Thu, 29 Feb 2024 20:45:20 -0500 Subject: [PATCH 22/24] feat: config yamls and fix scripts --- scripts/get_exper_brunch_stat.py | 26 ++++++++++++- scripts/get_exper_res.py | 9 ++--- scripts/metrics.yaml | 4 ++ seahorn/CMakeLists.txt | 8 +++- seahorn/include/bounds.h | 1 - seahorn/lib/bounds.c | 3 +- 
seahorn/sea.yaml | 47 +---------------------- seahorn/sea_base.yaml | 9 +++-- seahorn/sea_cex_base.yaml | 4 +- seahorn/sea_crab.yaml | 18 +++++++++ seahorn/sea_crab_base.yaml | 64 ++++++++++++++++++++------------ 11 files changed, 107 insertions(+), 86 deletions(-) mode change 100644 => 120000 seahorn/sea.yaml create mode 100644 seahorn/sea_crab.yaml diff --git a/scripts/get_exper_brunch_stat.py b/scripts/get_exper_brunch_stat.py index f61f2342..47fa2429 100644 --- a/scripts/get_exper_brunch_stat.py +++ b/scripts/get_exper_brunch_stat.py @@ -3,7 +3,7 @@ import csv import argparse import yaml -import glob +import glob, os from collections import defaultdict METRICS_FILE = "metrics.yaml" @@ -25,6 +25,8 @@ def read_brunchstat_from_log(log_file_name): # remove _unsat_test cur_test = test_name[:-12] new_test_data['job_name'] = cur_test + new_test_data['pp_loc'] = get_loc_info(cur_test, True) + new_test_data['opsem_loc'] = get_loc_info(cur_test, False) data.append(new_test_data) elif line.startswith("BRUNCH_STAT"): stat = line.split() @@ -37,6 +39,27 @@ def read_brunchstat_from_log(log_file_name): log_file.close() return data +def get_loc_info(file_name, is_pp): + # Directory where the files are located + directory_path = "/tmp/verify-c-common/" + + ir_name = f"{file_name}.ir.peel.fat.pp.ms.bc" if is_pp else f"{file_name}.ir.peel.fat.pp.ms.crab.o.ul.cut.o.bc" + + # Construct the full path to the file with its pattern + full_path = os.path.join(directory_path, ir_name) + + # Get the LOC of the file + LOC = 'n/a' + with open(full_path, 'r') as file: + lines = file.readlines() + start = False + for line in lines: + if start: + LOC += 1 + if '@main' in line: + LOC = 0 + start = True + return LOC def read_symbiotic_bruchstat_from_log(log_file_name, xml_file_dir, time_out): log_file = open(log_file_name, 'r') @@ -115,6 +138,7 @@ def write_brunchstat_into_csv(data, out_file): for job_data in data: row = (job_data[k] for k, _ in metric_serlz) writer.writerow(row) + print(f'[Statistics] Please find stat csv file at {out_file}') # def read_timimg_results(result_dir): diff --git a/scripts/get_exper_res.py b/scripts/get_exper_res.py index c057ca94..531809d9 100644 --- a/scripts/get_exper_res.py +++ b/scripts/get_exper_res.py @@ -5,8 +5,7 @@ import glob import subprocess import argparse -from get_exper_brunch_stat import read_brunchstat_from_log, write_brunchstat_into_csv - +from get_exper_brunch_stat import * BUILDABSPATH = os.path.abspath('../build/') DATAABSPATH = os.path.abspath('../') + "/data" SEAHORN_ROOT = '/home/yusen/seawork/seahorn/build/run' # Put your seahorn root dir here @@ -62,7 +61,7 @@ def make_new_cmake_conf(): smack_args = "" return f'cmake -DSEA_LINK=llvm-link-{LLVM_VERSION} -DCMAKE_C_COMPILER=clang-{LLVM_VERSION}\ -DCMAKE_CXX_COMPILER=clang++-{LLVM_VERSION} -DSEA_ENABLE_KLEE={use_klee} {smack_args}\ - -DSEA_WITH_BLEEDING_EDGE={use_bleeding_edge} -DSEA_ENABLE_CRAB=OFF\ + -DSEA_WITH_BLEEDING_EDGE={use_bleeding_edge} -DSEA_ENABLE_CRAB=ON\ -DSEA_ENABLE_SYMBIOTIC={use_symbiotic} -DSEAHORN_ROOT={SEAHORN_ROOT} ../ -GNinja' @@ -96,7 +95,7 @@ def collect_res_from_ctest(file_name): read_data_from_xml(res_data) write_data_into_csv( "{dir}/{file}".format(dir="../data", file=file_name), res_data) - print("Done, find result csv file at: %s" % file_name) + print(f'[Results] Please find result csv file at: {os.path.abspath("../data")}') def run_ctest_for_seahorn(): @@ -108,7 +107,7 @@ def run_ctest_for_seahorn(): set_env = f'env VERIFY_FLAGS=\"{verify_flags}\"' cmake_conf = make_new_cmake_conf() 
command_lst = ["rm -rf *", cmake_conf, "ninja", - f'{set_env} ctest -j 12 -D ExperimentalTest -R . --timeout {args.timeout}'] + f'{set_env} ctest -j 1 -D ExperimentalTest -R . --timeout {args.timeout}'] make_build_path(extra) cddir = "cd " + BUILDABSPATH for strcmd in command_lst: diff --git a/scripts/metrics.yaml b/scripts/metrics.yaml index ca828360..5f8a2fd2 100644 --- a/scripts/metrics.yaml +++ b/scripts/metrics.yaml @@ -1,5 +1,7 @@ job_name: "job_name" Result: "result" +pp_loc: "pp_loc" +opsem_loc: "opsem_loc" bmc.circ_sz: "bmc_circuit_size" bmc.dag_sz: "bmc_dag_size" crab.opsem.isderef.not.solve: "opsem.isderef.not.solve" # for crab @@ -7,6 +9,8 @@ crab.opsem.isderef.solve: "opsem.isderef.solve" # for crab crab.pp.isderef.not.solve: "pp.isderef.not.solve" crab.pp.isderef.solve: "pp.isderef.solve" opsem.crab: "opsem_crab_time" +opsem.crab.solve: "opsem_crab_solve_time" +pp.crab.solve: "pp_crab_solve_time" seapp.crab: "pp_crab_time" BMC: "bmc_time" BMC.solve: "bmc_solve_time" diff --git a/seahorn/CMakeLists.txt b/seahorn/CMakeLists.txt index 1b57021f..11afe7c5 100644 --- a/seahorn/CMakeLists.txt +++ b/seahorn/CMakeLists.txt @@ -438,8 +438,12 @@ add_definitions( ) configure_file(include/config.h.in ${PROJECT_BINARY_DIR}/include/config.h) -configure_file(sea.yaml sea.yaml @ONLY) -configure_file(sea_crab_base.yaml sea.crab.yaml @ONLY) +if(SEA_ENABLE_CRAB) + configure_file(sea_crab_base.yaml sea.yaml @ONLY) +else() + configure_file(sea.yaml sea.yaml @ONLY) +endif() +configure_file(sea_crab.yaml sea.crab.yaml @ONLY) configure_file(sea_cex_base.yaml sea.cex.yaml @ONLY) configure_file(sea_vac.yaml sea.vac.yaml @ONLY) configure_file(sea_pcond.yaml sea.pcond.yaml @ONLY) diff --git a/seahorn/include/bounds.h b/seahorn/include/bounds.h index 721817e3..2d6f1a72 100644 --- a/seahorn/include/bounds.h +++ b/seahorn/include/bounds.h @@ -40,6 +40,5 @@ size_t fuzz_max_array_list_item_size(void); */ size_t fuzz_max_array_list_len(void); -size_t crab_max_array_size(void); SEAHORN_EXTERN_C_END diff --git a/seahorn/lib/bounds.c b/seahorn/lib/bounds.c index bcd47250..47e8dc34 100644 --- a/seahorn/lib/bounds.c +++ b/seahorn/lib/bounds.c @@ -8,5 +8,4 @@ size_t sea_max_array_list_item_size(void) { return MAX_ITEM_SIZE; } size_t sea_max_array_list_len(void) { return MAX_INITIAL_ITEM_ALLOCATION; } size_t fuzz_max_array_list_len(void) { return MAX_INITIAL_ITEM_ALLOCATION_FUZZ; } size_t fuzz_max_array_list_item_size(void) { return MAX_ITEM_SIZE_FUZZ; } -size_t sea_max_table_size(void) { return MAX_TABLE_SIZE; } -size_t crab_max_array_size(void) { return MAX_ARRAY_SIZE; } \ No newline at end of file +size_t sea_max_table_size(void) { return MAX_TABLE_SIZE; } \ No newline at end of file diff --git a/seahorn/sea.yaml b/seahorn/sea.yaml deleted file mode 100644 index 1182e03e..00000000 --- a/seahorn/sea.yaml +++ /dev/null @@ -1,46 +0,0 @@ -verify_options: -# Optimization level - '-O3': '' -# -# PREPROCESSING -# - inline: '' - enable-loop-idiom: '' - enable-indvar: '' - no-lower-gv-init-struct: '' - externalize-addr-taken-functions: '' - no-kill-vaarg: '' - with-arith-overflow: true - horn-unify-assumes: true - horn-gsa: '' -# do not instrument library functions with memory assertions - no-fat-fns: 'bcmp,memcpy,assert_bytes_match,ensure_linked_list_is_allocated,sea_aws_linked_list_is_valid' -# context-sensitive type-aware alias analysis - dsa: sea-cs-t -# weak support for function pointers. 
sea-dsa is better but less stable - devirt-functions: 'types' -# bit-precise memory-precise operational semantics - bmc: opsem - horn-vcgen-use-ite: '' -# ignore control flow - horn-vcgen-only-dataflow: true -# reduce by data dependence - horn-bmc-coi: true -# static allocator supports symbolic allocation size - sea-opsem-allocator: static -# stack allocation from a symbolic starting point - horn-explicit-sp0: false -# lambdas for memory - horn-bv2-lambdas: '' -# use z3 simplifier during vcgen - horn-bv2-simplify: true -# wide memory manager to track pointer sizes - horn-bv2-extra-widemem: '' -# intermediate results in human readable form for debugging - '-S': '' -# keep intermediate results for debugging - keep-temps: '' - temp-dir: /tmp/verify-c-common -# time and result stats - horn-stats: true - # seapp-peel-loops: 1 \ No newline at end of file diff --git a/seahorn/sea.yaml b/seahorn/sea.yaml new file mode 120000 index 00000000..6a1de3fd --- /dev/null +++ b/seahorn/sea.yaml @@ -0,0 +1 @@ +sea_base.yaml \ No newline at end of file diff --git a/seahorn/sea_base.yaml b/seahorn/sea_base.yaml index 0595bd3b..f84918b6 100644 --- a/seahorn/sea_base.yaml +++ b/seahorn/sea_base.yaml @@ -11,7 +11,7 @@ verify_options: externalize-addr-taken-functions: '' no-kill-vaarg: '' with-arith-overflow: true - horn-unify-assumes: true + horn-unify-assumes: false horn-gsa: '' # do not instrument library functions with memory assertions no-fat-fns: 'bcmp,memcpy,assert_bytes_match,ensure_linked_list_is_allocated,sea_aws_linked_list_is_valid' @@ -23,9 +23,9 @@ verify_options: bmc: opsem horn-vcgen-use-ite: '' # ignore control flow - horn-vcgen-only-dataflow: true + horn-vcgen-only-dataflow: false # reduce by data dependence - horn-bmc-coi: true + horn-bmc-coi: false # static allocator supports symbolic allocation size sea-opsem-allocator: static # stack allocation from a symbolic starting point @@ -43,4 +43,5 @@ verify_options: temp-dir: /tmp/verify-c-common # time and result stats horn-stats: true - ignore-define-verifier-fns: '' + # seapp-peel-loops: 1 + ignore-define-verifier-fns: '' \ No newline at end of file diff --git a/seahorn/sea_cex_base.yaml b/seahorn/sea_cex_base.yaml index 815f16f7..76a8ef1e 100644 --- a/seahorn/sea_cex_base.yaml +++ b/seahorn/sea_cex_base.yaml @@ -7,7 +7,7 @@ # verify_options: # Optimization lelel. 
Lower to O1 for clearer counterexamples - '-O1': '' + '-O3': '' # # PREPROCESSING # @@ -38,7 +38,7 @@ verify_options: # stack allocation from a symbolic starting point horn-explicit-sp0: false # lambdas for memory - horn-bv2-lambdas: true + horn-bv2-lambdas: '' # use z3 simplifier during vcgen horn-bv2-simplify: true # wide memory manager to track pointer sizes diff --git a/seahorn/sea_crab.yaml b/seahorn/sea_crab.yaml new file mode 100644 index 00000000..fcf2871d --- /dev/null +++ b/seahorn/sea_crab.yaml @@ -0,0 +1,18 @@ +verify_options: +# Use crab infer bounds / range (before loop unrolling) + seapp-crab-lower-is-deref: true + seapp-crab-dom: 'zones' + seapp-crab-stats: true + seapp-crab-disable-warning: true + seapp-crab-obj-reduction: 'OPT' +# Use crab infer bounds(after loop unrolling) + horn-bv2-crab-check-is-deref: '' + horn-bmc-crab-dom: 'zones' + horn-bv2-crab-obj-reduction: 'OPT' + # log: 'opsem-crab' + # log: 'opsem-crab-ir' + crab-disable-warning: true +# Statistics + # crab-show-stats: '' + # horn-bv2-crab-stats: '' + # log: 'opsem-crab-ir:opsem-crab:seapp-crab-ir:seapp-crab' \ No newline at end of file diff --git a/seahorn/sea_crab_base.yaml b/seahorn/sea_crab_base.yaml index 77ace0ac..cc06860c 100644 --- a/seahorn/sea_crab_base.yaml +++ b/seahorn/sea_crab_base.yaml @@ -1,30 +1,48 @@ verify_options: +# Optimization level. + '-O3': '' +# +# PREPROCESSING +# + inline: '' + enable-loop-idiom: '' + enable-indvar: '' + no-lower-gv-init-struct: '' + externalize-addr-taken-functions: '' + no-kill-vaarg: '' + with-arith-overflow: true + # disable program transformation to single assume horn-unify-assumes: false +# do not instrument library functions with memory assertions + no-fat-fns: 'bcmp,memcpy,assert_bytes_match,ensure_linked_list_is_allocated,sea_aws_linked_list_is_valid' horn-gsa: false +# context-sensitive type-aware alias analysis + dsa: sea-cs-t +# weak support for function pointers. 
sea-dsa is better but less stable + devirt-functions: 'types' +# bit-precise memory-precise operational semantics + bmc: opsem + horn-vcgen-use-ite: '' # ignore control flow horn-vcgen-only-dataflow: false # reduce by data dependence horn-bmc-coi: false -# Use crab solve sea.is_deref - horn-bv2-crab-check-is-deref: '' -# reduce used by object domain - # horn-bv2-crab-reduce: '' -# loop peeling - seapp-peel-loops: 1 -# Use crab infer bounds / range (after loop unrolling) - # horn-bv2-crab-rng: '' - horn-bmc-crab-dom: 'zones' - horn-bv2-crab-obj-reduction: 'OPT' - # horn-bv2-crab-stats: '' - # log: 'opsem-crab' - # log: 'opsem-crab-ir' - crab-disable-warning: true -# Statistics - # horn-bv2-crab-stats: '' -# Use crab infer bounds / range (before loop unrolling) - seapp-crab-lower-is-deref: true - seapp-crab-dom: 'zones' - seapp-crab-stats: true - seapp-crab-disable-warning: true - seapp-crab-obj-reduction: 'OPT' - # log: 'opsem-crab-ir:opsem-crab:seapp-crab-ir:seapp-crab' \ No newline at end of file +# static allocator supports symbolic allocation size + sea-opsem-allocator: static +# stack allocation from a symbolic starting point + horn-explicit-sp0: false +# lambdas for memory + horn-bv2-lambdas: '' +# use z3 simplifier during vcgen + horn-bv2-simplify: true +# wide memory manager to track pointer sizes + horn-bv2-extra-widemem: '' +# intermediate results in human readable form for debugging + '-S': '' +# keep intermediate results for debugging + keep-temps: '' + temp-dir: /tmp/verify-c-common +# time and result stats + horn-stats: true +# ignore converting function with prefix sea_nd to nd function + ignore-define-verifier-fns: '' \ No newline at end of file From 1c16e590d692c0d7d1462fb4926dabcfe46a34b5 Mon Sep 17 00:00:00 2001 From: Yusen Su Date: Thu, 29 Feb 2024 20:49:17 -0500 Subject: [PATCH 23/24] feat(stub): add missing stubs for new benchs --- .../aws_hash_byte_cursor_ptr_overrides.c | 7 ++ .../aws_hash_string_overrides.c | 7 ++ .../aws-c-common-stubs/aws_logger_overrides.c | 12 ++++ .../aws_nospec_mask_overrides.c | 49 +++++++++++++ .../aws_ref_count_overrides.c | 68 +++++++++++++++++++ .../aws-c-common-stubs/snprintf_overrides.c | 12 ++++ 6 files changed, 155 insertions(+) create mode 100644 seahorn/aws-c-common-stubs/aws_hash_byte_cursor_ptr_overrides.c create mode 100644 seahorn/aws-c-common-stubs/aws_hash_string_overrides.c create mode 100644 seahorn/aws-c-common-stubs/aws_logger_overrides.c create mode 100644 seahorn/aws-c-common-stubs/aws_nospec_mask_overrides.c create mode 100644 seahorn/aws-c-common-stubs/aws_ref_count_overrides.c create mode 100644 seahorn/aws-c-common-stubs/snprintf_overrides.c diff --git a/seahorn/aws-c-common-stubs/aws_hash_byte_cursor_ptr_overrides.c b/seahorn/aws-c-common-stubs/aws_hash_byte_cursor_ptr_overrides.c new file mode 100644 index 00000000..f2b54437 --- /dev/null +++ b/seahorn/aws-c-common-stubs/aws_hash_byte_cursor_ptr_overrides.c @@ -0,0 +1,7 @@ +#include +#include + +uint64_t aws_hash_byte_cursor_ptr(const void *item) { + // (void)item; + return nd_uint64_t(); +} \ No newline at end of file diff --git a/seahorn/aws-c-common-stubs/aws_hash_string_overrides.c b/seahorn/aws-c-common-stubs/aws_hash_string_overrides.c new file mode 100644 index 00000000..1f91efdf --- /dev/null +++ b/seahorn/aws-c-common-stubs/aws_hash_string_overrides.c @@ -0,0 +1,7 @@ +#include +#include + +uint64_t aws_hash_string(const void *item) { + // (void)item; + return nd_uint64_t(); +} \ No newline at end of file diff --git 
a/seahorn/aws-c-common-stubs/aws_logger_overrides.c b/seahorn/aws-c-common-stubs/aws_logger_overrides.c
new file mode 100644
index 00000000..7a842751
--- /dev/null
+++ b/seahorn/aws-c-common-stubs/aws_logger_overrides.c
@@ -0,0 +1,12 @@
+
+#include
+
+struct aws_logger s_null_logger = {
+    .vtable = NULL,
+    .allocator = NULL,
+    .p_impl = NULL,
+};
+
+struct aws_logger *aws_logger_get(void) {
+    return &s_null_logger;
+}
\ No newline at end of file
diff --git a/seahorn/aws-c-common-stubs/aws_nospec_mask_overrides.c b/seahorn/aws-c-common-stubs/aws_nospec_mask_overrides.c
new file mode 100644
index 00000000..b5f50fbf
--- /dev/null
+++ b/seahorn/aws-c-common-stubs/aws_nospec_mask_overrides.c
@@ -0,0 +1,49 @@
+#include
+#include
+
+/**
+ * FUNCTION: aws_nospec_mask
+ *
+ * This function overrides the original implementation of the
+ * aws_nospec_mask function from byte_buf.h with a simplified version.
+ *
+ * We follow the spec in the comment below and implement the checks directly,
+ * without performing the bitwise mask computation.
+ * FIXME: something is currently wrong on the BMC side
+ */
+
+/**
+ * If index >= bound, bound > (SIZE_MAX / 2), or index > (SIZE_MAX / 2), returns
+ * 0. Otherwise, returns UINTPTR_MAX. This function is designed to return the correct
+ * value even under CPU speculation conditions, and is intended to be used for
+ * SPECTRE mitigation purposes.
+ */
+size_t aws_nospec_mask(size_t index, size_t bound) {
+    /*
+     * SPECTRE mitigation - we compute a mask that will be zero if len < 0
+     * or len >= buf->len, and all-ones otherwise, and AND it into the index.
+     * It is critical that we avoid any branches in this logic.
+     */
+
+    /*
+     * Hide the index value from the optimizer. This helps ensure that all this
+     * logic doesn't get eliminated.
+     */
+#if defined(__GNUC__) || defined(__clang__)
+    __asm__ __volatile__("" : "+r"(index));
+#endif
+#if defined(_MSVC_LANG)
+    /*
+     * MSVC doesn't have a good way for us to blind the optimizer, and doesn't
+     * even have inline asm on x64. Some experimentation indicates that this
+     * hack seems to confuse it sufficiently for our needs.
+     */
+    *((volatile uint8_t *)&index) += 0;
+#endif
+
+    if (index >= bound || bound > (SIZE_MAX / 2) || index > (SIZE_MAX / 2)) {
+        return 0;
+    }
+
+    return UINTPTR_MAX;
+}
\ No newline at end of file
diff --git a/seahorn/aws-c-common-stubs/aws_ref_count_overrides.c b/seahorn/aws-c-common-stubs/aws_ref_count_overrides.c
new file mode 100644
index 00000000..2f1cb883
--- /dev/null
+++ b/seahorn/aws-c-common-stubs/aws_ref_count_overrides.c
@@ -0,0 +1,68 @@
+#include
+#include
+#include
+
+#include
+#include
+#include
+
+// The GCC built-in __atomic_fetch_add is not supported by SeaHorn.
+// In detail, the LLVM atomicrmw instruction is not supported.
+// SeaHorn crashes on the original implementation.
+
+/**
+ * @brief Atomically adds a value to a 64-bit integer.
+ *
+ * This function performs an atomic addition operation on a 64-bit integer.
+ * It atomically adds 'n' to the value wrapped by 'var' and returns
+ * the original value of the 64-bit integer before the addition.
+ *
+ * @param var A pointer to an integer of type size_t wrapped by struct
+ * aws_atomic_var to which 'n' will be added.
+ * @param n The value to be added to the integer wrapped by 'var'.
+ * @return The original value of the integer of type size_t before the addition.
+ *
+ * @note The overriding function is a simpler implementation for SeaHorn.
+ * We omit the multi-threading environment.
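+ *
+ * For example, under the single-threaded semantics implemented below, a
+ * counter initialized with aws_atomic_init_int(&var, 1) gives
+ * sea_atomic_fetch_add(&var, 1) == 1 (counter becomes 2), and a subsequent
+ * sea_atomic_fetch_sub(&var, 1) == 2 (counter returns to 1).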
+ */ +size_t sea_atomic_fetch_add(volatile struct aws_atomic_var *var, size_t n) { + size_t *val = (size_t *)&AWS_ATOMIC_VAR_INTVAL(var); + size_t old_val = *val; + *val += n; + return old_val; +} + +/** + * Atomically subtracts n from *var, and returns the previous value of *var. + * Uses sequentially consistent ordering. + */ +size_t sea_atomic_fetch_sub(volatile struct aws_atomic_var *var, size_t n) { + size_t *val = (size_t *)&AWS_ATOMIC_VAR_INTVAL(var); + size_t old_val = *val; + *val -= n; + return old_val; +} + +void aws_ref_count_init(struct aws_ref_count *ref_count, void *object, aws_simple_completion_callback *on_zero_fn) { + aws_atomic_init_int(&ref_count->ref_count, 1); + ref_count->object = object; + ref_count->on_zero_fn = on_zero_fn; +} + +void *aws_ref_count_acquire(struct aws_ref_count *ref_count) { + size_t old_value = sea_atomic_fetch_add(&ref_count->ref_count, 1); + AWS_ASSERT(old_value > 0 && "refcount has been zero, it's invalid to use it again."); + (void)old_value; + + return ref_count->object; +} + +size_t aws_ref_count_release(struct aws_ref_count *ref_count) { + size_t old_value = sea_atomic_fetch_sub(&ref_count->ref_count, 1); + AWS_ASSERT(old_value > 0 && "refcount has gone negative"); + if (old_value == 1) { + ref_count->on_zero_fn(ref_count->object); + } + + return old_value - 1; +} diff --git a/seahorn/aws-c-common-stubs/snprintf_overrides.c b/seahorn/aws-c-common-stubs/snprintf_overrides.c new file mode 100644 index 00000000..7b03e397 --- /dev/null +++ b/seahorn/aws-c-common-stubs/snprintf_overrides.c @@ -0,0 +1,12 @@ +/** + + Implementation of snprintf() + + */ +#include + +int snprintf(char *str, size_t size, const char *format, ...) { + // avoid unhandled instruction for snprintf + // treat as noop + return 1; +} \ No newline at end of file From 7e1b1d39a191445ec6c35b4971b7a44e924bac9b Mon Sep 17 00:00:00 2001 From: Yusen Su Date: Mon, 23 Sep 2024 16:08:05 -0400 Subject: [PATCH 24/24] feat(submodule): add aws-c-libs into submodule --- .gitmodules | 15 +++++++++++++++ aws-c-cal | 1 + aws-c-common | 1 + aws-c-compression | 1 + aws-c-io | 1 + aws-c-sdkutils | 1 + 6 files changed, 20 insertions(+) create mode 100644 .gitmodules create mode 160000 aws-c-cal create mode 160000 aws-c-common create mode 160000 aws-c-compression create mode 160000 aws-c-io create mode 160000 aws-c-sdkutils diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000..766dc8cc --- /dev/null +++ b/.gitmodules @@ -0,0 +1,15 @@ +[submodule "aws-c-common"] + path = aws-c-common + url = https://github.com/awslabs/aws-c-common +[submodule "aws-c-cal"] + path = aws-c-cal + url = https://github.com/awslabs/aws-c-cal +[submodule "aws-c-compression"] + path = aws-c-compression + url = https://github.com/awslabs/aws-c-compression +[submodule "aws-c-io"] + path = aws-c-io + url = https://github.com/awslabs/aws-c-io +[submodule "aws-c-sdkutils"] + path = aws-c-sdkutils + url = https://github.com/awslabs/aws-c-sdkutils diff --git a/aws-c-cal b/aws-c-cal new file mode 160000 index 00000000..13b1e969 --- /dev/null +++ b/aws-c-cal @@ -0,0 +1 @@ +Subproject commit 13b1e969b7002337120a2da9868e53cc811f2aa4 diff --git a/aws-c-common b/aws-c-common new file mode 160000 index 00000000..4ffa8864 --- /dev/null +++ b/aws-c-common @@ -0,0 +1 @@ +Subproject commit 4ffa8864ef436d1384d75eab7119ebcfae492b73 diff --git a/aws-c-compression b/aws-c-compression new file mode 160000 index 00000000..7d66baa6 --- /dev/null +++ b/aws-c-compression @@ -0,0 +1 @@ +Subproject commit 
7d66baa6fcc6a163e9bf906567f1c9eb7c886065 diff --git a/aws-c-io b/aws-c-io new file mode 160000 index 00000000..0248644a --- /dev/null +++ b/aws-c-io @@ -0,0 +1 @@ +Subproject commit 0248644ab0fbdfac414ed919c426b4d1302b79ea diff --git a/aws-c-sdkutils b/aws-c-sdkutils new file mode 160000 index 00000000..9d918f0b --- /dev/null +++ b/aws-c-sdkutils @@ -0,0 +1 @@ +Subproject commit 9d918f0bb8587165efe2d8c8987e99e72d2402c9