Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Integrate build system for crab inferring sea.is_dereferenceable #102

Draft
wants to merge 24 commits into
base: master
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
24 commits
Select commit Hold shift + click to select a range
29d60db
feat(yaml): add yaml file for running seahorn with crab
LinerSu Sep 22, 2021
1397518
feat(lib): add nd_xx function for clam
LinerSu Sep 22, 2021
f9ed072
feat(cmake): modify cmake to allow crab infer sea.is_dereferenceable
LinerSu Sep 22, 2021
1ff789a
feat(lib): add assumptions for string not empty and malloc cannot fai…
LinerSu Sep 22, 2021
a5449e2
feat(jobs): adapt crab interpret predicate function
LinerSu Sep 22, 2021
65cf58d
fix(py): update script to collect crab's results
LinerSu Sep 23, 2021
df96eaa
fix(yaml): extend crab yaml by using sea_base.yaml
LinerSu Sep 23, 2021
779d5f6
fix(py): update scripts to collect listing statistics
LinerSu Sep 28, 2021
58fdd0d
feat(py): introduce new statistic to collect crab running time
LinerSu Apr 20, 2022
aba9c21
feat(crab): use macro to avoid crab infer 0 size allocation
LinerSu May 10, 2022
20639a1
feat(yaml): adapt yaml config with new sea-crab options
LinerSu May 10, 2022
a920e96
feat(crab): adapt malloc not fail for crab
LinerSu May 13, 2022
491e332
feat(jobs): add hard work for crab
LinerSu May 13, 2022
aa898d3
feat(crab): adapt assumption to no-disjunctive
LinerSu Jun 7, 2022
0993691
feat(scripts): update scripts to collect statistics for seapp
LinerSu Jun 7, 2022
a88f476
test(clam): add configuration for clam
LinerSu Mar 18, 2023
01b8789
feat(jobs): add symbiotic to cover all workable unit proofs
LinerSu Jan 5, 2022
8473f4a
feat(scripts): add symbiotic into experimental script
LinerSu Jan 10, 2022
d8d2980
fix(py): update script to collect crab's results
LinerSu Sep 23, 2021
eebe6ca
test(clam): add configuration for clam
LinerSu Mar 18, 2023
847024b
feat(jobs): add more crab settings
Feb 21, 2024
faf8f24
feat: config yamls and fix scripts
Mar 1, 2024
1c16e59
feat(stub): add missing stubs for new benchs
Mar 1, 2024
7e1b1d3
feat(submodule): add aws-c-libs into submodule
LinerSu Sep 23, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .gitignore
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
build
build*
build_fuzz
debug
cex
Expand Down
15 changes: 15 additions & 0 deletions .gitmodules
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
[submodule "aws-c-common"]
path = aws-c-common
url = https://github.com/awslabs/aws-c-common
[submodule "aws-c-cal"]
path = aws-c-cal
url = https://github.com/awslabs/aws-c-cal
[submodule "aws-c-compression"]
path = aws-c-compression
url = https://github.com/awslabs/aws-c-compression
[submodule "aws-c-io"]
path = aws-c-io
url = https://github.com/awslabs/aws-c-io
[submodule "aws-c-sdkutils"]
path = aws-c-sdkutils
url = https://github.com/awslabs/aws-c-sdkutils
10 changes: 10 additions & 0 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,8 @@ option(SEA_LLVM_MEM_BUILTINS "Use LLVM builtins memcpy/memmove without memory sa


option(SEA_ENABLE_FUZZ "Enable fuzzing" OFF)
option(SEA_ENABLE_CRAB "Enable Seahorn using Crab" OFF)
option(SEA_ENABLE_CLAM "Enable Seahorn using Clam" OFF)
option(SEA_WITH_BLEEDING_EDGE "Enable bleeding edge proofs" OFF)

option(SEA_ENABLE_KLEE "Enable klee" OFF)
Expand Down Expand Up @@ -57,6 +59,10 @@ set(SEA_PP "${SEAHORN_ROOT}/bin/seapp" CACHE STRING "Path to seapp binary")
set(LLVMIR_OPT ${SEA_OPT})

set(AWS_C_COMMON_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/aws-c-common)
set(AWS_C_COMPRESSION_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/aws-c-compression)
set(AWS_C_SDKUTILS_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/aws-c-sdkutils)
set(AWS_C_IO_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/aws-c-io)
set(AWS_C_CAL_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/aws-c-cal)

if(SEA_ENABLE_FUZZ)
find_package(aws-c-common REQUIRED)
Expand All @@ -74,5 +80,9 @@ include_directories(BEFORE ${CMAKE_CURRENT_BINARY_DIR}/include)

include_directories(seahorn/include)
include_directories(aws-c-common/include)
include_directories(aws-c-compression/include)
include_directories(aws-c-sdkutils/include)
include_directories(aws-c-io/include)
include_directories(aws-c-cal/include)
include_directories(${SEAHORN_ROOT}/include)
add_subdirectory(seahorn)
1 change: 1 addition & 0 deletions aws-c-cal
Submodule aws-c-cal added at 13b1e9
1 change: 1 addition & 0 deletions aws-c-common
Submodule aws-c-common added at 4ffa88
1 change: 1 addition & 0 deletions aws-c-compression
Submodule aws-c-compression added at 7d66ba
1 change: 1 addition & 0 deletions aws-c-io
Submodule aws-c-io added at 024864
1 change: 1 addition & 0 deletions aws-c-sdkutils
Submodule aws-c-sdkutils added at 9d918f
62 changes: 57 additions & 5 deletions scripts/get_exper_brunch_stat.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import csv
import argparse
import yaml
import glob
import glob, os
from collections import defaultdict

METRICS_FILE = "metrics.yaml"
Expand All @@ -17,14 +17,16 @@ def read_brunchstat_from_log(log_file_name):
cur_test = None
while line:
# look for next test
new_test = re.search("Test: ", line)
new_test = re.search("Testing: ", line)
if new_test:
new_test_data = defaultdict(lambda: 'n/a')
span = new_test.span()
test_name = line[span[1]:]
# remove _unsat_test
cur_test = test_name[:-12]
new_test_data['job_name'] = cur_test
new_test_data['pp_loc'] = get_loc_info(cur_test, True)
new_test_data['opsem_loc'] = get_loc_info(cur_test, False)
data.append(new_test_data)
elif line.startswith("BRUNCH_STAT"):
stat = line.split()
Expand All @@ -37,6 +39,27 @@ def read_brunchstat_from_log(log_file_name):
log_file.close()
return data

def get_loc_info(file_name, is_pp):
    """Count the lines of LLVM IR following the '@main' definition in a
    generated bitcode dump for the given test.

    Args:
        file_name: base name of the test/job (without extension).
        is_pp: if True, inspect the pre-processed IR
            (``<name>.ir.peel.fat.pp.ms.bc``); otherwise inspect the
            opsem-stage IR (``<name>.ir.peel.fat.pp.ms.crab.o.ul.cut.o.bc``).

    Returns:
        The number of lines after the last occurrence of ``@main`` in the
        file, or the string ``'n/a'`` when the file is missing/unreadable
        or contains no ``@main`` marker.
    """
    # Directory where the generated IR files are located.
    # NOTE(review): hardcoded build-output path — assumes the verify
    # jobs were run with this output directory; confirm against CI config.
    directory_path = "/tmp/verify-c-common/"

    ir_name = f"{file_name}.ir.peel.fat.pp.ms.bc" if is_pp else f"{file_name}.ir.peel.fat.pp.ms.crab.o.ul.cut.o.bc"

    # Construct the full path to the file with its pattern
    full_path = os.path.join(directory_path, ir_name)

    # Default to 'n/a' so callers get a sentinel instead of a crash when
    # the IR file was never produced (e.g. the job failed or timed out).
    LOC = 'n/a'
    try:
        with open(full_path, 'r') as file:
            start = False
            for line in file:
                # Count every line once we have passed the '@main' marker.
                # This check runs before the '@main' test so the marker
                # line itself is not counted.
                if start:
                    LOC += 1
                if '@main' in line:
                    # Reset on every '@main' occurrence, so the count
                    # reflects lines after the last one.
                    LOC = 0
                    start = True
    except OSError:
        # Missing or unreadable file: keep the 'n/a' sentinel.
        pass
    return LOC

def read_symbiotic_bruchstat_from_log(log_file_name, xml_file_dir, time_out):
log_file = open(log_file_name, 'r')
Expand Down Expand Up @@ -115,13 +138,42 @@ def write_brunchstat_into_csv(data, out_file):
for job_data in data:
row = (job_data[k] for k, _ in metric_serlz)
writer.writerow(row)
print(f'[Statistics] Please find stat csv file at {out_file}')


# def read_timimg_results(result_dir):
# log_file = open(log_file_name, 'r')
# line = log_file.readline()
# data = defaultdict(list)
# row_dict = {}
# while line:
# if "************** ANALYSIS RESULTS ****************" in line:
# result_status = Status.SUCCESS
# if "** OS Error:" in line:
# result_status = Status.TIMEOUT
# if "BRUNCH_STAT" in line:
# parts = line.strip().split()
# name = parts[1]
# if name == "Clam":
# number = float(parts[2])
# if result_status == Status.CLAM_ERROR:
# pairs.append([dirpath.strip().split('/')[1], "error"])
# elif result_status == Status.TIMEOUT:
# pairs.append([dirpath.strip().split('/')[1], "timeout"])
# else:
# pairs.append([dirpath.strip().split('/')[1], number])
# draw_results_table(pairs, result_dir)

if __name__ == "__main__":
parser = argparse.ArgumentParser(
description='in and out files')
parser.add_argument('logfile', type=str, help=".log file to read from")
parser.add_argument('--outfile', '-o', type=str,
default="brunch_stat.csv", help="ouput csv filename")
parser.add_argument(
'--outfile',
'-o',
type=str,
default="brunch_stat.csv",
help="ouput csv filename")
args = parser.parse_args()
data = read_brunchstat_from_log(args.logfile)
write_brunchstat_into_csv(data, args.outfile)
write_brunchstat_into_csv(data, args.outfile)
Loading
Loading