ResultTable.fs
module RInGen.ResultTable
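// Construction of CSV result tables: per-benchmark solver statistics, consistency checks and aggregate counters.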
open SMTLIB2
open System.Collections.Generic
open System.IO
open System.Globalization
open CsvHelper
open SolverResult
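
// For every solver, rewrite both file names of a (transformed file, result file) pair to that solver's extension.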
let private substituteRelations exts filenames =
    let change f (f1, f2) = f f1, f f2
    List.map2 (fun ext -> change <| fun filename -> Path.ChangeExtension(filename, ext)) exts filenames
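
// Walks the original benchmark directory together with every solver's (translation, result) directory pair
// and collects the statistics of each test into a (test name, per-solver results) list.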
let private collectAllResults (exts : string list) originalDirectory transAndResultDirs =
    let results = Dictionary<_, _>()
    let generateResultLine (testName : path) transAndResultFileNames =
        let transAndResultFileNames = substituteRelations exts transAndResultFileNames
        let line = transAndResultFileNames |> List.map Statistics.tryReadStatistics
        results.Add(testName, line)
    walk_through_simultaneously originalDirectory transAndResultDirs generateResultLine
    results |> Dictionary.toList
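
// Detects tests with contradictory answers: bit 0b01 marks a SAT answer, bit 0b10 an UNSAT answer;
// reaching 0b11 means both occurred in the same row.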
type private consistency = HasNo | HasSAT | HasUNSAT | HasBoth

let private checkConsistency testName line =
    let isConsistent c = function
        | Some (Statistics.SolvingStatus x) ->
            let x =
                match x.solverResult with
                | SAT _ -> 0b01
                | UNSAT _ -> 0b10
                | _ -> 0b0
            let c = c ||| x
            if c = 0b11 then None else Some c
        | _ -> Some c
    match List.foldChoose isConsistent 0b0 line with
    | None -> print_err_verbose $"test {testName} has inconsistent answers"
    | _ -> ()
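
// Writes the CSV table: a header row, the aggregate statistics rows, then one row per test with every solver's result.
// Returns the path of the written file.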
let private BuildResultTable writeHeader (writeResult : CsvWriter -> Statistics.status option -> unit) writeStatistics (filename : string) header results =
    use writer = new StreamWriter(filename)
    use csv = new CsvWriter(writer, CultureInfo.InvariantCulture)
    csv.WriteField("Filename")
    for headerEntry in header do
        writeHeader csv headerEntry
    csv.NextRecord()
    writeStatistics csv results
    for testName, line in results do
        csv.WriteField(testName)
        checkConsistency testName line
        for result in line do
            writeResult csv result
        csv.NextRecord()
    filename
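
// Collects all results and writes them to a .csv file placed next to the original benchmark directory.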
let private GenerateResultTable writeHeader writeResult writeStatistics originalDirectory (names : string list) (exts : string list) transAndResultDirs =
    let results = collectAllResults exts originalDirectory transAndResultDirs
    let tablePath = Path.ChangeExtension(originalDirectory, ".csv")
    BuildResultTable writeHeader writeResult writeStatistics tablePath names results
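
// Predicates classifying a single result and counters aggregating how many benchmarks fall into each category.
// A "Unique" checker additionally requires that exactly one solver in the row satisfies the predicate.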
module Checkers =
    type Checker (name, checker : Statistics.status -> bool, checkUnique) =
        member x.Name = $"""%s{name}%s{if checkUnique then " Unique" else ""}"""
        member x.Check result line = checker result && (not checkUnique || List.countWith checker line = 1)
        new (name, checker) = Checker(name, checker, false)

    let ifSolvingResult f = function Statistics.SolvingStatus stat -> f stat.solverResult | _ -> false
    let isSAT = function SAT _ -> true | _ -> false
    let isSAT_FM = function SAT(FiniteModel _) -> true | _ -> false
    let isSAT_Elem = function SAT ElemFormula -> true | _ -> false
    let isSAT_SizeElem = function SAT SizeElemFormula -> true | _ -> false
    let isUNSAT = function UNSAT _ -> true | _ -> false
    let isERROR = function ERROR _ -> true | _ -> false
    let isUNKNOWN = function UNKNOWN _ -> true | _ -> false
    let isTIMELIMIT = function SOLVER_TIMELIMIT -> true | _ -> false
    let isOUTOFMEMORY = function OUTOFMEMORY -> true | _ -> false
    let isTIMELIMIT_TRANS = (=) <| Statistics.TransformationStatus Transformers.TRANS_TIMELIMIT
    let isOTHER_TRANS = function Statistics.TransformationStatus s -> s <> Transformers.TRANS_TIMELIMIT | _ -> false

    let checkers = [
        Checker("SAT", ifSolvingResult isSAT)
        Checker("SAT", ifSolvingResult isSAT, true)
        Checker("SAT: Finite Model", ifSolvingResult isSAT_FM)
        Checker("SAT: Elem Formula", ifSolvingResult isSAT_Elem)
        Checker("SAT: SizeElem Formula", ifSolvingResult isSAT_SizeElem)
        Checker("UNSAT", ifSolvingResult isUNSAT)
        Checker("UNSAT", ifSolvingResult isUNSAT, true)
        Checker("SKIPPED BY TRANSFORMING", isOTHER_TRANS)
        Checker("TIMELIMIT IN TRANSFORMING", isTIMELIMIT_TRANS)
        Checker("UNKNOWN", ifSolvingResult isUNKNOWN)
        Checker("ERROR", ifSolvingResult isERROR)
        Checker("TIMELIMIT IN SOLVING", ifSolvingResult isTIMELIMIT)
        Checker("OUTOFMEMORY", ifSolvingResult isOUTOFMEMORY)
    ]

    let empty = List.map (fun s -> s, 0) checkers

    let add result line =
        let addOne (checker : Checker, count) =
            checker, if checker.Check result line then count + 1 else count
        List.map addOne
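
// Column names for one solver: the solver name prefixed to every statistics field name.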
let private solverColumnNames solverName =
    List.map (fun fieldName -> $"%s{solverName}{fieldName}") Statistics.fieldNames
let private writeSolverColumnNames writer columnNames = List.iter writer columnNames
let private generateAndWriteSolverColumnNames writer solverName = solverColumnNames solverName |> writeSolverColumnNames writer
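
// Rendering of individual results and of the aggregate checker counts for the human-readable table.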
module private ReadableTable =
    let writeResult (csv : CsvWriter) statOpt =
        statOpt |> Option.map (Statistics.mapResult compactStatus)
                |> Statistics.iterTry csv.WriteField

    let writeCheckers (csv : CsvWriter) results =
        let collectCheckers chk line = function
            | Some stat -> Checkers.add stat line chk
            | None -> chk
        let results = List.map snd results
        let columns = results |> List.transpose
        let rows = results |> List.map (List.choose id)
        let statistics = List.map (List.fold2 collectCheckers Checkers.empty rows >> List.map snd) columns |> List.transpose
        for checker, line in List.zip Checkers.checkers statistics do
            csv.WriteField(checker.Name)
            for stat in line do
                Statistics.iterExceptLast (fun () -> csv.WriteField($"%d{stat}")) (fun () -> csv.WriteField(""))
            csv.NextRecord()
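
// Builds the human-readable result table with one group of statistics columns per solver.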
let GenerateReadableResultTable =
    let writeHeader (csv : CsvWriter) solverName =
        generateAndWriteSolverColumnNames csv.WriteField solverName
    GenerateResultTable writeHeader ReadableTable.writeResult ReadableTable.writeCheckers
//let GenerateLaTeXResultTable =
//    let timeToString n = $"%d{n}"
//    let TIMEOUT = timeToString <| 2 * (MSECONDS_TIMEOUT ())
//    let writeEmptyResult (csv : CsvWriter) =
//        csv.WriteField(TIMEOUT)
//
//    let writeResult (csv : CsvWriter) = function
//        | Some (time, answer) ->
//            match answer with
//            | SAT _
//            | UNSAT
//            | TIMELIMIT -> csv.WriteField(timeToString time)
//            | _ -> writeEmptyResult csv
//        | None -> writeEmptyResult csv
//
//    let writeHeader (csv : CsvWriter) solverName =
//        csv.WriteField(solverName)
//
//    GenerateResultTable writeHeader writeResult (fun _ _ -> ())
//let PrintReadableResultTable names directories =
//    let timeWidth, resultWidth, nameWidth =
//        let mutable timeWidth = 1
//        let mutable resultWidth = 1
//        let mutable nameWidth = 1
//        let calcWidths (testName : string) resultFileNames =
//            nameWidth <- max nameWidth testName.Length
//            for resultFileName in resultFileNames do
//                match rawFileResult resultFileName with
//                | Some (time, answer) ->
//                    timeWidth <- max timeWidth time.Length
//                    resultWidth <- max resultWidth answer.Length
//                | None -> ()
//        walk_through_simultaneously directories calcWidths
//        timeWidth, resultWidth, nameWidth
//    let printResultLine (testName : string) resultFileNames =
//        printf "%s " <| testName.PadRight(nameWidth)
//        for resultFileName in resultFileNames do
//            let time, answer = Option.defaultValue ("", "") (rawFileResult resultFileName)
//            let time = time.PadRight(timeWidth)
//            let answer = answer.PadRight(resultWidth)
//            printf $"%s{time} %s{answer} "
//        printfn ""
//    printf "%s " ("Name".PadRight(nameWidth))
//    names |> List.map (fun (name : string) -> name.PadRight(timeWidth + resultWidth + 2)) |> join "" |> printfn "%s"
//    walk_through_simultaneously directories printResultLine
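
// Skips the aggregate checker rows that follow the header when an existing table is re-read.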
let private dropStatistics (csv : CsvReader) =
    List.iter (fun _ -> csv.Read() |> ignore) Checkers.checkers
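
// Reads an existing table back into a dictionary from test name to its list of per-solver statistics.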
let private loadTable (csv : CsvReader) =
    csv.Read() |> ignore
    csv.ReadHeader() |> ignore
    let filenameField, header = csv.Context.HeaderRecord |> List.ofArray |> List.uncons
    dropStatistics csv
    let table = Dictionary()
    while csv.Read() do
        let results = header |> List.map csv.GetField
        let stats = Statistics.tryCollectStatistics results
        table.Add(csv.GetField(filenameField), stats)
    header, table
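
// Appends one more solver's freshly computed result to every row of the loaded table.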
let private addNewResults (table : Dictionary<_,_>) transDirectory resultsDirectory =
    let addNewResults relativeFilename =
        let transFileName = Path.Combine(transDirectory, relativeFilename)
        let resultFileName = Path.Combine(resultsDirectory, relativeFilename)
        let result = Statistics.tryReadStatistics(transFileName, resultFileName)
        table.[relativeFilename] <- List.addLast result table.[relativeFilename]
    walk_through_relative resultsDirectory addNewResults
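
// Extends an existing CSV table with a new solver's columns and rewrites the file in place.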
let AddResultsToTable (tablePath : string) solverName transDirectory resultsDirectory =
    use reader = new StreamReader(tablePath)
    use csv = new CsvReader(reader, CultureInfo.InvariantCulture)
    let header, table = loadTable csv
    addNewResults table transDirectory resultsDirectory // modifies table
    let header = header @ solverColumnNames solverName
    let table = Dictionary.toList table
    BuildResultTable (fun csv -> csv.WriteField) ReadableTable.writeResult ReadableTable.writeCheckers tablePath header table