# run_evaluation_overhead_avail.py
import sys
import os
import time
import resource
from math import trunc

from datatypes.GlobalTypeFSM import GlobalTypeFSM
from global_types.parametric.ParametricGlobalTypes import *
from data_processing.processing_projection_data import *
from data_processing.plotting_overhead import *
from parsing.InputParser import get_input_from_file, get_gt_from_file

# TODO: move?
PREFIX_EVAL = "evaluation_data/state_explosion/"

EXAMPLES_STATE_EXPLOSION = {
    ParametricKinds.STATE_EXPLOSION
}


def run_state_space_explosion_analysis(num_size):
    # timestamped top-level folder that collects all results of this run
    all_eval_folder_path = PREFIX_EVAL + "evaluation_<" + \
        str(num_size) + "_" + \
        str(trunc(time.time())) + "/"
    os.mkdir(all_eval_folder_path)
    for example in EXAMPLES_STATE_EXPLOSION:
        # a) write global type representations up to num_size
        write_global_types_up_to_size(example, num_size)
        # b) create the example-specific evaluation folder
        spec_eval_folder_path = all_eval_folder_path + example.value + "/"
        os.mkdir(spec_eval_folder_path)
        # c) project types up to size (VALIDITY_CHECK disabled while projecting)
        os.environ['VALIDITY_CHECK'] = "FALSE"
        compute_subset_constrution_up_to_size(example, spec_eval_folder_path, num_size, True)
        os.environ['VALIDITY_CHECK'] = "TRUE"
        # d) create mappings
        # write_param_file_to_file(spec_eval_folder_path, example)
        # e) compute averages
        # write_mst_averages_to_file(spec_eval_folder_path, example)
    return all_eval_folder_path


def write_global_types_up_to_size(whichone, size, snd=None):
    increase_rec_depth_and_RAM(ram=1000 * 1024 * 1024, rec_limit=10000)
    ParametricGlobalTypes.write_representation_up_to_size_w_kind(whichone, size, snd)


def compute_subset_constrution_up_to_size(whichone, eval_path, num_size, check_avail=True, snd=None, prefix=''):
    increase_rec_depth_and_RAM(ram=1000 * 1024 * 1024, rec_limit=10000)
    kind = whichone
    max_size = num_size
    # assumes the global type files have been produced before (see write_global_types_up_to_size)
    res_file_prefix = eval_path
    # TODO: move location out
    filename_prefix = "global_types/parametric/" + kind.value + "/" + kind.value + "_"
    start = 2
    # res_file_name = res_file_prefix + 'evaluation_max' + str(max_size) + '_' + \
    #     str(trunc(time.time())) + '.txt'
    res_file_name = res_file_prefix + "evaluation_data_state_explosion.txt"
    text_file = open(res_file_name, "w+")
    # TODO: move table header out
    res = "{:8}".format("Size") + \
        "{:8}".format("Para") + \
        "{:35}".format("Size P") + \
        "{:35}".format("Size Q") + \
        "{:35}".format("Size R") + \
        "{:35}".format("Size All") + \
        "\n"
    text_file.write(res)
    i = start
    filename = filename_prefix + str(i) + ".gt"
    gt = get_gt_from_file(filename)
    while gt.get_size() <= max_size:
        filename = filename_prefix + str(i) + ".gt"
        gt = get_gt_from_file(filename)
        fsm = GlobalTypeFSM(gt)
        start_time = time.time()
        size_projs = dict()
        for proc in gt.get_procs():
            proj = fsm.project_onto(proc)
            size_projs[proc] = proj.get_size()
        end_time = time.time()
        this_time_overall = 1000 * (end_time - start_time)  # projection time in ms
        size_all = size_projs["P"] + size_projs["Q"] + size_projs["R"]
        res = "{:8}".format(str(gt.get_size())) + \
            "{:8}".format(str(i)) + \
            "{:35}".format(str(size_projs["P"])) + \
            "{:35}".format(str(size_projs["Q"])) + \
            "{:35}".format(str(size_projs["R"])) + \
            "{:35}".format(str(size_all)) + '\n'
        text_file.write(res)
        text_file.flush()
        # TODO: move timeout out
        # break once projecting a single instance takes 10 minutes or more
        if this_time_overall >= 1000 * 60 * 10:
            break
        i += 1
    text_file.close()


# TODO: move
# helper that raises the process stack size limit and the Python recursion limit
def increase_rec_depth_and_RAM(ram, rec_limit):
    resource.setrlimit(resource.RLIMIT_STACK, (ram, resource.RLIM_INFINITY))
    # increase the recursion limit tenfold compared to Python's default of 1000
    sys.setrecursionlimit(rec_limit)
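

# Minimal command-line entry point: a sketch only, assuming this script is meant to
# be run directly with run_state_space_explosion_analysis as the driver. The argument
# handling and the fallback size of 25 are illustrative assumptions, not taken from
# the evaluation setup above.
if __name__ == "__main__":
    # read the maximum global type size from the first CLI argument (assumed convention)
    max_num_size = int(sys.argv[1]) if len(sys.argv) > 1 else 25
    result_folder = run_state_space_explosion_analysis(max_num_size)
    print("Evaluation data written to: " + result_folder)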