# run_evaluation_state_explosion.py
from datatypes.subset_projection.GlobalTypeFSM import GlobalTypeFSM
from global_types.parametric.ParametricGlobalTypes import *
from evaluation_functionality.data_processing.processing_projection_data import *
from evaluation_functionality.data_processing.plotting_overhead import *
import sys
import os
import resource

from parsing.InputParser import get_gt_from_file

# TODO: move?
# Root folder for all state-explosion evaluation output.
PREFIX_EVAL = "evaluation_data/state_explosion/"

# Example kinds that take part in the state-explosion evaluation.
EXAMPLES_STATE_EXPLOSION = {ParametricKinds.STATE_EXPLOSION}

def run_state_space_explosion_analysis(num_size):
    """Run the state-explosion evaluation for every configured example kind.

    Creates a timestamped top-level folder under ``PREFIX_EVAL``, then for
    each example kind generates global-type representations up to
    ``num_size`` and projects them with the validity check disabled.

    :param num_size: maximum global-type size to evaluate
    :return: path of the top-level evaluation folder that was created
    """
    all_eval_folder_path = PREFIX_EVAL + "evaluation_<" + \
                           str(num_size) + "_" + \
                           str(trunc(time.time())) + "/"
    # makedirs also creates PREFIX_EVAL's parents if they do not exist yet
    os.makedirs(all_eval_folder_path)
    for example in EXAMPLES_STATE_EXPLOSION:
        # a) write global type representations up to num_size
        write_global_types_up_to_size(example, num_size)
        # b) create the example-specific eval folder
        spec_eval_folder_path = all_eval_folder_path + example.value + "/"
        os.mkdir(spec_eval_folder_path)
        # c) project types up to size with the validity check switched off;
        #    try/finally guarantees the env var is restored even on error
        os.environ['VALIDITY_CHECK'] = "FALSE"
        try:
            compute_subset_constrution_up_to_size(example, spec_eval_folder_path, num_size, True)
        finally:
            os.environ['VALIDITY_CHECK'] = "TRUE"
        # d) create mappings
        # write_param_file_to_file(spec_eval_folder_path, example)
        # e) compute averages
        # write_mst_averages_to_file(spec_eval_folder_path, example)
    return all_eval_folder_path


def write_global_types_up_to_size(whichone, size, snd=None):
    """Generate and persist global-type representations up to ``size``.

    Raises the process stack limit and recursion depth first, since
    generating large instances may recurse deeply.
    """
    stack_bytes = 1000 * 1024 * 1024
    increase_rec_depth_and_RAM(ram=stack_bytes, rec_limit=10000)
    ParametricGlobalTypes.write_representation_up_to_size_w_kind(whichone, size, snd)


def compute_subset_constrution_up_to_size(whichone, eval_path, num_size, check_avail=True, snd=None, prefix=''):
    """Project pre-generated global types of increasing size and record sizes.

    Reads the ``.gt`` files produced by ``write_global_types_up_to_size``,
    builds the FSM for each global type, projects it onto every process, and
    appends one row per instance to ``evaluation_data_state_explosion.txt``
    in ``eval_path``. Stops once the next type exceeds ``num_size`` or a
    single projection round takes longer than 10 minutes.

    :param whichone: kind selecting the example family (its ``.value`` names
        the folder/file prefix of the generated types)
    :param eval_path: folder to write the result table into
    :param num_size: maximum global-type size to process
    :param check_avail: unused here; kept for interface compatibility
    :param snd: unused here; kept for interface compatibility
    :param prefix: unused here; kept for interface compatibility
    """
    increase_rec_depth_and_RAM(ram=1000*1024*1024, rec_limit=10000)
    kind = whichone
    # asserts that global types have been produced before
    # TODO: move location out
    filename_prefix = "global_types/parametric/" + kind.value + "/" + kind.value + "_"
    res_file_name = eval_path + "evaluation_data_state_explosion.txt"
    # 'with' guarantees the file is closed even if projection raises
    with open(res_file_name, "w+") as text_file:
        # TODO: move tableheader out
        header = "{:8}".format("Size") + \
                 "{:8}".format("Para") + \
                 "{:35}".format("Size P") + \
                 "{:35}".format("Size Q") + \
                 "{:35}".format("Size R") + \
                 "{:35}".format("Size All") + \
                 "\n"
        text_file.write(header)
        i = 2  # parameter of the first generated instance
        while True:
            filename = filename_prefix + str(i) + ".gt"
            gt = get_gt_from_file(filename)
            # check the size limit BEFORE projecting (the old guard tested
            # the previously processed file, so one oversized type was
            # still projected and written)
            if gt.get_size() > num_size:
                break
            fsm = GlobalTypeFSM(gt)
            t_start = time.time()
            size_projs = dict()
            for proc in gt.get_procs():
                proj = fsm.project_onto(proc)
                size_projs[proc] = proj.get_size()
            this_time_overall = 1000 * (time.time() - t_start)
            size_all = size_projs["P"] + size_projs["Q"] + size_projs["R"]
            row = "{:8}".format(str(gt.get_size())) + \
                  "{:8}".format(str(i)) + \
                  "{:35}".format(str(size_projs["P"])) + \
                  "{:35}".format(str(size_projs["Q"])) + \
                  "{:35}".format(str(size_projs["R"])) + \
                  "{:35}".format(str(size_all)) + '\n'
            text_file.write(row)
            text_file.flush()
            # TODO: move timeout out
            # stop once one projection round exceeds the 10-minute budget
            if this_time_overall >= 1000*60*10:
                break
            i += 1

# TODO: move
def increase_rec_depth_and_RAM(ram, rec_limit):
    """Raise the soft stack-size limit and Python's recursion limit.

    Needed because generating/projecting large global types recurses deeply.

    :param ram: new soft stack limit in bytes (the hard limit is set to
        ``RLIM_INFINITY``, as before)
    :param rec_limit: new value passed to ``sys.setrecursionlimit``
    """
    # setrlimit takes a (soft, hard) pair; RLIM_INFINITY replaces the
    # previous magic -1 (same value, explicit meaning)
    resource.setrlimit(resource.RLIMIT_STACK, (ram, resource.RLIM_INFINITY))
    sys.setrecursionlimit(rec_limit)