1
0
Fork 0

Added results and simulators

master
Luca Moretti 4 years ago
parent 18afb1f08e
commit 301ff69a8d
  1. 2
      main_package/classes/estimators/fam_score_calculator.py
  2. 3
      main_package/classes/estimators/structure_constraint_based_estimator.py
  3. 17
      main_package/classes/estimators/structure_score_based_estimator.py
  4. 2
      main_package/classes/utility/cache.py
  5. 15
      main_package/classes/utility/decorators.py
  6. 1153
      main_package/tests/results/results.csv
  7. 108
      main_package/tests/simulators/test_simulation_constraint.py
  8. 121
      main_package/tests/simulators/test_simulation_score.py

@ -31,7 +31,7 @@ class FamScoreCalculator:
"""
def __init__(self):
np.seterr('raise')
#np.seterr('raise')
pass
# region theta

@ -19,6 +19,8 @@ import structure_graph.sample_path as sp
import structure_graph.structure as st
import optimizers.constraint_based_optimizer as optimizer
from utility.decorators import timing,timing_write
class StructureConstraintBasedEstimator(se.StructureEstimator):
"""
@ -209,6 +211,7 @@ class StructureConstraintBasedEstimator(se.StructureEstimator):
tot_vars_count = tot_vars_count)
optimizer_obj.optimize_structure()
@timing_write
def ctpc_algorithm(self):
"""
Compute the CTPC algorithm.

@ -22,7 +22,7 @@ import estimators.fam_score_calculator as fam_score
import optimizers.hill_climbing_search as hill
import optimizers.tabu_search as tabu
from utility.decorators import timing
from utility.decorators import timing,timing_write
@ -33,10 +33,6 @@ from multiprocessing import Pool
'''
#TODO: Create a parent class StructureEstimator and Two Subclasses (Score-Based and Constraint-Based)
#TODO: Evaluate if it could be better to change list_edges to set for improve the performance
'''
class StructureScoreBasedEstimator(se.StructureEstimator):
"""
@ -49,10 +45,10 @@ class StructureScoreBasedEstimator(se.StructureEstimator):
super().__init__(sample_path)
@timing
@timing_write
def estimate_structure(self, max_parents:int = None, iterations_number:int= 40,
patience:int = None, tabu_length:int = None, tabu_rules_duration:int = None,
optimizer: str = 'hill' ):
optimizer: str = 'hill',disable_multiprocessing:bool= False ):
"""
Compute the score-based algorithm to find the optimal structure
@ -60,6 +56,10 @@ class StructureScoreBasedEstimator(se.StructureEstimator):
max_parents: maximum number of parents for each variable. If None, disabled
iterations_number: maximum number of optimization algorithm's iteration
patience: number of iterations without any improvement before stopping the search. If None, disabled
tabu_length: maximum length of the data structures used in the optimization process
tabu_rules_duration: number of iterations in which each rule keeps its value
optimizer: name of the optimizer algorithm. Possible values: 'hill' (Hill climbing), 'tabu' (tabu search)
disable_multiprocessing: true if you desire to disable the multiprocessing operations
Returns:
void
@ -86,7 +86,8 @@ class StructureScoreBasedEstimator(se.StructureEstimator):
'get the number of CPU'
cpu_count = multiprocessing.cpu_count()
#cpu_count = 1
if disable_multiprocessing:
cpu_count = 1
'Estimate the best parents for each node'
with multiprocessing.Pool(processes=cpu_count) as pool:

@ -32,7 +32,7 @@ class Cache:
#print("Cache State:", self.list_of_sets_of_indxs)
#print("Look For:", parents_comb)
result = self.actual_cache[self.list_of_sets_of_parents.index(parents_comb)]
print("CACHE HIT!!!!", parents_comb)
#print("CACHE HIT!!!!", parents_comb)
return result
except ValueError:
return None

@ -9,5 +9,20 @@ def timing(f):
te = time()
print (f"{f.__name__} args:[{args},{kw}] took: {te-ts} sec")
return result
return wrap
def timing_write(f):
    """Decorator: time a call to *f*, print the elapsed wall-clock seconds,
    and append the rounded timing to ``../results/results.csv``.

    The CSV write opens a new row ("\\n<seconds>,"); downstream code is
    expected to append further columns to complete that row.
    """
    @wraps(f)
    def wrapped(*args, **kw):
        started = time()
        result = f(*args, **kw)
        elapsed = time() - started
        print(f"{f.__name__} args:[{args},{kw}] took: {elapsed} sec")
        # NOTE(review): path is resolved against the current working
        # directory and the results/ directory must already exist.
        with open("../results/results.csv", 'a+') as out_file:
            out_file.write(f"\n{round(elapsed, 3)},")
        return result
    return wrapped

File diff suppressed because it is too large Load Diff

@ -0,0 +1,108 @@
import sys
sys.path.append("../../classes/")
import glob
import math
import os
import unittest
import networkx as nx
import numpy as np
import psutil
from line_profiler import LineProfiler
import copy
import utility.cache as ch
import structure_graph.sample_path as sp
import estimators.structure_constraint_based_estimator as se
import utility.json_importer as ji
class TestTabuSearch(unittest.TestCase):
    """Accuracy sweep of the constraint-based (CTPC) estimator over
    pre-generated quaternary networks of several sizes and densities.

    Each run appends precision/recall/F1 metrics to ../results/results.csv
    and finally asserts that the learned edge set equals the true one.

    NOTE(review): despite its name, this suite exercises the
    constraint-based estimator, not tabu search — presumably copied from
    the score-based suite; confirm before renaming (unittest discovers by
    class name).
    """

    @classmethod
    def setUpClass(cls):
        # No shared fixtures: every (density, size, network-index)
        # combination builds its own sample path inside test_constr.
        pass

    def test_constr(self):
        # Network sizes to sweep, and density pairs: the first element is
        # the density value written to the CSV, the second the suffix used
        # in the dataset file name.
        variable_counts = [3, 4, 5, 6, 10, 15]
        density_options = [["0.1", "_01"], ["0.2", "_02"], ["0.3", ""], ["0.4", "_04"]]
        for density, density_string in density_options:
            for var_number in variable_counts:
                cardinality = 4
                cardinality_string = "quaternary"
                constraint = 1  # flags constraint-based rows in results.csv
                # Larger networks are slow: evaluate fewer of them.
                num_networks = 3 if var_number > 9 else 10
                for index in range(num_networks):
                    # assumes the dataset files exist at this relative path — TODO confirm
                    self.importer = ji.JsonImporter(
                        f"../../data/networks_and_trajectories_{cardinality_string}_data{density_string}_{var_number}.json",
                        'samples', 'dyn.str', 'variables', 'Time', 'Name', index)
                    self.s1 = sp.SamplePath(self.importer)
                    self.s1.build_trajectories()
                    self.s1.build_structure()
                    # Ground-truth edge set from the generating structure.
                    true_edges = set(map(tuple, copy.deepcopy(self.s1.structure.edges)))
                    se1 = se.StructureConstraintBasedEstimator(self.s1, 0.1, 0.1)
                    se1.ctpc_algorithm()
                    set_list_edges = set(se1.complete_graph.edges)
                    # Precision/recall/F1 of the learned edges vs the truth.
                    n_added_fake_edges = len(set_list_edges - true_edges)
                    n_missing_edges = len(true_edges - set_list_edges)
                    n_true_positive = len(true_edges) - n_missing_edges
                    precision = n_true_positive / (n_true_positive + n_added_fake_edges)
                    recall = n_true_positive / (n_true_positive + n_missing_edges)
                    f1_measure = round(2 * (precision * recall) / (precision + recall), 3)
                    print(true_edges)
                    print(set_list_edges)
                    print(f"precision: {precision} ")
                    print(f"recall: {recall} ")
                    # Append metrics to the CSV row opened by the estimator's
                    # @timing_write decorator ("\n<seconds>,").
                    with open("../results/results.csv", 'a+') as fi:
                        fi.write(f"{constraint},{var_number},{density},{cardinality},{index},{f1_measure},{round(precision,3)},{round(recall,3)}")
                    self.assertEqual(set_list_edges, true_edges)


if __name__ == '__main__':
    unittest.main()

@ -0,0 +1,121 @@
import sys
sys.path.append("../../classes/")
import glob
import math
import os
import unittest
import networkx as nx
import numpy as np
import psutil
from line_profiler import LineProfiler
import copy
import utility.cache as ch
import structure_graph.sample_path as sp
import estimators.structure_score_based_estimator as se
import utility.json_importer as ji
class TestTabuSearch(unittest.TestCase):
    """Accuracy sweep of the score-based estimator driven by tabu search.

    For every (density, variable-count) combination it loads pre-generated
    quaternary networks, estimates the structure, appends
    precision/recall/F1 metrics to ../results/results.csv, and asserts the
    learned edge set equals the true one.
    """
    @classmethod
    def setUpClass(cls):
        # No shared fixtures: each loop iteration constructs its own
        # importer and sample path inside test_constr.
        pass
    def test_constr(self):
        # Network sizes to sweep, and density pairs: first element is the
        # density value written to the CSV, second the suffix used in the
        # dataset file name.
        list_vals= [3,4,5,6,10,15]
        list_dens = [["0.1","_01"],["0.2","_02"], ["0.3",""], ["0.4","_04"] ]
        for dens in list_dens:
            for var_n in list_vals:
                # More patience (iterations without improvement) for larger
                # networks before the search gives up.
                patience = 20
                var_number= var_n
                if var_number > 11:
                    patience = 25
                if var_number > 16:
                    # NOTE(review): unreachable — list_vals tops out at 15.
                    patience = 35
                cardinality = 4
                cardinality_string = "quaternary"
                density= dens[0]
                density_string = dens[1]
                constraint = 0  # flags score-based rows in results.csv
                index = 0
                # Larger networks are slow: evaluate fewer of them.
                num_networks=10
                if var_number > 9:
                    num_networks=3
                while index < num_networks:
                    #cls.read_files = glob.glob(os.path.join('../../data', "*.json"))
                    # assumes the dataset files exist at this relative path — TODO confirm
                    self.importer = ji.JsonImporter(f"../../data/networks_and_trajectories_{cardinality_string}_data{density_string}_{var_number}.json",
                                                    'samples', 'dyn.str', 'variables', 'Time', 'Name', index )
                    self.s1 = sp.SamplePath(self.importer)
                    self.s1.build_trajectories()
                    self.s1.build_structure()
                    # Ground-truth edge set from the generating structure.
                    true_edges = copy.deepcopy(self.s1.structure.edges)
                    true_edges = set(map(tuple, true_edges))
                    se1 = se.StructureScoreBasedEstimator(self.s1)
                    # Tabu search sized by the number of variables.
                    set_list_edges = se1.estimate_structure(
                        max_parents = None,
                        iterations_number = 100,
                        patience = patience,
                        tabu_length = var_number,
                        tabu_rules_duration = var_number,
                        optimizer = 'tabu'
                    )
                    # Precision/recall/F1 of the learned edges vs the truth.
                    # NOTE(review): divides by zero if no edge is recovered
                    # and none is added — confirm datasets avoid this.
                    n_added_fake_edges = len(set_list_edges.difference(true_edges))
                    n_missing_edges = len(true_edges.difference(set_list_edges))
                    n_true_positive = len(true_edges) - n_missing_edges
                    precision = n_true_positive / (n_true_positive + n_added_fake_edges)
                    recall = n_true_positive / (n_true_positive + n_missing_edges)
                    f1_measure = round(2* (precision*recall) / (precision+recall),3)
                    # print(f"n archi reali non trovati: {n_missing_edges}")
                    # print(f"n archi non reali aggiunti: {n_added_fake_edges}")
                    print(true_edges)
                    print(set_list_edges)
                    print(f"precision: {precision} ")
                    print(f"recall: {recall} ")
                    # Append metrics to the CSV row opened by the estimator's
                    # @timing_write decorator ("\n<seconds>,").
                    with open("../results/results.csv", 'a+') as fi:
                        fi.write(f"{constraint},{var_number},{density},{cardinality},{index},{f1_measure},{round(precision,3)},{round(recall,3)}")
                    index += 1
                    self.assertEqual(set_list_edges, true_edges)
if __name__ == '__main__':
    unittest.main()