
Cleaned smells

master
Luca Moretti 4 years ago
parent 788bdc73af
commit 4bda5c750d
9 changed files:

  1. PyCTBN/PyCTBN/estimators/fam_score_calculator.py (9 lines changed)
  2. PyCTBN/PyCTBN/estimators/structure_constraint_based_estimator.py (4 lines changed)
  3. PyCTBN/PyCTBN/estimators/structure_score_based_estimator.py (11 lines changed)
  4. PyCTBN/PyCTBN/optimizers/constraint_based_optimizer.py (3 lines changed)
  5. PyCTBN/PyCTBN/optimizers/optimizer.py (2 lines changed)
  6. PyCTBN/PyCTBN/optimizers/tabu_search.py (13 lines changed)
  7. PyCTBN/PyCTBN/structure_graph/sample_path.py (17 lines changed)
  8. PyCTBN/PyCTBN/utility/abstract_importer.py (1 line changed)
  9. PyCTBN/PyCTBN/utility/cache.py (4 lines changed)

@@ -252,14 +252,7 @@ class FamScoreCalculator:
         :return: the FamScore value of the node
         :rtype: float
         """
-        #print("------")
-        #print(self.marginal_likelihood_q(cims,
-        #                tau_xu,
-        #                alpha_xu))
-        #print(self.marginal_likelihood_theta(cims,
-        #                alpha_xu,
-        #                alpha_xxu))
         'calculate alpha_xxu as a uniform distribution'
         alpha_xxu = alpha_xu /(len(cims[0]._state_residence_times) - 1)
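For reference, a minimal standalone sketch of the split that the kept line performs (uniform_alpha_xxu is a hypothetical name for illustration): the imaginary-count hyperparameter alpha_xu is spread uniformly over the child's other states.

    def uniform_alpha_xxu(alpha_xu: float, n_states: int) -> float:
        # Spread alpha_xu uniformly over the (n_states - 1) possible
        # target states of a transition, as in the line kept above.
        return alpha_xu / (n_states - 1)

    print(uniform_alpha_xxu(1.0, 3))  # 0.5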

@@ -141,8 +141,7 @@ class StructureConstraintBasedEstimator(StructureEstimator):
         r2s = M2.diagonal()
         C1 = cim1.cim
         C2 = cim2.cim
-        if child_states_numb > 2:
-            if (np.sum(np.diagonal(M1)) / thumb_value) < self._thumb_threshold:
+        if child_states_numb > 2 and (np.sum(np.diagonal(M1)) / thumb_value) < self._thumb_threshold:
            self._removable_edges_matrix[parent_indx][child_indx] = False
            return False
         F_stats = C2.diagonal() / C1.diagonal()
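The change collapses a nested if into a single guard. A self-contained sketch of the same test (fails_thumb_test and its parameter names are assumptions for illustration):

    import numpy as np

    def fails_thumb_test(M1: np.ndarray, thumb_value: float,
                         thumb_threshold: float, child_states_numb: int) -> bool:
        # Mirrors the collapsed guard: with more than two child states,
        # too few observed transitions make the independence test unreliable.
        return child_states_numb > 2 and \
            (np.sum(np.diagonal(M1)) / thumb_value) < thumb_threshold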
@@ -225,7 +224,6 @@ class StructureConstraintBasedEstimator(StructureEstimator):
             list_edges_partial = executor.map(ctpc_algo,
                                               self._nodes,
                                               total_vars_numb_array)
-        #list_edges_partial = [ctpc_algo(n,total_vars_numb) for n in self._nodes]
         'Update the graph'
         edges = set(itertools.chain.from_iterable(list_edges_partial))
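The deleted comment was the sequential fallback for the executor.map call kept above. A self-contained sketch of that parallel-map pattern, with a toy function in place of ctpc_algo:

    from concurrent.futures import ProcessPoolExecutor

    def square(n: int) -> int:
        return n * n

    if __name__ == '__main__':
        nodes = [1, 2, 3, 4]
        # Parallel equivalent of the deleted [square(n) for n in nodes]
        with ProcessPoolExecutor() as executor:
            results = list(executor.map(square, nodes))
        print(results)  # [1, 4, 9, 16]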

@@ -121,15 +121,9 @@ class StructureScoreBasedEstimator(StructureEstimator):
                                                 l_optimizer)
-        #list_edges_partial = p.map(estimate_parents, self._nodes)
-        #list_edges_partial= estimate_parents('Q',max_parents,iterations_number,patience,tabu_length,tabu_rules_duration,optimizer)
         'Concatenate all the edges list'
         set_list_edges = set(itertools.chain.from_iterable(list_edges_partial))
-        #print('-------------------------')
         'calculate precision and recall'
         n_missing_edges = 0
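The 'Concatenate all the edges list' step flattens the per-node edge lists and deduplicates them; a small self-contained example:

    import itertools

    list_edges_partial = [[('A', 'B'), ('A', 'C')], [('B', 'C'), ('A', 'B')]]
    set_list_edges = set(itertools.chain.from_iterable(list_edges_partial))
    print(set_list_edges)  # {('A', 'B'), ('A', 'C'), ('B', 'C')} (order may vary)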
@@ -145,10 +139,7 @@ class StructureScoreBasedEstimator(StructureEstimator):
             precision = n_true_positive / (n_true_positive + n_added_fake_edges)
             recall = n_true_positive / (n_true_positive + n_missing_edges)
-            # print(f"n archi reali non trovati: {n_missing_edges}")
-            # print(f"n archi non reali aggiunti: {n_added_fake_edges}")
             print(true_edges)
             print(set_list_edges)
             print(f"precision: {precision} ")

@@ -60,9 +60,6 @@ class ConstraintBasedOptimizer(Optimizer):
         u = other_nodes
-        #tests_parents_numb = len(u)
-        #complete_frame = self.complete_graph_frame
-        #test_frame = complete_frame.loc[complete_frame['To'].isin([self.node_id])]
         child_states_numb = self.structure_estimator._sample_path.structure.get_states_number(self.node_id)
         b = 0
         while b < len(u):

@@ -36,4 +36,4 @@ class Optimizer(abc.ABC):
         :return: the estimated structure for the node
         :rtype: List
         """
-        pass
+        pass
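For context, a minimal sketch of the abstract interface this hunk belongs to (the method name optimize_structure and the constructor are assumptions for illustration):

    import abc
    from typing import List

    class Optimizer(abc.ABC):
        def __init__(self, node_id: str):
            self.node_id = node_id

        @abc.abstractmethod
        def optimize_structure(self) -> List:
            """Return the estimated structure (parent set) for the node."""
            pass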

@@ -140,20 +140,9 @@ class TabuSearch(Optimizer):
                 graph.remove_edges([parent_removed])
                 graph.add_edges([current_edge])
                 added = True
-            #print('**************************')
             current_score = self.structure_estimator.get_score_from_graph(graph,self.node_id)
-            # print("-------------------------------------------")
-            # print(f"Current new parent: {current_new_parent}")
-            # print(f"Current score: {current_score}")
-            # print(f"Current best score: {actual_best_score}")
-            # print(f"tabu list : {str(tabu_set)} length: {len(tabu_set)}")
-            # print(f"tabu queue : {str(tabu_queue)} length: {tabu_queue.qsize()}")
-            # print(f"graph edges: {graph.edges}")
-            # print("-------------------------------------------")
-            # input()
             if current_score > actual_best_score:
                 'update current best score'
                 actual_best_score = current_score
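If the deleted diagnostics are still occasionally useful, the standard logging module is a common substitute for ad-hoc print()/input() calls; a sketch under that assumption (log_iteration is a hypothetical helper):

    import logging

    logger = logging.getLogger(__name__)

    def log_iteration(current_score: float, best_score: float, tabu_set: set) -> None:
        # Emitted only when the logger is configured at DEBUG level,
        # so the search loop stays silent by default.
        logger.debug("score=%s best=%s tabu=%s (len %d)",
                     current_score, best_score, tabu_set, len(tabu_set))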

@@ -8,6 +8,7 @@ from .trajectory import Trajectory
 from ..utility.abstract_importer import AbstractImporter
+MESSAGE_HAS_TO_CONTAIN_EXCEPTION = 'The importer object has to contain the all processed data!'

 class SamplePath(object):
     """Aggregates all the informations about the trajectories, the real structure of the sampled net and variables
@@ -25,15 +26,15 @@ class SamplePath(object):
         """
         self._importer = importer
         if self._importer._df_variables is None or self._importer._concatenated_samples is None:
-            raise RuntimeError('The importer object has to contain the all processed data!')
+            raise RuntimeError(MESSAGE_HAS_TO_CONTAIN_EXCEPTION)
         if self._importer._df_variables.empty:
-            raise RuntimeError('The importer object has to contain the all processed data!')
-        if isinstance(self._importer._concatenated_samples, pd.DataFrame):
-            if self._importer._concatenated_samples.empty:
-                raise RuntimeError('The importer object has to contain the all processed data!')
-        if isinstance(self._importer._concatenated_samples, np.ndarray):
-            if self._importer._concatenated_samples.size == 0:
-                raise RuntimeError('The importer object has to contain the all processed data!')
+            raise RuntimeError(MESSAGE_HAS_TO_CONTAIN_EXCEPTION)
+        if isinstance(self._importer._concatenated_samples, pd.DataFrame) and\
+                self._importer._concatenated_samples.empty:
+            raise RuntimeError(MESSAGE_HAS_TO_CONTAIN_EXCEPTION)
+        if isinstance(self._importer._concatenated_samples, np.ndarray) and\
+                self._importer._concatenated_samples.size == 0:
+            raise RuntimeError(MESSAGE_HAS_TO_CONTAIN_EXCEPTION)
         self._trajectories = None
         self._structure = None
         self._total_variables_count = None
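Extracting MESSAGE_HAS_TO_CONTAIN_EXCEPTION removes four duplicated string literals. One step further would fold the four branches into a single validation helper; a sketch under the same attribute names (check_importer_has_data is hypothetical, and the message string is kept verbatim from the diff):

    import numpy as np
    import pandas as pd

    MESSAGE_HAS_TO_CONTAIN_EXCEPTION = 'The importer object has to contain the all processed data!'

    def check_importer_has_data(importer) -> None:
        # Raise unless the importer has produced all the processed data.
        samples = importer._concatenated_samples
        if (importer._df_variables is None or samples is None
                or importer._df_variables.empty
                or (isinstance(samples, pd.DataFrame) and samples.empty)
                or (isinstance(samples, np.ndarray) and samples.size == 0)):
            raise RuntimeError(MESSAGE_HAS_TO_CONTAIN_EXCEPTION)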

@@ -7,7 +7,6 @@ import pandas as pd
 import copy
-#from sklearn.utils import resample

 class AbstractImporter(ABC):

@@ -30,10 +30,7 @@ class Cache:
         :rtype: SetOfCims
         """
         try:
-            #print("Cache State:", self.list_of_sets_of_indxs)
-            #print("Look For:", parents_comb)
             result = self._actual_cache[self._list_of_sets_of_parents.index(parents_comb)]
-            #print("CACHE HIT!!!!", parents_comb)
             return result
         except ValueError:
             return None
@@ -47,7 +44,6 @@ class Cache:
         :param socim: the related SetOfCims object
         :type socim: SetOfCims
         """
-        #print("Putting in cache:", parents_comb)
         self._list_of_sets_of_parents.append(parents_comb)
         self._actual_cache.append(socim)