From 4bda5c750d37806d8f1b02f8b407c93fac2ccfc4 Mon Sep 17 00:00:00 2001
From: Luca Moretti
Date: Tue, 2 Mar 2021 15:32:15 +0100
Subject: [PATCH] Cleaned smells

---
 .../PyCTBN/estimators/fam_score_calculator.py |  9 +--------
 .../structure_constraint_based_estimator.py   |  4 +---
 .../structure_score_based_estimator.py        | 11 +----------
 .../optimizers/constraint_based_optimizer.py  |  3 ---
 PyCTBN/PyCTBN/optimizers/optimizer.py         |  2 +-
 PyCTBN/PyCTBN/optimizers/tabu_search.py       | 13 +------------
 PyCTBN/PyCTBN/structure_graph/sample_path.py  | 17 +++++++++--------
 PyCTBN/PyCTBN/utility/abstract_importer.py    |  1 -
 PyCTBN/PyCTBN/utility/cache.py                |  4 ----
 9 files changed, 14 insertions(+), 50 deletions(-)

diff --git a/PyCTBN/PyCTBN/estimators/fam_score_calculator.py b/PyCTBN/PyCTBN/estimators/fam_score_calculator.py
index 5b0b591..d8ec3a0 100644
--- a/PyCTBN/PyCTBN/estimators/fam_score_calculator.py
+++ b/PyCTBN/PyCTBN/estimators/fam_score_calculator.py
@@ -252,14 +252,7 @@ class FamScoreCalculator:
         :return: the FamScore value of the node
         :rtype: float
         """
-        #print("------")
-        #print(self.marginal_likelihood_q(cims,
-        #                        tau_xu,
-        #                        alpha_xu))
-
-        #print(self.marginal_likelihood_theta(cims,
-        #                        alpha_xu,
-        #                        alpha_xxu))
+
         'calculate alpha_xxu as a uniform distribution'
         alpha_xxu = alpha_xu /(len(cims[0]._state_residence_times) - 1)
 
diff --git a/PyCTBN/PyCTBN/estimators/structure_constraint_based_estimator.py b/PyCTBN/PyCTBN/estimators/structure_constraint_based_estimator.py
index 822427c..aaf045f 100644
--- a/PyCTBN/PyCTBN/estimators/structure_constraint_based_estimator.py
+++ b/PyCTBN/PyCTBN/estimators/structure_constraint_based_estimator.py
@@ -141,8 +141,7 @@ class StructureConstraintBasedEstimator(StructureEstimator):
         r2s = M2.diagonal()
         C1 = cim1.cim
         C2 = cim2.cim
-        if child_states_numb > 2:
-            if (np.sum(np.diagonal(M1)) / thumb_value) < self._thumb_threshold:
+        if child_states_numb > 2 and (np.sum(np.diagonal(M1)) / thumb_value) < self._thumb_threshold:
                 self._removable_edges_matrix[parent_indx][child_indx] = False
                 return False
         F_stats = C2.diagonal() / C1.diagonal()
@@ -225,7 +224,6 @@ class StructureConstraintBasedEstimator(StructureEstimator):
             list_edges_partial = executor.map(ctpc_algo,
                                               self._nodes,
                                               total_vars_numb_array)
-            #list_edges_partial = [ctpc_algo(n,total_vars_numb) for n in self._nodes]
 
         'Update the graph'
         edges = set(itertools.chain.from_iterable(list_edges_partial))
diff --git a/PyCTBN/PyCTBN/estimators/structure_score_based_estimator.py b/PyCTBN/PyCTBN/estimators/structure_score_based_estimator.py
index cf55d7c..358b6b8 100644
--- a/PyCTBN/PyCTBN/estimators/structure_score_based_estimator.py
+++ b/PyCTBN/PyCTBN/estimators/structure_score_based_estimator.py
@@ -121,15 +121,9 @@ class StructureScoreBasedEstimator(StructureEstimator):
                                             l_optimizer)
 
 
-
-
-            #list_edges_partial = p.map(estimate_parents, self._nodes)
-            #list_edges_partial= estimate_parents('Q',max_parents,iterations_number,patience,tabu_length,tabu_rules_duration,optimizer)
-
         'Concatenate all the edges list'
         set_list_edges = set(itertools.chain.from_iterable(list_edges_partial))
-        #print('-------------------------')
 
         'calculate precision and recall'
 
         n_missing_edges = 0
@@ -145,10 +139,7 @@ class StructureScoreBasedEstimator(StructureEstimator):
         precision = n_true_positive / (n_true_positive + n_added_fake_edges)
         recall = n_true_positive / (n_true_positive + n_missing_edges)
 
-
-
-        # print(f"n archi reali non trovati: {n_missing_edges}")
-        # print(f"n archi non reali aggiunti: {n_added_fake_edges}")
+        print(true_edges)
         print(set_list_edges)
         print(f"precision: {precision} ")
 
diff --git a/PyCTBN/PyCTBN/optimizers/constraint_based_optimizer.py b/PyCTBN/PyCTBN/optimizers/constraint_based_optimizer.py
index 65bc19c..9ad05fc 100644
--- a/PyCTBN/PyCTBN/optimizers/constraint_based_optimizer.py
+++ b/PyCTBN/PyCTBN/optimizers/constraint_based_optimizer.py
@@ -60,9 +60,6 @@ class ConstraintBasedOptimizer(Optimizer):
 
         u = other_nodes
-        #tests_parents_numb = len(u)
-        #complete_frame = self.complete_graph_frame
-        #test_frame = complete_frame.loc[complete_frame['To'].isin([self.node_id])]
 
         child_states_numb = self.structure_estimator._sample_path.structure.get_states_number(self.node_id)
         b = 0
         while b < len(u):
diff --git a/PyCTBN/PyCTBN/optimizers/optimizer.py b/PyCTBN/PyCTBN/optimizers/optimizer.py
index 36445c0..1984f06 100644
--- a/PyCTBN/PyCTBN/optimizers/optimizer.py
+++ b/PyCTBN/PyCTBN/optimizers/optimizer.py
@@ -36,4 +36,4 @@ class Optimizer(abc.ABC):
         :return: the estimated structure for the node
         :rtype: List
         """
-        pass
+        pass
diff --git a/PyCTBN/PyCTBN/optimizers/tabu_search.py b/PyCTBN/PyCTBN/optimizers/tabu_search.py
index 3a6fd92..b7b2bc9 100644
--- a/PyCTBN/PyCTBN/optimizers/tabu_search.py
+++ b/PyCTBN/PyCTBN/optimizers/tabu_search.py
@@ -140,20 +140,9 @@ class TabuSearch(Optimizer):
                     graph.remove_edges([parent_removed])
                     graph.add_edges([current_edge])
                 added = True
-            #print('**************************')
+
             current_score = self.structure_estimator.get_score_from_graph(graph,self.node_id)
-
-            # print("-------------------------------------------")
-            # print(f"Current new parent: {current_new_parent}")
-            # print(f"Current score: {current_score}")
-            # print(f"Current best score: {actual_best_score}")
-            # print(f"tabu list : {str(tabu_set)} length: {len(tabu_set)}")
-            # print(f"tabu queue : {str(tabu_queue)} length: {tabu_queue.qsize()}")
-            # print(f"graph edges: {graph.edges}")
-
-            # print("-------------------------------------------")
-            # input()
 
             if current_score > actual_best_score:
                 'update current best score'
                 actual_best_score = current_score
diff --git a/PyCTBN/PyCTBN/structure_graph/sample_path.py b/PyCTBN/PyCTBN/structure_graph/sample_path.py
index 80b51d9..8effce5 100644
--- a/PyCTBN/PyCTBN/structure_graph/sample_path.py
+++ b/PyCTBN/PyCTBN/structure_graph/sample_path.py
@@ -8,6 +8,7 @@ from .trajectory import Trajectory
 from ..utility.abstract_importer import AbstractImporter
 
 
+MESSAGE_HAS_TO_CONTAIN_EXCEPTION = 'The importer object has to contain the all processed data!'
 
 class SamplePath(object):
     """Aggregates all the informations about the trajectories, the real structure of the sampled net and variables
@@ -25,15 +26,15 @@ class SamplePath(object):
         """
         self._importer = importer
         if self._importer._df_variables is None or self._importer._concatenated_samples is None:
-            raise RuntimeError('The importer object has to contain the all processed data!')
+            raise RuntimeError(MESSAGE_HAS_TO_CONTAIN_EXCEPTION)
         if self._importer._df_variables.empty:
-            raise RuntimeError('The importer object has to contain the all processed data!')
-        if isinstance(self._importer._concatenated_samples, pd.DataFrame):
-            if self._importer._concatenated_samples.empty:
-                raise RuntimeError('The importer object has to contain the all processed data!')
-        if isinstance(self._importer._concatenated_samples, np.ndarray):
-            if self._importer._concatenated_samples.size == 0:
-                raise RuntimeError('The importer object has to contain the all processed data!')
+            raise RuntimeError(MESSAGE_HAS_TO_CONTAIN_EXCEPTION)
+        if isinstance(self._importer._concatenated_samples, pd.DataFrame) and\
+                self._importer._concatenated_samples.empty:
+            raise RuntimeError(MESSAGE_HAS_TO_CONTAIN_EXCEPTION)
+        if isinstance(self._importer._concatenated_samples, np.ndarray) and\
+                self._importer._concatenated_samples.size == 0:
+            raise RuntimeError(MESSAGE_HAS_TO_CONTAIN_EXCEPTION)
         self._trajectories = None
         self._structure = None
         self._total_variables_count = None
diff --git a/PyCTBN/PyCTBN/utility/abstract_importer.py b/PyCTBN/PyCTBN/utility/abstract_importer.py
index 1cad352..8984b4d 100644
--- a/PyCTBN/PyCTBN/utility/abstract_importer.py
+++ b/PyCTBN/PyCTBN/utility/abstract_importer.py
@@ -7,7 +7,6 @@ import pandas as pd
 import copy
 
 
-#from sklearn.utils import resample
 
 
 class AbstractImporter(ABC):
diff --git a/PyCTBN/PyCTBN/utility/cache.py b/PyCTBN/PyCTBN/utility/cache.py
index 8e0369b..5191af3 100644
--- a/PyCTBN/PyCTBN/utility/cache.py
+++ b/PyCTBN/PyCTBN/utility/cache.py
@@ -30,10 +30,7 @@ class Cache:
         :rtype: SetOfCims
         """
         try:
-            #print("Cache State:", self.list_of_sets_of_indxs)
-            #print("Look For:", parents_comb)
             result = self._actual_cache[self._list_of_sets_of_parents.index(parents_comb)]
-            #print("CACHE HIT!!!!", parents_comb)
             return result
         except ValueError:
             return None
@@ -47,7 +44,6 @@ class Cache:
         :param socim: the related SetOfCims object
         :type socim: SetOfCims
         """
-        #print("Putting in cache:", parents_comb)
         self._list_of_sets_of_parents.append(parents_comb)
         self._actual_cache.append(socim)
 