1
0
Fork 0

Add remove_node method; refactor complete_test method

better_develop
Filippo Martini 4 years ago
parent ccc562a1a6
commit c93574650f
  1. 78
      PyCTBN/PyCTBN/network_graph.py
  2. 37
      PyCTBN/PyCTBN/parameters_estimator.py
  3. 13
      PyCTBN/PyCTBN/structure.py
  4. 28
      PyCTBN/PyCTBN/structure_estimator.py
  5. 7
      PyCTBN/tests/performance_comparisons.py

@ -78,6 +78,24 @@ class NetworkGraph:
""" """
self._graph.add_edges_from(list_of_edges) self._graph.add_edges_from(list_of_edges)
def remove_node(self, node_id: str) -> None:
    """Delete the node ``node_id`` from all the class members.

    Both the wrapped ``networkx`` graph and the underlying structure drop
    the node; every cached filtering/indexing member is then reset so no
    stale per-node data survives the topology change.
    """
    for container in (self._graph, self._graph_struct):
        container.remove_node(node_id)
    self.clear_indexing_filtering_structures()
def clear_indexing_filtering_structures(self) -> None:
    """Reset all the filtering/indexing class members to ``None``."""
    for member in ("_aggregated_info_about_nodes_parents",
                   "_time_scalar_indexing_structure",
                   "_transition_scalar_indexing_structure",
                   "_time_filtering",
                   "_transition_filtering",
                   "_p_combs_structure"):
        setattr(self, member, None)
def get_ordered_by_indx_set_of_parents(self, node: str) -> typing.Tuple: def get_ordered_by_indx_set_of_parents(self, node: str) -> typing.Tuple:
"""Builds the aggregated structure that holds all the infos relative to the parent set of the node, namely """Builds the aggregated structure that holds all the infos relative to the parent set of the node, namely
(parents_labels, parents_indexes, parents_cardinalities). (parents_labels, parents_indexes, parents_cardinalities).
@ -234,63 +252,3 @@ class NetworkGraph:
@property @property
def p_combs(self) -> np.ndarray: def p_combs(self) -> np.ndarray:
return self._p_combs_structure return self._p_combs_structure
"""
##############These Methods are actually unused but could become useful in the near future################
def init_graph(self):
self.add_nodes(self._nodes_labels)
self.add_edges(self._graph_struct.edges)
self._aggregated_info_about_nodes_parents = self.get_ord_set_of_par_of_all_nodes()
self._fancy_indexing = self.build_fancy_indexing_structure(0)
self.build_scalar_indexing_structures()
self.build_time_columns_filtering_structure()
self.build_transition_columns_filtering_structure()
self._p_combs_structure = self.build_p_combs_structure()
def build_time_columns_filtering_structure(self):
nodes_indxs = self._nodes_indexes
self._time_filtering = [np.append(np.array([node_indx], dtype=np.int), p_indxs).astype(np.int)
for node_indx, p_indxs in zip(nodes_indxs, self._fancy_indexing)]
def build_transition_columns_filtering_structure(self):
nodes_number = self._graph_struct.total_variables_number
nodes_indxs = self._nodes_indexes
self._transition_filtering = [np.array([node_indx + nodes_number, node_indx, *p_indxs], dtype=np.int)
for node_indx, p_indxs in zip(nodes_indxs,
self._fancy_indexing)]
def build_scalar_indexing_structures(self):
parents_values_for_all_nodes = self.get_ordered_by_indx_parents_values_for_all_nodes()
build_transition_scalar_indexing_structure_for_a_node = \
self.build_transition_scalar_indexing_structure_for_a_node
build_time_scalar_indexing_structure_for_a_node = self.build_time_scalar_indexing_structure_for_a_node
aggr = [(build_transition_scalar_indexing_structure_for_a_node(node_id, p_vals),
build_time_scalar_indexing_structure_for_a_node(node_id, p_vals))
for node_id, p_vals in
zip(self._nodes_labels,
parents_values_for_all_nodes)]
self._transition_scalar_indexing_structure = [i[0] for i in aggr]
self._time_scalar_indexing_structure = [i[1] for i in aggr]
def build_p_combs_structure(self):
parents_values_for_all_nodes = self.get_ordered_by_indx_parents_values_for_all_nodes()
p_combs_struct = [self.build_p_comb_structure_for_a_node(p_vals) for p_vals in parents_values_for_all_nodes]
return p_combs_struct
def get_ord_set_of_par_of_all_nodes(self):
get_ordered_by_indx_set_of_parents = self.get_ordered_by_indx_set_of_parents
result = [get_ordered_by_indx_set_of_parents(node) for node in self._nodes_labels]
return result
def get_ordered_by_indx_parents_values_for_all_nodes(self):
pars_values = [i[2] for i in self._aggregated_info_about_nodes_parents]
return pars_values
def build_fancy_indexing_structure(self, start_indx):
if start_indx > 0:
pass
else:
fancy_indx = [i[1] for i in self._aggregated_info_about_nodes_parents]
return fancy_indx
"""

@ -45,20 +45,20 @@ class ParametersEstimator:
node_indx = self._net_graph.get_node_indx(node_id) node_indx = self._net_graph.get_node_indx(node_id)
state_res_times = self._single_set_of_cims._state_residence_times state_res_times = self._single_set_of_cims._state_residence_times
transition_matrices = self._single_set_of_cims._transition_matrices transition_matrices = self._single_set_of_cims._transition_matrices
self.compute_state_res_time_for_node(node_indx, self._trajectories.times, ParametersEstimator.compute_state_res_time_for_node(node_indx, self._trajectories.times,
self._trajectories.trajectory, self._trajectories.trajectory,
self._net_graph.time_filtering, self._net_graph.time_filtering,
self._net_graph.time_scalar_indexing_strucure, self._net_graph.time_scalar_indexing_strucure,
state_res_times) state_res_times)
self.compute_state_transitions_for_a_node(node_indx, ParametersEstimator.compute_state_transitions_for_a_node(node_indx, self._trajectories.complete_trajectory,
self._trajectories.complete_trajectory,
self._net_graph.transition_filtering, self._net_graph.transition_filtering,
self._net_graph.transition_scalar_indexing_structure, self._net_graph.transition_scalar_indexing_structure,
transition_matrices) transition_matrices)
self._single_set_of_cims.build_cims(state_res_times, transition_matrices) self._single_set_of_cims.build_cims(state_res_times, transition_matrices)
return self._single_set_of_cims return self._single_set_of_cims
def compute_state_res_time_for_node(self, node_indx: int, times: np.ndarray, trajectory: np.ndarray, @staticmethod
def compute_state_res_time_for_node(node_indx: int, times: np.ndarray, trajectory: np.ndarray,
cols_filter: np.ndarray, scalar_indexes_struct: np.ndarray, cols_filter: np.ndarray, scalar_indexes_struct: np.ndarray,
T: np.ndarray) -> None: T: np.ndarray) -> None:
"""Compute the state residence times for a node and fill the matrix ``T`` with the results """Compute the state residence times for a node and fill the matrix ``T`` with the results
@ -81,8 +81,9 @@ class ParametersEstimator:
times, times,
minlength=scalar_indexes_struct[-1]).reshape(-1, T.shape[1]) minlength=scalar_indexes_struct[-1]).reshape(-1, T.shape[1])
def compute_state_transitions_for_a_node(self, node_indx: int, trajectory: np.ndarray, cols_filter: np.ndarray, @staticmethod
scalar_indexing: np.ndarray, M: np.ndarray): def compute_state_transitions_for_a_node(node_indx: int, trajectory: np.ndarray, cols_filter: np.ndarray,
scalar_indexing: np.ndarray, M: np.ndarray) -> None:
"""Compute the state residence times for a node and fill the matrices ``M`` with the results. """Compute the state residence times for a node and fill the matrices ``M`` with the results.
:param node_indx: the index of the node :param node_indx: the index of the node
@ -105,30 +106,6 @@ class ParametersEstimator:
M_raveled[diag_indices] = 0 M_raveled[diag_indices] = 0
M_raveled[diag_indices] = np.sum(M, axis=2).ravel() M_raveled[diag_indices] = np.sum(M, axis=2).ravel()
"""
##############These Methods are actually unused but could become useful in the near future################
def init_sets_cims_container(self):
self.sets_of_cims_struct = acims.SetsOfCimsContainer(self._net_graph.nodes,
self._net_graph.nodes_values,
self._net_graph.
get_ordered_by_indx_parents_values_for_all_nodes(),
self._net_graph.p_combs)
def compute_parameters(self):
for indx, aggr in enumerate(zip(self._net_graph.nodes, self.sets_of_cims_struct.sets_of_cims)):
self.compute_state_res_time_for_node(self._net_graph.get_node_indx(aggr[0]), self.sample_path.trajectories.times,
self.sample_path.trajectories.trajectory,
self._net_graph.time_filtering[indx],
self._net_graph.time_scalar_indexing_strucure[indx],
aggr[1]._state_residence_times)
self.compute_state_transitions_for_a_node(self._net_graph.get_node_indx(aggr[0]),
self.sample_path.trajectories.complete_trajectory,
self._net_graph.transition_filtering[indx],
self._net_graph.transition_scalar_indexing_structure[indx],
aggr[1]._transition_matrices)
aggr[1].build_cims(aggr[1]._state_residence_times, aggr[1]._transition_matrices)
"""

@ -13,7 +13,7 @@ class Structure:
:type nodes_vals_arr: numpy.ndArray :type nodes_vals_arr: numpy.ndArray
:param edges_list: the edges of the network :param edges_list: the edges of the network
:type edges_list: List :type edges_list: List
:param total_variables_number: the total number of variables in the net :param total_variables_number: the total number of variables in the dataset
:type total_variables_number: int :type total_variables_number: int
""" """
@ -27,6 +27,17 @@ class Structure:
self._edges_list = edges_list self._edges_list = edges_list
self._total_variables_number = total_variables_number self._total_variables_number = total_variables_number
def remove_node(self, node_id: str) -> None:
    """Remove the node ``node_id`` from every member structure.

    The class member ``_total_variables_number`` is intentionally left
    unchanged, since it refers to the total number of variables in the
    dataset rather than to the nodes currently present.
    """
    position = self._nodes_labels_list.index(node_id)
    self._nodes_labels_list.pop(position)
    self._nodes_indexes_arr = np.delete(self._nodes_indexes_arr, position)
    self._nodes_vals_arr = np.delete(self._nodes_vals_arr, position)
    # Keep only the edges in which the removed node plays no part.
    self._edges_list = [edge for edge in self._edges_list
                        if node_id not in edge]
@property @property
def edges(self) -> ty.List: def edges(self) -> ty.List:
return self._edges_list return self._edges_list

@ -85,25 +85,10 @@ class StructureEstimator:
parents = np.append(parents, test_parent) parents = np.append(parents, test_parent)
sorted_parents = self._nodes[np.isin(self._nodes, parents)] sorted_parents = self._nodes[np.isin(self._nodes, parents)]
cims_filter = sorted_parents != test_parent cims_filter = sorted_parents != test_parent
sofc1 = self._cache.find(set(p_set))
if not sofc1:
bool_mask1 = np.isin(self._nodes, complete_info)
l1 = list(self._nodes[bool_mask1])
indxs1 = self._nodes_indxs[bool_mask1]
vals1 = self._nodes_vals[bool_mask1]
eds1 = list(itertools.product(parent_set,test_child))
s1 = Structure(l1, indxs1, vals1, eds1, tot_vars_count)
g1 = NetworkGraph(s1)
g1.fast_init(test_child)
p1 = ParametersEstimator(self._sample_path.trajectories, g1)
p1.fast_init(test_child)
sofc1 = p1.compute_parameters_for_node(test_child)
self._cache.put(set(p_set), sofc1)
sofc2 = None
p_set.insert(0, test_parent) p_set.insert(0, test_parent)
if p_set:
sofc2 = self._cache.find(set(p_set)) sofc2 = self._cache.find(set(p_set))
if not sofc2: if not sofc2:
complete_info.append(test_parent) complete_info.append(test_parent)
bool_mask2 = np.isin(self._nodes, complete_info) bool_mask2 = np.isin(self._nodes, complete_info)
@ -118,6 +103,17 @@ class StructureEstimator:
p2.fast_init(test_child) p2.fast_init(test_child)
sofc2 = p2.compute_parameters_for_node(test_child) sofc2 = p2.compute_parameters_for_node(test_child)
self._cache.put(set(p_set), sofc2) self._cache.put(set(p_set), sofc2)
del p_set[0]
sofc1 = self._cache.find(set(p_set))
if not sofc1:
g2.remove_node(test_parent)
g2.fast_init(test_child)
p2 = ParametersEstimator(self._sample_path.trajectories, g2)
p2.fast_init(test_child)
sofc1 = p2.compute_parameters_for_node(test_child)
self._cache.put(set(p_set), sofc1)
for cim1, p_comb in zip(sofc1.actual_cims, sofc1.p_combs): for cim1, p_comb in zip(sofc1.actual_cims, sofc1.p_combs):
cond_cims = sofc2.filter_cims_with_mask(cims_filter, p_comb) cond_cims = sofc2.filter_cims_with_mask(cims_filter, p_comb)
for cim2 in cond_cims: for cim2 in cond_cims:

@ -59,11 +59,16 @@ class PerformanceComparisons(unittest.TestCase):
self.original_algo.prepare_trajectories(self.original_algo.df_samples_list, self.original_algo.variables) self.original_algo.prepare_trajectories(self.original_algo.df_samples_list, self.original_algo.variables)
def save_datas(self, original_list, opt_list): def save_datas(self, original_list, opt_list):
if not os.path.exists('results'):
os.makedirs('results')
df_results = pd.DataFrame({'orginal_execution_time': original_list, 'optimized_execution_time': opt_list}) df_results = pd.DataFrame({'orginal_execution_time': original_list, 'optimized_execution_time': opt_list})
name = self.importer.file_path.rsplit('/', 1)[-1] name = self.importer.file_path.rsplit('/', 1)[-1]
name = name.split('.', 1)[0] name = name.split('.', 1)[0]
name = 'execution_times_' + name + '.csv' name = 'execution_times_' + name + '.csv'
df_results.to_csv(name, index=False) path = os.path.abspath('./results/')
print(path)
file_dest = path + '/' + name
df_results.to_csv(file_dest, index=False)
if __name__ == '__main__': if __name__ == '__main__':