
Add other tests; optimized NetGraph Initialization

parallel_struct_est
philpMartin 4 years ago
parent b374d60117
commit 1b61c70f39
  1. 42
      main_package/classes/cache.py
  2. 19
      main_package/classes/conditional_intensity_matrix.py
  3. 2
      main_package/classes/json_importer.py
  4. 260
      main_package/classes/network_graph.py
  5. 133
      main_package/classes/parameters_estimator.py
  6. 79
      main_package/classes/set_of_cims.py
  7. 11
      main_package/classes/sets_of_cims_container.py
  8. 2
      main_package/classes/structure.py
  9. 56
      main_package/classes/structure_estimator.py
  10. 61
      main_package/tests/test_cache.py
  11. 171
      main_package/tests/test_networkgraph.py
  12. 83
      main_package/tests/test_parameters_estimator.py
  13. 109
      main_package/tests/test_setofcims.py

@@ -3,26 +3,60 @@ import set_of_cims as sofc
class Cache:
    """
    This class acts as a cache for the SetOfCims objects of a test node that have already been computed during the CTPC algorithm.

    :list_of_sets_of_parents: a list of the parent sets to which the SetOfCims in the cache at the same index is related
    :actual_cache: a list of SetOfCims objects
    """

    def __init__(self):
        self.list_of_sets_of_parents = []
        self.actual_cache = []

    def find(self, parents_comb: typing.Union[typing.Set, str]):
        """
        Tries to find in the cache, given the symbolic parents combination parents_comb, the SetOfCims related to that combination.
        N.B. if parents_comb is not a Set, then the index refers to the SetOfCims of the node with no parents.

        Parameters:
            parents_comb: the parents related to that SetOfCims
        Returns:
            A SetOfCims object if parents_comb is found in list_of_sets_of_parents.
            None otherwise.
        """
        try:
            #print("Cache State:", self.list_of_sets_of_indxs)
            #print("Look For:", parents_comb)
            result = self.actual_cache[self.list_of_sets_of_parents.index(parents_comb)]
            print("CACHE HIT!!!!", parents_comb)
            return result
        except ValueError:
            return None

    def put(self, parents_comb: typing.Union[typing.Set, str], socim: sofc.SetOfCims):
        """
        Places in the cache the SetOfCims object socim and the related symbolic index parents_comb in list_of_sets_of_parents.

        Parameters:
            parents_comb: the symbolic set index
            socim: the related SetOfCims object
        Returns:
            void
        """
        #print("Putting in cache:", parents_comb)
        self.list_of_sets_of_parents.append(parents_comb)
        self.actual_cache.append(socim)

    def clear(self):
        """
        Clears the contents of both caches.

        Parameters:
            void
        Returns:
            void
        """
        del self.list_of_sets_of_parents[:]
        del self.actual_cache[:]
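
To make the caching behavior concrete, here is a minimal, hypothetical usage sketch; the module aliases and the SetOfCims constructor arguments follow the test files included in this commit, everything else is illustrative:

    import numpy as np
    import cache as ch
    import set_of_cims as soci

    cache = ch.Cache()
    sofc = soci.SetOfCims('Z', [], 3, np.array([]))   # a SetOfCims for node 'Z' with no parents
    cache.put({'X', 'Y'}, sofc)                       # key the result by the symbolic parent set
    assert cache.find({'Y', 'X'}) is sofc             # set equality makes the lookup order-insensitive
    assert cache.find({'X'}) is None                  # a different parent set is a cache miss
    cache.clear()                                     # empties both internal lists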

@@ -2,13 +2,28 @@ import numpy as np
class ConditionalIntensityMatrix:
    """
    Abstracts the Conditional Intensity Matrix of a node as an aggregation of the state residence times vector,
    the state transitions count matrix and the actual CIM matrix.

    :_state_residence_times: state residence times vector
    :_state_transition_matrix: the transitions count matrix
    :_cim: the actual CIM of the node
    """

    def __init__(self, state_residence_times: np.array, state_transition_matrix: np.array):
        self._state_residence_times = state_residence_times
        self._state_transition_matrix = state_transition_matrix
        self._cim = self.state_transition_matrix.astype(np.float64)

    def compute_cim_coefficients(self):
        """
        Computes the coefficients of the matrix _cim by using the equality q_xx' = M[x, x'] / T[x].

        Parameters:
            void
        Returns:
            void
        """
        np.fill_diagonal(self._cim, self._cim.diagonal() * -1)
        self._cim = ((self._cim.T + 1) / (self._state_residence_times + 1)).T
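
A small standalone sketch of the same two-step computation (the numbers are made up; the diagonal of M holds the total count of transitions leaving each state, as built by the parameters estimator, and the +1 terms mirror the code above, presumably to avoid divisions by zero for unvisited states):

    import numpy as np

    M = np.array([[5, 3, 2],
                  [4, 6, 2],
                  [1, 1, 2]], dtype=np.float64)   # transition counts; diagonal = transitions leaving the state
    T = np.array([10.0, 12.0, 20.0])              # state residence times

    cim = M.copy()
    np.fill_diagonal(cim, cim.diagonal() * -1)    # negate the diagonal so each row sums to roughly zero
    cim = ((cim.T + 1) / (T + 1)).T               # q_xx' ~ M[x, x'] / T[x], row x divided by T[x]
    print(cim)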

@@ -237,7 +237,7 @@ class JsonImporter(AbstractImporter):
        for indx in range(len(self.df_samples_list)):  # the individual trajectories are no longer needed  #TODO: use a list comprehension
            self.df_samples_list[indx] = self.df_samples_list[indx].iloc[0:0]

    def import_sampled_cims(self, raw_data: typing.List, indx: int, cims_key: str) -> typing.Dict:
        cims_for_all_vars = {}
        for var in raw_data[indx][cims_key]:
            sampled_cims_list = []

@@ -1,17 +1,27 @@
import networkx as nx
import numpy as np
import typing


class NetworkGraph:
    """
    Abstracts the information contained in the Structure class in the form of a directed graph.
    Has the task of creating all the filtering structures necessary for parameters estimation.

    :graph_struct: the Structure object from which the information about the net will be extracted
    :graph: the directed graph
    :nodes_labels: the symbolic names of the variables
    :nodes_indexes: the indexes of the nodes
    :nodes_values: the cardinalities of the nodes
    :aggregated_info_about_nodes_parents: a structure that contains all the necessary information about every parent of every node in the net
    :_fancy_indexing: the indexes of every parent of every node in the net
    :_time_scalar_indexing_structure: the indexing structure for state residence time estimation
    :_transition_scalar_indexing_structure: the indexing structure for transition computation
    :_time_filtering: the columns filtering structure used in the computation of the state residence times
    :_transition_filtering: the columns filtering structure used in the computation of the transitions from one state to another
    :_p_combs_structure: all the possible parents states combinations for every node in the net
    """

    def __init__(self, graph_struct):
@@ -37,71 +47,88 @@ class NetworkGraph:
        self.build_time_columns_filtering_structure()
        self.build_transition_columns_filtering_structure()
        self._p_combs_structure = self.build_p_combs_structure()

    def fast_init(self, node_id: str):
        """
        Initializes all the structures necessary for parameters estimation of the node identified by the label node_id.

        Parameters:
            node_id: the label of the node
        Returns:
            void
        """
        self.add_nodes(self._nodes_labels)
        self.add_edges(self.graph_struct.edges)
        self.aggregated_info_about_nodes_parents = self.get_ordered_by_indx_set_of_parents(node_id)
        self._fancy_indexing = self.aggregated_info_about_nodes_parents[1]
        p_indxs = self._fancy_indexing
        p_vals = self.aggregated_info_about_nodes_parents[2]
        self._time_scalar_indexing_structure = self.build_time_scalar_indexing_structure_for_a_node(node_id, p_vals)
        self._transition_scalar_indexing_structure = self.build_transition_scalar_indexing_structure_for_a_node(node_id, p_vals)
        node_indx = self.get_node_indx(node_id)
        self._time_filtering = self.build_time_columns_filtering_for_a_node(node_indx, p_indxs)
        self._transition_filtering = self.build_transition_filtering_for_a_node(node_indx, p_indxs)
        self._p_combs_structure = self.build_p_comb_structure_for_a_node(p_vals)
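
This per-node path is the "optimized NetGraph Initialization" of the commit message: instead of init_graph building the indexing and filtering structures for every node, fast_init builds them only for the node under test. A rough usage sketch, assuming a SamplePath object s1 as in the tests (names are illustrative):

    import network_graph as ng

    g = ng.NetworkGraph(s1.structure)
    g.fast_init('X')                        # parents info, indexing, filtering and p_combs for node 'X' only
    print(g.time_scalar_indexing_strucure)  # a cumprod vector, e.g. [ 3  9 27]
    print(g.time_filtering)                 # [node index, parent indexes]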
    def add_nodes(self, list_of_nodes: typing.List):
        """
        Adds the nodes contained in the list list_of_nodes to the graph and
        sets all the properties that identify a node (index, positional index, cardinality).

        Parameters:
            list_of_nodes: the nodes to add to the graph
        Returns:
            void
        """
        nodes_indxs = self._nodes_indexes
        nodes_vals = self.graph_struct.nodes_values
        pos = 0
        for id, node_indx, node_val in zip(list_of_nodes, nodes_indxs, nodes_vals):
            self.graph.add_node(id, indx=node_indx, val=node_val, pos_indx=pos)
            pos += 1

    def add_edges(self, list_of_edges: typing.List):
        """
        Adds the edges contained in the list list_of_edges to the graph.

        Parameters:
            list_of_edges: the edges to add to the graph
        Returns:
            void
        """
        self.graph.add_edges_from(list_of_edges)

    def get_ordered_by_indx_set_of_parents(self, node: str):
        """
        Builds the aggregated structure that holds all the information about the parent set of the node, namely
        (parents_labels, parents_indexes, parents_cardinalities).
        N.B. the parent set is sorted using the ordering of the sorted node labels.

        Parameters:
            node: the label of the node
        Returns:
            a tuple containing all the parent set information
        """
        parents = self.get_parents_by_id(node)
        nodes = self._nodes_labels
        d = {v: i for i, v in enumerate(nodes)}
        sorted_parents = sorted(parents, key=lambda v: d[v])
        get_node_indx = self.get_node_indx
        p_indxes = [get_node_indx(node) for node in sorted_parents]
        p_values = [self.get_states_number(node) for node in sorted_parents]
        return (sorted_parents, p_indxes, p_values)

    def get_ord_set_of_par_of_all_nodes(self):
        get_ordered_by_indx_set_of_parents = self.get_ordered_by_indx_set_of_parents
        result = [get_ordered_by_indx_set_of_parents(node) for node in self._nodes_labels]
        return result

    def get_ordered_by_indx_parents_values_for_all_nodes(self):
        pars_values = [i[2] for i in self.aggregated_info_about_nodes_parents]
        return pars_values

    def build_fancy_indexing_structure(self, start_indx):
        if start_indx > 0:
            pass
@@ -109,42 +136,95 @@ class NetworkGraph:
        fancy_indx = [i[1] for i in self.aggregated_info_about_nodes_parents]
        return fancy_indx

    def build_time_scalar_indexing_structure_for_a_node(self, node_id: str, parents_vals: typing.List):
        """
        Builds the indexing structure used in the computation of the state residence times values.

        Parameters:
            node_id: the node label
            parents_vals: the cardinalities of the node's parents
        Returns:
            a numpy array
        """
        T_vector = np.array([self.get_states_number(node_id)])
        T_vector = np.append(T_vector, parents_vals)
        T_vector = T_vector.cumprod().astype(np.int)
        return T_vector

    def build_transition_scalar_indexing_structure_for_a_node(self, node_id: str, parents_vals: typing.List):
        """
        Builds the indexing structure used in the computation of the state transitions values.

        Parameters:
            node_id: the node label
            parents_vals: the cardinalities of the node's parents
        Returns:
            a numpy array
        """
        node_states_number = self.get_states_number(node_id)
        M_vector = np.array([node_states_number,
                             node_states_number])
        M_vector = np.append(M_vector, parents_vals)
        M_vector = M_vector.cumprod().astype(np.int)
        return M_vector
    def build_time_columns_filtering_for_a_node(self, node_indx: int, p_indxs: typing.List):
        """
        Builds the structure used to filter the columns of the dataset indicated by node_indx and p_indxs.
        This structure is used in the computation of the state residence times.

        Parameters:
            node_indx: the index of the node
            p_indxs: the indexes of the node's parents
        Returns:
            a numpy array
        """
        return np.append(np.array([node_indx], dtype=np.int), p_indxs).astype(np.int)

    def build_transition_filtering_for_a_node(self, node_indx, p_indxs):
        """
        Builds the structure used to filter the columns of the dataset indicated by node_indx and p_indxs.
        This structure is used in the computation of the state transitions values.

        Parameters:
            node_indx: the index of the node
            p_indxs: the indexes of the node's parents
        Returns:
            a numpy array
        """
        nodes_number = self.graph_struct.total_variables_number
        return np.array([node_indx + nodes_number, node_indx, *p_indxs], dtype=np.int)

    def build_p_comb_structure_for_a_node(self, parents_values: typing.List):
        """
        Builds the combinatorial structure that contains the combinations of all the values contained in parents_values.

        Parameters:
            parents_values: the cardinalities of the node's parents
        Returns:
            a numpy matrix containing a grid of the combinations
        """
        tmp = []
        for val in parents_values:
            tmp.append([x for x in range(val)])
        if len(parents_values) > 0:
            parents_comb = np.array(np.meshgrid(*tmp)).T.reshape(-1, len(parents_values))
            if len(parents_values) > 1:
                tmp_comb = parents_comb[:, 1].copy()
                parents_comb[:, 1] = parents_comb[:, 0].copy()
                parents_comb[:, 0] = tmp_comb
        else:
            parents_comb = np.array([[]], dtype=np.int)
        return parents_comb
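
The p_combs grid enumerates every parents-state combination via np.meshgrid and then swaps the first two columns so that the row order matches the scalar-indexing convention used above. A standalone sketch of the same logic (the swap is written with fancy indexing, which is equivalent to the copy-based swap in the method):

    import numpy as np

    parents_values = [2, 3]
    tmp = [list(range(v)) for v in parents_values]
    parents_comb = np.array(np.meshgrid(*tmp)).T.reshape(-1, len(parents_values))
    if len(parents_values) > 1:
        parents_comb[:, [0, 1]] = parents_comb[:, [1, 0]]
    print(parents_comb)
    # [[0 0]
    #  [1 0]
    #  [2 0]
    #  [0 1]
    #  [1 1]
    #  [2 1]]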
    def build_time_columns_filtering_structure(self):
        nodes_indxs = self._nodes_indexes
        self._time_filtering = [np.append(np.array([node_indx], dtype=np.int), p_indxs).astype(np.int)
                                for node_indx, p_indxs in zip(nodes_indxs, self._fancy_indexing)]

    def build_transition_columns_filtering_structure(self):
        nodes_number = self.graph_struct.total_variables_number
        nodes_indxs = self._nodes_indexes
        self._transition_filtering = [np.array([node_indx + nodes_number, node_indx, *p_indxs], dtype=np.int)
                                      for node_indx, p_indxs in zip(nodes_indxs,
@@ -154,9 +234,9 @@ class NetworkGraph:
        parents_values_for_all_nodes = self.get_ordered_by_indx_parents_values_for_all_nodes()
        build_transition_scalar_indexing_structure_for_a_node = self.build_transition_scalar_indexing_structure_for_a_node
        build_time_scalar_indexing_structure_for_a_node = self.build_time_scalar_indexing_structure_for_a_node
        aggr = [(build_transition_scalar_indexing_structure_for_a_node(node_id, p_vals),
                 build_time_scalar_indexing_structure_for_a_node(node_id, p_vals))
                for node_id, p_vals in
                zip(self._nodes_labels,
                    parents_values_for_all_nodes)]
        self._transition_scalar_indexing_structure = [i[0] for i in aggr]
@@ -167,52 +247,40 @@ class NetworkGraph:
        p_combs_struct = [self.build_p_comb_structure_for_a_node(p_vals) for p_vals in parents_values_for_all_nodes]
        return p_combs_struct

    def get_parents_by_id(self, node_id):
        return list(self.graph.predecessors(node_id))

    def get_states_number(self, node_id):
        return self.graph.nodes[node_id]['val']

    """
    def get_states_number_by_indx(self, node_indx):
        return self.graph_struct.get_states_number_by_indx(node_indx)

    def get_node_by_index(self, node_indx):
        return self.graph_struct.get_node_id(node_indx)"""

    def get_node_indx(self, node_id):
        return nx.get_node_attributes(self.graph, 'indx')[node_id]

    def get_positional_node_indx(self, node_id):
        return self.graph.nodes[node_id]['pos_indx']

    @property
    def nodes(self):
        return self._nodes_labels

    @property
    def edges(self):
        return list(self.graph.edges)

    @property
    def nodes_indexes(self):
        return self._nodes_indexes

    @property
    def nodes_values(self):
        return self._nodes_values

    @property
    def time_scalar_indexing_strucure(self):
        return self._time_scalar_indexing_structure

@@ -1,33 +1,52 @@
import numpy as np

import sets_of_cims_container as acims
import set_of_cims as sofc
import sample_path as sp
import network_graph as ng


class ParametersEstimator:
    """
    Has the task of computing the CIMs of a particular node given the trajectories in the sample path and the net structure
    in the graph net_graph.

    :sample_path: the container of the trajectories
    :net_graph: the net structure
    :single_set_of_cims: the SetOfCims object that will hold the CIMs of the node
    """

    def __init__(self, sample_path: sp.SamplePath, net_graph: ng.NetworkGraph):
        self.sample_path = sample_path
        self.net_graph = net_graph
        self.sets_of_cims_struct = None
        self.single_set_of_cims = None

    def init_sets_cims_container(self):
        self.sets_of_cims_struct = acims.SetsOfCimsContainer(self.net_graph.nodes,
                                                             self.net_graph.nodes_values,
                                                             self.net_graph.get_ordered_by_indx_parents_values_for_all_nodes(),
                                                             self.net_graph.p_combs)

    def fast_init(self, node_id: str):
        """
        Initializes all the structures needed for the parameters estimation of the node identified by the label node_id.

        Parameters:
            node_id: the node label
        Returns:
            void
        """
        p_vals = self.net_graph.aggregated_info_about_nodes_parents[2]
        node_states_number = self.net_graph.get_states_number(node_id)
        self.single_set_of_cims = sofc.SetOfCims(node_id, p_vals, node_states_number, self.net_graph.p_combs)
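
Paired with NetworkGraph.fast_init, this gives the per-node estimation path used by the structure estimator further down. A hedged sketch of the intended flow (sample_path stands for an already-built sp.SamplePath; the imports mirror structure_estimator.py):

    import network_graph as ng
    import parameters_estimator as pe

    g1 = ng.NetworkGraph(sample_path.structure)
    g1.fast_init('X')
    p1 = pe.ParametersEstimator(sample_path, g1)
    p1.fast_init('X')
    sofc1 = p1.compute_parameters_for_node('X')   # a SetOfCims with one CIM per parents combination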
    def compute_parameters(self):
        #print(self.net_graph.get_nodes())
        #print(self.amalgamated_cims_struct.sets_of_cims)
        #enumerate(zip(self.net_graph.get_nodes(), self.amalgamated_cims_struct.sets_of_cims))
        for indx, aggr in enumerate(zip(self.net_graph.nodes, self.sets_of_cims_struct.sets_of_cims)):
            #print(self.net_graph.time_filtering[indx])
            #print(self.net_graph.time_scalar_indexing_strucure[indx])
            self.compute_state_res_time_for_node(self.net_graph.get_node_indx(aggr[0]), self.sample_path.trajectories.times,
@@ -43,7 +62,7 @@ class ParametersEstimator:
                                             self.net_graph.transition_scalar_indexing_structure[indx],
                                             aggr[1].transition_matrices)
            aggr[1].build_cims(aggr[1].state_residence_times, aggr[1].transition_matrices)

    """
    def compute_parameters_for_node(self, node_id):
        pos_index = self.net_graph.get_positional_node_indx(node_id)
        node_indx = self.net_graph.get_node_indx(node_id)
@@ -64,57 +83,81 @@ class ParametersEstimator:
                                                  transition_matrices)
        self.sets_of_cims_struct.sets_of_cims[pos_index].build_cims(
            state_res_times,
            transition_matrices)  # TODO: this could return the SetOfCims object directly
    """

    def compute_parameters_for_node(self, node_id: str):
        """
        Computes the CIMs of the node identified by the label node_id.

        Parameters:
            node_id: the node label
        Returns:
            A SetOfCims object filled with the computed CIMs
        """
        node_indx = self.net_graph.get_node_indx(node_id)
        state_res_times = self.single_set_of_cims.state_residence_times
        transition_matrices = self.single_set_of_cims.transition_matrices
        trajectory = self.sample_path.trajectories.trajectory
        self.compute_state_res_time_for_node(node_indx, self.sample_path.trajectories.times,
                                             trajectory,
                                             self.net_graph.time_filtering,
                                             self.net_graph.time_scalar_indexing_strucure,
                                             state_res_times)
        self.compute_state_transitions_for_a_node(node_indx,
                                                  self.sample_path.trajectories.complete_trajectory,
                                                  self.net_graph.transition_filtering,
                                                  self.net_graph.transition_scalar_indexing_structure,
                                                  transition_matrices)
        self.single_set_of_cims.build_cims(state_res_times, transition_matrices)
        return self.single_set_of_cims

    def compute_state_res_time_for_node(self, node_indx: int, times: np.ndarray, trajectory: np.ndarray,
                                        cols_filter: np.ndarray, scalar_indexes_struct: np.ndarray, T: np.ndarray):
        """
        Computes the state residence times for a node and fills the matrix T with the results.

        Parameters:
            node_indx: the index of the node
            times: the time deltas vector
            trajectory: the trajectory
            cols_filter: the columns filtering structure
            scalar_indexes_struct: the indexing structure
            T: the state residence times vectors
        Returns:
            void
        """
        T[:] = np.bincount(np.sum(trajectory[:, cols_filter] * scalar_indexes_struct / scalar_indexes_struct[0], axis=1)
                           .astype(np.int), \
                           times,
                           minlength=scalar_indexes_struct[-1]).reshape(-1, T.shape[1])

    def compute_state_transitions_for_a_node(self, node_indx, trajectory, cols_filter, scalar_indexing, M):
        """
        Computes the state transition counts for a node and fills the matrices M with the results.

        Parameters:
            node_indx: the index of the node
            trajectory: the trajectory
            cols_filter: the columns filtering structure
            scalar_indexing: the indexing structure
            M: the state transition matrices
        Returns:
            void
        """
        diag_indices = np.array([x * M.shape[1] + x % M.shape[1] for x in range(M.shape[0] * M.shape[1])],
                                dtype=np.int64)
        trj_tmp = trajectory[trajectory[:, int(trajectory.shape[1] / 2) + node_indx].astype(np.int) >= 0]
        M[:] = np.bincount(np.sum(trj_tmp[:, cols_filter] * scalar_indexing / scalar_indexing[0], axis=1).astype(np.int),
                           minlength=scalar_indexing[-1]).reshape(-1, M.shape[1], M.shape[2])
        M_raveled = M.ravel()
        M_raveled[diag_indices] = 0
        M_raveled[diag_indices] = np.sum(M, axis=2).ravel()
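
Both estimators rely on the same trick: each trajectory row, restricted to the columns in cols_filter, is collapsed to a flat index with the scalar indexing vector, and np.bincount then accumulates per-configuration totals in one pass, weighted by the time deltas for residence times and unweighted for transition counts. A toy sketch of the residence-time case, with made-up numbers and a binary node with one binary parent:

    import numpy as np

    # Two columns: node state, parent state (already filtered with cols_filter).
    trajectory = np.array([[0, 0],
                           [1, 0],
                           [1, 1],
                           [0, 1]])
    times = np.array([0.5, 1.0, 0.2, 0.3])
    scalar_idx = np.array([2, 4])            # cumprod of [node_card, parent_card] = [2, 2*2]

    flat = np.sum(trajectory * scalar_idx / scalar_idx[0], axis=1).astype(int)
    # flat = node_state + parent_state * node_card  ->  [0, 1, 3, 2]
    T = np.bincount(flat, weights=times, minlength=scalar_idx[-1]).reshape(-1, 2)
    print(T)   # rows indexed by parent state, columns by node state
    # [[0.5 1. ]
    #  [0.3 0.2]]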

@@ -1,18 +1,23 @@
import numpy as np
import typing

import conditional_intensity_matrix as cim


class SetOfCims:
    """
    Aggregates all the CIMs of the node identified by the label node_id.

    :node_id: the node label
    :parents_states_number: the cardinalities of the parents
    :node_states_number: the cardinality of the node
    :p_combs: the relative p_combs structure
    :state_residence_times: matrix containing all the state residence time vectors for the node
    :transition_matrices: matrix containing all the transition matrices for the node
    :actual_cims: the CIMs of the node
    """

    def __init__(self, node_id: str, parents_states_number: typing.List, node_states_number: int, p_combs: np.ndarray):
        self.node_id = node_id
        self.parents_states_number = parents_states_number
        self.node_states_number = node_states_number
@@ -20,9 +25,17 @@ class SetOfCims:
        self.state_residence_times = None
        self.transition_matrices = None
        self.p_combs = p_combs
        self.build_times_and_transitions_structures()

    def build_times_and_transitions_structures(self):
        """
        Initializes the state residence times matrix and the state transition matrices to the correct dimensions.

        Parameters:
            void
        Returns:
            void
        """
        if not self.parents_states_number:
            self.state_residence_times = np.zeros((1, self.node_states_number), dtype=np.float)
            self.transition_matrices = np.zeros((1, self.node_states_number, self.node_states_number), dtype=np.int)
@@ -32,49 +45,41 @@ class SetOfCims:
            self.transition_matrices = np.zeros([np.prod(self.parents_states_number), self.node_states_number,
                                                 self.node_states_number], dtype=np.int)

    def build_cims(self, state_res_times: typing.List, transition_matrices: typing.List):
        for state_res_time_vector, transition_matrix in zip(state_res_times, transition_matrices):
            cim_to_add = cim.ConditionalIntensityMatrix(state_res_time_vector, transition_matrix)
            cim_to_add.compute_cim_coefficients()
            self.actual_cims.append(cim_to_add)
        self.actual_cims = np.array(self.actual_cims)
        self.transition_matrices = None
        self.state_residence_times = None

    def filter_cims_with_mask(self, mask_arr: np.ndarray, comb: typing.List):
        if mask_arr.size <= 1:
            return self.actual_cims
        else:
            tmp_parents_comb_from_ids = np.argwhere(np.all(self.p_combs[:, mask_arr] == comb, axis=1)).ravel()
            return self.actual_cims[tmp_parents_comb_from_ids]
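
filter_cims_with_mask keeps only the CIMs whose parents combination (a row of p_combs) agrees with comb on the positions marked by mask_arr. A toy sketch of the row-selection logic with a hand-built p_combs for three binary parents (values are illustrative):

    import numpy as np

    # one row per parents-state combination of three binary parents (8 rows)
    p_combs = np.array([[a, b, c] for c in range(2) for b in range(2) for a in range(2)])
    mask_arr = np.array([0, 2])    # positions of the parents kept in the conditioning set
    comb = [1, 0]                  # required values for those parents
    rows = np.argwhere(np.all(p_combs[:, mask_arr] == comb, axis=1)).ravel()
    print(rows)                    # -> [1 3]: indexes of the CIMs in actual_cims to keep
    # (when mask_arr.size <= 1 the method above simply returns every CIM)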
    @property
    def get_cims(self):
        return self.actual_cims

    def get_cims_number(self):
        return len(self.actual_cims)

    """
    def get_cim(self, index):
        flat_index = self.indexes_converter(index)
        return self.actual_cims[flat_index]

    def indexes_converter(self, indexes):
        assert len(indexes) == len(self.parents_states_number)
        vector_index = 0
        if not indexes:
            return vector_index
        else:
            for indx, value in enumerate(indexes):
                vector_index = vector_index * self.parents_states_number[indx] + indexes[indx]
            return vector_index"""

@@ -1,7 +1,6 @@
import set_of_cims as socim


class SetsOfCimsContainer:
    """
    Aggregates a set of SetOfCims objects.
@@ -24,13 +23,3 @@ class SetsOfCimsContainer:
    def get_cims_of_node(self, node_indx, cim_indx):
        return self.sets_of_cims[node_indx].get_cim(cim_indx)

@@ -72,7 +72,7 @@ class Structure:
            return set(self._nodes_labels_list) == set(other._nodes_labels_list) and \
                   np.array_equal(self._nodes_vals_arr, other._nodes_vals_arr) and \
                   np.array_equal(self._nodes_indexes_arr, other._nodes_indexes_arr) and \
                   self._edges_list == other._edges_list
        return NotImplemented

@@ -1,10 +1,11 @@
import numpy as np
import itertools
import networkx as nx
from scipy.stats import f as f_dist
from scipy.stats import chi2 as chi2_dist

import sample_path as sp
import structure as st
import network_graph as ng
import parameters_estimator as pe
@@ -12,8 +13,21 @@ import cache as ch


class StructureEstimator:
    """
    Has the task of estimating the network structure given the trajectories in the sample path.

    :sample_path: the sample_path object containing the trajectories and the real structure
    :exp_test_sign: the significance level for the exponential Hp test
    :chi_test_alfa: the significance level for the chi-square Hp test
    :nodes: the node labels
    :nodes_vals: the node cardinalities
    :nodes_indxs: the node indexes
    :complete_graph: the complete directed graph built using the node labels in nodes
    :cache: the Cache object
    """

    def __init__(self, sample_path: sp.SamplePath, exp_test_alfa: float, chi_test_alfa: float):
        self.sample_path = sample_path
        self.nodes = np.array(self.sample_path.structure.nodes_labels)
        #print("NODES", self.nodes)
@@ -26,11 +40,6 @@ class StructureEstimator:
        self.chi_test_alfa = chi_test_alfa
        self.cache = ch.Cache()

    def build_complete_graph(self, node_ids):
        complete_graph = nx.DiGraph()
        complete_graph.add_nodes_from(node_ids)
@@ -53,13 +62,13 @@ class StructureEstimator:
        cims_filter = sorted_parents != test_parent
        #print("PARENTS NO FROM MASK", cims_filter)
        if not p_set:
            #print("EMPTY PSET TRYING TO FIND", test_child)
            sofc1 = self.cache.find(test_child)
        else:
            sofc1 = self.cache.find(set(p_set))

        if not sofc1:
            #print("CACHE MISSS SOFC1")
            bool_mask1 = np.isin(self.nodes,complete_info)
            #print("Bool mask 1", bool_mask1)
            l1 = list(self.nodes[bool_mask1])
@@ -70,7 +79,8 @@ class StructureEstimator:
            eds1 = list(itertools.product(parent_set,test_child))
            s1 = st.Structure(l1, indxs1, vals1, eds1, tot_vars_count)
            g1 = ng.NetworkGraph(s1)
            #g1.init_graph()
            g1.fast_init(test_child)
            #print("M Vector", g1.transition_scalar_indexing_structure)
            #print("Time Vecotr", g1.time_scalar_indexing_strucure)
            #print("Time Filter", g1.time_filtering)
@@ -78,10 +88,11 @@ class StructureEstimator:
            #print("G1 NODES", g1.get_nodes())
            #print("G1 Edges", g1.get_edges())
            p1 = pe.ParametersEstimator(self.sample_path, g1)
            #p1.init_sets_cims_container()
            p1.fast_init(test_child)
            sofc1 = p1.compute_parameters_for_node(test_child)
            #sofc1 = p1.sets_of_cims_struct.sets_of_cims[g1.get_positional_node_indx(test_child)]
            if not p_set:
                self.cache.put(test_child, sofc1)
            else:
@@ -90,7 +101,7 @@ class StructureEstimator:
            #p_set.append(test_parent)
            p_set.insert(0, test_parent)
            if p_set:
                #print("FULL PSET TRYING TO FIND", p_set)
                #p_set.append(test_parent)
                #print("PSET ", p_set)
                #set_p_set = set(p_set)
@@ -105,7 +116,7 @@ class StructureEstimator:
                p2.compute_parameters_for_node(test_child)
                sofc2 = p2.sets_of_cims_struct.sets_of_cims[s2.get_positional_node_indx(test_child)]"""
                if not sofc2:
                    #print("Cache MISSS SOFC2")
                    complete_info.append(test_parent)
                    bool_mask2 = np.isin(self.nodes, complete_info)
                    #print("BOOL MASK 2",bool_mask2)
@@ -117,7 +128,8 @@ class StructureEstimator:
                    eds2 = list(itertools.product(p_set, test_child))
                    s2 = st.Structure(l2, indxs2, vals2, eds2, tot_vars_count)
                    g2 = ng.NetworkGraph(s2)
                    #g2.init_graph()
                    g2.fast_init(test_child)
                    #print("M Vector", g2.transition_scalar_indexing_structure)
                    #print("Time Vecotr", g2.time_scalar_indexing_strucure)
                    #print("Time Filter", g2.time_filtering)
@@ -125,9 +137,10 @@ class StructureEstimator:
                    #print("G2 Nodes", g2.get_nodes())
                    #print("G2 Edges", g2.get_edges())
                    p2 = pe.ParametersEstimator(self.sample_path, g2)
                    #p2.init_sets_cims_container()
                    p2.fast_init(test_child)
                    sofc2 = p2.compute_parameters_for_node(test_child)
                    #sofc2 = p2.sets_of_cims_struct.sets_of_cims[g2.get_positional_node_indx(test_child)]
                    #if p_set:
                    #set_p_set = set(p_set)
                    self.cache.put(set(p_set), sofc2)
@@ -235,12 +248,7 @@ class StructureEstimator:
    def generate_possible_sub_sets_of_size(self, u, size, parent_indx):
        list_without_test_parent = u[:]
        list_without_test_parent.remove(parent_indx)
        return map(list, itertools.combinations(list_without_test_parent, size))

    def remove_diagonal_elements(self, matrix):
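
generate_possible_sub_sets_of_size enumerates, for a candidate parent set u and the parent currently under test, every subset of a given size of the remaining parents; each such subset becomes a conditioning set tried by the CTPC independence tests above. A hedged sketch of the same itertools logic with illustrative labels:

    import itertools

    u = ['X', 'Y', 'W']
    test_parent = 'Y'
    size = 1
    rest = u[:]
    rest.remove(test_parent)
    subsets = list(map(list, itertools.combinations(rest, size)))
    print(subsets)   # [['X'], ['W']]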

@ -0,0 +1,61 @@
import unittest
import numpy as np
import cache as ch
import set_of_cims as soci
class TestCache(unittest.TestCase):
def test_init(self):
c1 = ch.Cache()
self.assertFalse(c1.list_of_sets_of_parents)
self.assertFalse(c1.actual_cache)
def test_put(self):
c1 = ch.Cache()
pset1 = {'X', 'Y'}
sofc1 = soci.SetOfCims('Z', [], 3, np.array([]))
c1.put(pset1, sofc1)
self.assertEqual(1, len(c1.actual_cache))
self.assertEqual(1, len(c1.list_of_sets_of_parents))
self.assertEqual(sofc1, c1.actual_cache[0])
pset2 = {'X'}
sofc2 = soci.SetOfCims('Z', [], 3, np.array([]))
c1.put(pset2, sofc2)
self.assertEqual(2, len(c1.actual_cache))
self.assertEqual(2, len(c1.list_of_sets_of_parents))
self.assertEqual(sofc2, c1.actual_cache[1])
def test_find(self):
c1 = ch.Cache()
pset1 = {'X', 'Y'}
sofc1 = soci.SetOfCims('Z', [], 3, np.array([]))
c1.put(pset1, sofc1)
self.assertEqual(1, len(c1.actual_cache))
self.assertEqual(1, len(c1.list_of_sets_of_parents))
self.assertIsInstance(c1.find(pset1), soci.SetOfCims)
self.assertEqual(sofc1, c1.find(pset1))
self.assertIsInstance(c1.find({'Y', 'X'}), soci.SetOfCims)
self.assertEqual(sofc1, c1.find({'Y', 'X'}))
self.assertIsNone(c1.find({'X'}))
def test_clear(self):
c1 = ch.Cache()
pset1 = {'X', 'Y'}
sofc1 = soci.SetOfCims('Z', [], 3, np.array([]))
c1.put(pset1, sofc1)
self.assertEqual(1, len(c1.actual_cache))
self.assertEqual(1, len(c1.list_of_sets_of_parents))
c1.clear()
self.assertFalse(c1.list_of_sets_of_parents)
self.assertFalse(c1.actual_cache)
if __name__ == '__main__':
unittest.main()

@@ -1,12 +1,14 @@
import unittest
import networkx as nx
import numpy as np
import itertools
from line_profiler import LineProfiler

import sample_path as sp
import network_graph as ng


class TestNetworkGraph(unittest.TestCase):

    @classmethod
    def setUpClass(cls):
@@ -18,26 +20,26 @@ class TestNetworkGraph(unittest.TestCase):
        g1 = ng.NetworkGraph(self.s1.structure)
        self.assertEqual(self.s1.structure, g1.graph_struct)
        self.assertIsInstance(g1.graph, nx.DiGraph)
        self.assertTrue(np.array_equal(g1._nodes_indexes, self.s1.structure.nodes_indexes))
        self.assertListEqual(g1._nodes_labels, self.s1.structure.nodes_labels)
        self.assertTrue(np.array_equal(g1._nodes_values, self.s1.structure.nodes_values))
        self.assertIsNone(g1._fancy_indexing)
        self.assertIsNone(g1.time_scalar_indexing_strucure)
        self.assertIsNone(g1.transition_scalar_indexing_structure)
        self.assertIsNone(g1.transition_filtering)
        self.assertIsNone(g1.p_combs)

    def test_add_nodes(self):
        g1 = ng.NetworkGraph(self.s1.structure)
        g1.add_nodes(self.s1.structure.nodes_labels)
        for n1, n2 in zip(g1.nodes, self.s1.structure.nodes_labels):
            self.assertEqual(n1, n2)

    def test_add_edges(self):
        g1 = ng.NetworkGraph(self.s1.structure)
        g1.add_edges(self.s1.structure.edges)
        for e in self.s1.structure.edges:
            self.assertIn(tuple(e), g1.edges)
"""def test_get_ordered_by_indx_set_of_parents(self):
g1 = ng.NetworkGraph(self.s1.structure)
g1.add_nodes(self.s1.structure.list_of_nodes_labels())
g1.add_edges(self.s1.structure.list_of_edges())
sorted_par_list_aggregated_info = g1.get_ordered_by_indx_set_of_parents(g1.get_nodes()[2])
self.test_aggregated_par_list_data(g1, g1.get_nodes()[2], sorted_par_list_aggregated_info)"""
def aux_aggregated_par_list_data(self, graph, node_id, sorted_par_list_aggregated_info): def aux_aggregated_par_list_data(self, graph, node_id, sorted_par_list_aggregated_info):
for indx, element in enumerate(sorted_par_list_aggregated_info): for indx, element in enumerate(sorted_par_list_aggregated_info):
@ -55,12 +57,12 @@ class TestNetworkGraph(unittest.TestCase):
def test_get_ord_set_of_par_of_all_nodes(self): def test_get_ord_set_of_par_of_all_nodes(self):
g1 = ng.NetworkGraph(self.s1.structure) g1 = ng.NetworkGraph(self.s1.structure)
g1.add_nodes(self.s1.structure.list_of_nodes_labels()) g1.add_nodes(self.s1.structure.nodes_labels)
g1.add_edges(self.s1.structure.list_of_edges()) g1.add_edges(self.s1.structure.edges)
sorted_list_of_par_lists = g1.get_ord_set_of_par_of_all_nodes() sorted_list_of_par_lists = g1.get_ord_set_of_par_of_all_nodes()
for node, par_list in zip(g1.get_nodes_sorted_by_indx(), sorted_list_of_par_lists): for node, par_list in zip(g1.nodes, sorted_list_of_par_lists):
self.aux_aggregated_par_list_data(g1, node, par_list) self.aux_aggregated_par_list_data(g1, node, par_list)
"""
def test_get_ordered_by_indx_parents_values_for_all_nodes(self): def test_get_ordered_by_indx_parents_values_for_all_nodes(self):
g1 = ng.NetworkGraph(self.s1.structure) g1 = ng.NetworkGraph(self.s1.structure)
g1.add_nodes(self.s1.structure.list_of_nodes_labels()) g1.add_nodes(self.s1.structure.list_of_nodes_labels())
@ -89,67 +91,67 @@ class TestNetworkGraph(unittest.TestCase):
self.assertEqual(par_indxs, aggr[1]) self.assertEqual(par_indxs, aggr[1])
def test_build_fancy_indexing_structure_offset(self): def test_build_fancy_indexing_structure_offset(self):
        pass  #TODO: the netgraph code must handle this case"""
def aux_build_time_scalar_indexing_structure_for_a_node(self, graph, node_indx, parents_indxs): def aux_build_time_scalar_indexing_structure_for_a_node(self, graph, node_id, parents_indxs, parents_labels, parents_vals):
time_scalar_indexing = graph.build_time_scalar_indexing_structure_for_a_node(node_indx, parents_indxs) time_scalar_indexing = graph.build_time_scalar_indexing_structure_for_a_node(node_id, parents_vals)
self.assertEqual(len(time_scalar_indexing), len(parents_indxs) + 1) self.assertEqual(len(time_scalar_indexing), len(parents_indxs) + 1)
merged_list = parents_indxs[:] merged_list = parents_labels[:]
merged_list.insert(0, node_indx) merged_list.insert(0, node_id)
#print(merged_list)
vals_list = [] vals_list = []
for node in merged_list: for node in merged_list:
vals_list.append(graph.get_states_number_by_indx(node)) vals_list.append(graph.get_states_number(node))
t_vec = np.array(vals_list) t_vec = np.array(vals_list)
t_vec = t_vec.cumprod() t_vec = t_vec.cumprod()
#print(t_vec)
self.assertTrue(np.array_equal(time_scalar_indexing, t_vec)) self.assertTrue(np.array_equal(time_scalar_indexing, t_vec))
def aux_build_transition_scalar_indexing_structure_for_a_node(self, graph, node_indx, parents_indxs): def aux_build_transition_scalar_indexing_structure_for_a_node(self, graph, node_id, parents_indxs, parents_labels,
transition_scalar_indexing = graph.build_transition_scalar_indexing_structure_for_a_node(node_indx, parents_values):
parents_indxs) transition_scalar_indexing = graph.build_transition_scalar_indexing_structure_for_a_node(node_id,
print(transition_scalar_indexing) parents_values)
self.assertEqual(len(transition_scalar_indexing), len(parents_indxs) + 2) self.assertEqual(len(transition_scalar_indexing), len(parents_indxs) + 2)
merged_list = parents_indxs[:] merged_list = parents_labels[:]
merged_list.insert(0, node_indx) merged_list.insert(0, node_id)
merged_list.insert(0, node_indx) merged_list.insert(0, node_id)
vals_list = [] vals_list = []
for node in merged_list: for node_id in merged_list:
vals_list.append(graph.get_states_number_by_indx(node)) vals_list.append(graph.get_states_number(node_id))
m_vec = np.array([vals_list]) m_vec = np.array([vals_list])
m_vec = m_vec.cumprod() m_vec = m_vec.cumprod()
self.assertTrue(np.array_equal(transition_scalar_indexing, m_vec)) self.assertTrue(np.array_equal(transition_scalar_indexing, m_vec))
def test_build_transition_scalar_indexing_structure(self): def test_build_transition_scalar_indexing_structure(self):
g1 = ng.NetworkGraph(self.s1.structure) g1 = ng.NetworkGraph(self.s1.structure)
g1.add_nodes(self.s1.structure.list_of_nodes_labels()) g1.add_nodes(self.s1.structure.nodes_labels)
g1.add_edges(self.s1.structure.list_of_edges()) g1.add_edges(self.s1.structure.edges)
g1.aggregated_info_about_nodes_parents = g1.get_ord_set_of_par_of_all_nodes() g1.aggregated_info_about_nodes_parents = g1.get_ord_set_of_par_of_all_nodes()
p_labels = [i[0] for i in g1.aggregated_info_about_nodes_parents]
p_vals = g1.get_ordered_by_indx_parents_values_for_all_nodes()
fancy_indx = g1.build_fancy_indexing_structure(0) fancy_indx = g1.build_fancy_indexing_structure(0)
print(fancy_indx) for node_id, p_i ,p_l, p_v in zip(g1.graph_struct.nodes_labels, fancy_indx, p_labels, p_vals):
for node_id, p_indxs in zip(g1.graph_struct.list_of_nodes_indexes(), fancy_indx): self.aux_build_transition_scalar_indexing_structure_for_a_node(g1, node_id, p_i ,p_l, p_v)
self.aux_build_transition_scalar_indexing_structure_for_a_node(g1, node_id, p_indxs)
def test_build_time_scalar_indexing_structure(self): def test_build_time_scalar_indexing_structure(self):
g1 = ng.NetworkGraph(self.s1.structure) g1 = ng.NetworkGraph(self.s1.structure)
g1.add_nodes(self.s1.structure.list_of_nodes_labels()) g1.add_nodes(self.s1.structure.nodes_labels)
g1.add_edges(self.s1.structure.list_of_edges()) g1.add_edges(self.s1.structure.edges)
g1.aggregated_info_about_nodes_parents = g1.get_ord_set_of_par_of_all_nodes() g1.aggregated_info_about_nodes_parents = g1.get_ord_set_of_par_of_all_nodes()
fancy_indx = g1.build_fancy_indexing_structure(0) fancy_indx = g1.build_fancy_indexing_structure(0)
p_labels = [i[0] for i in g1.aggregated_info_about_nodes_parents]
p_vals = g1.get_ordered_by_indx_parents_values_for_all_nodes()
#print(fancy_indx) #print(fancy_indx)
for node_id, p_indxs in zip(g1.graph_struct.list_of_nodes_indexes(), fancy_indx): for node_id, p_indxs, p_labels, p_v in zip(g1.graph_struct.nodes_labels, fancy_indx, p_labels, p_vals):
self.aux_build_time_scalar_indexing_structure_for_a_node(g1, node_id, p_indxs) self.aux_build_time_scalar_indexing_structure_for_a_node(g1, node_id, p_indxs, p_labels, p_v)
#TODO Sei arrivato QUI
def test_build_time_columns_filtering_structure(self): def test_build_time_columns_filtering_structure(self):
g1 = ng.NetworkGraph(self.s1.structure) g1 = ng.NetworkGraph(self.s1.structure)
g1.add_nodes(self.s1.structure.list_of_nodes_labels()) g1.add_nodes(self.s1.structure.nodes_labels)
g1.add_edges(self.s1.structure.list_of_edges()) g1.add_edges(self.s1.structure.edges)
g1.aggregated_info_about_nodes_parents = g1.get_ord_set_of_par_of_all_nodes() g1.aggregated_info_about_nodes_parents = g1.get_ord_set_of_par_of_all_nodes()
g1._fancy_indexing = g1.build_fancy_indexing_structure(0) g1._fancy_indexing = g1.build_fancy_indexing_structure(0)
g1.build_time_columns_filtering_structure() g1.build_time_columns_filtering_structure()
print(g1.time_filtering)
t_filter = []
for node_id, p_indxs in zip(g1.nodes, g1._fancy_indexing):
single_filter = []
single_filter.append(g1.get_node_indx(node_id))
single_filter.extend(p_indxs)
@ -160,47 +162,72 @@ class TestNetworkGraph(unittest.TestCase):
def test_build_transition_columns_filtering_structure(self):
g1 = ng.NetworkGraph(self.s1.structure)
g1.add_nodes(self.s1.structure.nodes_labels)
g1.add_edges(self.s1.structure.edges)
g1.aggregated_info_about_nodes_parents = g1.get_ord_set_of_par_of_all_nodes()
g1._fancy_indexing = g1.build_fancy_indexing_structure(0)
g1.build_transition_columns_filtering_structure()
print(g1.transition_filtering)
m_filter = []
for node_id, p_indxs in zip(g1.nodes, g1._fancy_indexing):
single_filter = []
single_filter.append(g1.get_node_indx(node_id) + g1.graph_struct.total_variables_number)
single_filter.append(g1.get_node_indx(node_id))
single_filter.extend(p_indxs)
m_filter.append(np.array(single_filter))
print(m_filter)
for a1, a2 in zip(g1.transition_filtering, m_filter):
self.assertTrue(np.array_equal(a1, a2))
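# Illustrative sketch, not part of the test file: for a hypothetical node with index 1, parent
# indexes [0, 2] and graph_struct.total_variables_number == 3, the expected transition filter
# built above would be np.array([1 + 3, 1, 0, 2]) == array([4, 1, 0, 2]): the node index offset
# by the total number of variables (presumably addressing the shifted next-state column), then
# the node's own column, then its parents' columns.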
def test_build_p_combs_structure(self):
g1 = ng.NetworkGraph(self.s1.structure)
g1.add_nodes(self.s1.structure.nodes_labels)
g1.add_edges(self.s1.structure.edges)
g1.aggregated_info_about_nodes_parents = g1.get_ord_set_of_par_of_all_nodes()
p_vals = g1.get_ordered_by_indx_parents_values_for_all_nodes()
p_combs = g1.build_p_combs_structure()
for matrix, p_v in zip(p_combs, p_vals):
p_possible_vals = []
for val in p_v:
vals = [v for v in range(val)]
p_possible_vals.extend(vals)
comb_struct = set(itertools.product(p_possible_vals, repeat=len(p_v)))
#print(comb_struct)
for comb in comb_struct:
self.assertIn(np.array(comb), matrix)
def test_fast_init(self):
g1 = ng.NetworkGraph(self.s1.structure)
g2 = ng.NetworkGraph(self.s1.structure)
g1.init_graph()
for indx, node in enumerate(g1.nodes):
g2.fast_init(node)
self.assertListEqual(g2._fancy_indexing, g1._fancy_indexing[indx])
self.assertTrue(np.array_equal(g2.time_scalar_indexing_strucure, g1.time_scalar_indexing_strucure[indx]))
self.assertTrue(np.array_equal(g2.transition_scalar_indexing_structure, g1.transition_scalar_indexing_structure[indx]))
self.assertTrue(np.array_equal(g2.time_filtering, g1.time_filtering[indx]))
self.assertTrue(np.array_equal(g2.transition_filtering, g1.transition_filtering[indx]))
self.assertTrue(np.array_equal(g2.p_combs, g1.p_combs[indx]))
def test_get_parents_by_id(self):
g1 = ng.NetworkGraph(self.s1.structure)
g1.add_nodes(self.s1.structure.nodes_labels)
g1.add_edges(self.s1.structure.edges)
for node in g1.nodes:
self.assertListEqual(g1.get_parents_by_id(node), list(g1.graph.predecessors(node)))
def test_get_states_number(self):
g1 = ng.NetworkGraph(self.s1.structure)
g1.add_nodes(self.s1.structure.nodes_labels)
g1.add_edges(self.s1.structure.edges)
for node, val in zip(g1.nodes, g1.nodes_values):
self.assertEqual(val, g1.get_states_number(node))
def test_get_node_indx(self):
g1 = ng.NetworkGraph(self.s1.structure)
g1.add_nodes(self.s1.structure.nodes_labels)
g1.add_edges(self.s1.structure.edges)
for node, indx in zip(g1.nodes, g1.nodes_indexes):
self.assertEqual(indx, g1.get_node_indx(node))
#TODO: tests for the properties and the various getters are still missing
if __name__ == '__main__':
unittest.main()

@ -4,6 +4,7 @@ from line_profiler import LineProfiler
import network_graph as ng
import sample_path as sp
import set_of_cims as sofc
import sets_of_cims_container as scc
import parameters_estimator as pe
import json_importer as ji
@ -19,35 +20,33 @@ class TestParametersEstimatior(unittest.TestCase):
cls.g1 = ng.NetworkGraph(cls.s1.structure)
cls.g1.init_graph()
def test_fast_init(self):
for node in self.g1.nodes:
g = ng.NetworkGraph(self.s1.structure)
g.fast_init(node)
p1 = pe.ParametersEstimator(self.s1, g)
self.assertEqual(p1.sample_path, self.s1)
self.assertEqual(p1.net_graph, g)
self.assertIsNone(p1.single_set_of_cims)
p1.fast_init(node)
self.assertIsInstance(p1.single_set_of_cims, sofc.SetOfCims)
def test_compute_parameters_for_node(self):
for indx, node in enumerate(self.g1.nodes):
print(node)
g = ng.NetworkGraph(self.s1.structure)
g.fast_init(node)
p1 = pe.ParametersEstimator(self.s1, g)
p1.fast_init(node)
sofc1 = p1.compute_parameters_for_node(node)
sampled_cims = self.aux_import_sampled_cims('dyn.cims')
sc = list(sampled_cims.values())
print(sc[indx])
self.equality_of_cims_of_node(sc[indx], sofc1.actual_cims)
def equality_of_cims_of_node(self, sampled_cims, estimated_cims):
self.assertEqual(len(sampled_cims), len(estimated_cims))
for c1, c2 in zip(sampled_cims, estimated_cims):
self.cim_equality_test(c1, c2.cim)
@ -56,27 +55,31 @@ class TestParametersEstimatior(unittest.TestCase):
for r1, r2 in zip(cim1, cim2):
self.assertTrue(np.all(np.isclose(r1, r2, 1e-01, 1e-01) == True))
def aux_import_sampled_cims(self, cims_label):
i1 = ji.JsonImporter('../data', '', '', '', '', '')
raw_data = i1.read_json_file()
return i1.import_sampled_cims(raw_data, 0, cims_label)
"""
def test_init(self):
self.aux_test_init(self.s1, self.g1)
def test_init_sets_of_cims_container(self):
self.aux_test_init_sets_cims_container(self.s1, self.g1)
def aux_test_init(self, sample_p, graph):
pe1 = pe.ParametersEstimator(sample_p, graph)
self.assertEqual(sample_p, pe1.sample_path)
self.assertEqual(graph, pe1.net_graph)
self.assertIsNone(pe1.sets_of_cims_struct)
def aux_test_init_sets_cims_container(self, sample_p, graph):
pe1 = pe.ParametersEstimator(sample_p, graph)
pe1.init_sets_cims_container()
self.assertIsInstance(pe1.sets_of_cims_struct, scc.SetsOfCimsContainer)
def test_compute_parameters(self):
self.aux_test_compute_parameters(self.s1, self.g1)
"""
if __name__ == '__main__':
unittest.main()

@ -11,64 +11,84 @@ class TestSetOfCims(unittest.TestCase):
def setUpClass(cls) -> None:
cls.node_id = 'X'
cls.possible_cardinalities = [2, 3]
cls.possible_states = [[0,1], [0, 1, 2]]
cls.node_states_number = range(2, 4)
def test_init(self):
# empty parent set
for sn in self.node_states_number:
p_combs = self.build_p_comb_structure_for_a_node([])
self.aux_test_init(self.node_id, [], sn, p_combs)
# one parent
for sn in self.node_states_number:
for p in itertools.product(self.possible_cardinalities, repeat=1):
p_combs = self.build_p_comb_structure_for_a_node(list(p))
self.aux_test_init(self.node_id, list(p), sn, p_combs)
# two parents
for sn in self.node_states_number:
for p in itertools.product(self.possible_cardinalities, repeat=2):
p_combs = self.build_p_comb_structure_for_a_node(list(p))
self.aux_test_init(self.node_id, list(p), sn, p_combs)
def test_build_cims(self):
# empty parent set
for sn in self.node_states_number:
p_combs = self.build_p_comb_structure_for_a_node([])
self.aux_test_build_cims(self.node_id, [], sn, p_combs)
# one parent
for sn in self.node_states_number:
for p in itertools.product(self.possible_cardinalities, repeat=1):
p_combs = self.build_p_comb_structure_for_a_node(list(p))
self.aux_test_build_cims(self.node_id, list(p), sn, p_combs)
# two parents
for sn in self.node_states_number:
for p in itertools.product(self.possible_cardinalities, repeat=2):
p_combs = self.build_p_comb_structure_for_a_node(list(p))
self.aux_test_build_cims(self.node_id, list(p), sn, p_combs)
def test_filter_cims_with_mask(self):
p_combs = self.build_p_comb_structure_for_a_node(self.possible_cardinalities)
sofc1 = soci.SetOfCims('X', self.possible_cardinalities, 3, p_combs)
state_res_times_list = []
transition_matrices_list = []
for i in range(len(p_combs)):
state_res_times = np.random.rand(1, 3)[0]
state_res_times = state_res_times * 1000
state_transition_matrix = np.random.randint(1, 10000, (3, 3))
state_res_times_list.append(state_res_times)
transition_matrices_list.append(state_transition_matrix)
sofc1.build_cims(state_res_times_list, transition_matrices_list)
for length_of_mask in range(3):
for mask in list(itertools.permutations([True, False], r=length_of_mask)):
m = np.array(mask)
for parent_value in range(self.possible_cardinalities[0]):
cims = sofc1.filter_cims_with_mask(m, [parent_value])
if length_of_mask == 0 or length_of_mask == 1:
self.assertTrue(np.array_equal(sofc1.actual_cims, cims))
else:
indxs = self.another_filtering_method(p_combs, m, [parent_value])
self.assertTrue(np.array_equal(cims, sofc1.actual_cims[indxs]))
def aux_test_build_cims(self, node_id, p_values, node_states, p_combs):
state_res_times_list = []
transition_matrices_list = []
so1 = soci.SetOfCims(node_id, p_values, node_states, p_combs)
for i in range(len(p_combs)):
state_res_times = np.random.rand(1, node_states)[0]
state_res_times = state_res_times * 1000
state_transition_matrix = np.random.randint(1, 10000, (node_states, node_states))
state_res_times_list.append(state_res_times)
transition_matrices_list.append(state_transition_matrix)
so1.build_cims(state_res_times_list, transition_matrices_list)
self.assertEqual(len(state_res_times_list), so1.get_cims_number())
self.assertIsInstance(so1.actual_cims, np.ndarray)
self.assertIsNone(so1.transition_matrices)
self.assertIsNone(so1.state_residence_times)
def aux_test_init(self, node_id, parents_states_number, node_states_number, p_combs):
sofcims = soci.SetOfCims(node_id, parents_states_number, node_states_number, p_combs)
self.assertEqual(sofcims.node_id, node_id)
self.assertTrue(np.array_equal(sofcims.p_combs, p_combs))
self.assertTrue(np.array_equal(sofcims.parents_states_number, parents_states_number))
self.assertEqual(sofcims.node_states_number, node_states_number)
self.assertFalse(sofcims.actual_cims)
@ -77,7 +97,46 @@ class TestSetOfCims(unittest.TestCase):
self.assertEqual(sofcims.transition_matrices.shape[0], np.prod(np.array(parents_states_number)))
self.assertEqual(len(sofcims.transition_matrices[0][0]), node_states_number)
def aux_test_indexes_converter(self, node_id, parents_states_number, node_states_number):
sofcims = soci.SetOfCims(node_id, parents_states_number, node_states_number)
if not parents_states_number:
self.assertEqual(sofcims.indexes_converter([]), 0)
else:
parents_possible_values = []
for cardi in parents_states_number:
parents_possible_values.extend(range(0, cardi))
for p in itertools.permutations(parents_possible_values, len(parents_states_number)):
self.assertEqual(sofcims.indexes_converter(list(p)), np.ravel_multi_index(list(p), parents_states_number))
def build_p_comb_structure_for_a_node(self, parents_values):
"""
Builds the combinatory structure that contains the combinations of all the values contained in parents_values.
Parameters:
parents_values: the cardinalities of the nodes
Returns:
a numpy matrix containing a grid of the combinations
"""
tmp = []
for val in parents_values:
tmp.append([x for x in range(val)])
if len(parents_values) > 0:
parents_comb = np.array(np.meshgrid(*tmp)).T.reshape(-1, len(parents_values))
if len(parents_values) > 1:
tmp_comb = parents_comb[:, 1].copy()
parents_comb[:, 1] = parents_comb[:, 0].copy()
parents_comb[:, 0] = tmp_comb
else:
parents_comb = np.array([[]], dtype=np.int)
return parents_comb
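# Illustrative sketch, not part of the test file: for parents_values == [2, 3] the helper above
# first builds the meshgrid rows [[0, 0], [0, 1], [0, 2], [1, 0], [1, 1], [1, 2]] and then swaps
# the first two columns, so the expected result is
#   np.array([[0, 0], [1, 0], [2, 0], [0, 1], [1, 1], [2, 1]])
# one row per combination of parent values, with the first column varying fastest.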
def another_filtering_method(self, p_combs, mask, parent_value):
masked_combs = p_combs[:, mask]
indxs = []
for indx, val in enumerate(masked_combs):
if val == parent_value:
indxs.append(indx)
return np.array(indxs)
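# Illustrative sketch, not part of the test file: with the [2, 3] grid above, a mask
# np.array([True, False]) and parent_value == [1], the column selected by the mask is
# [0, 1, 2, 0, 1, 2], which equals 1 at positions 1 and 4, so this reference method returns
# np.array([1, 4]); test_filter_cims_with_mask then checks that filter_cims_with_mask selects
# exactly the CIMs at those positions of actual_cims.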
if __name__ == '__main__':
unittest.main()