
Implemented fast parameters estimation algorithm

parallel_struct_est
philpMartin 4 years ago
parent 3373f0f76e
commit cd678b6a5b
  1. main_package/classes/amalgamated_cims.py (7 changed lines)
  2. main_package/classes/conditional_intensity_matrix.py (21 changed lines)
  3. main_package/classes/json_importer.py (23 changed lines)
  4. main_package/classes/network_graph.py (81 changed lines)
  5. main_package/classes/parameters_estimator.py (180 changed lines)
  6. main_package/classes/sample_path.py (9 changed lines)
  7. main_package/classes/set_of_cims.py (42 changed lines)
  8. main_package/classes/structure.py (3 changed lines)
  9. main_package/classes/trajectory.py (6 changed lines)

@@ -14,8 +14,8 @@ class AmalgamatedCims:
#self.states_per_variable = states_number
def init_cims_structure(self, keys, states_number_per_node, list_of_parents_states_number):
print(keys)
print(list_of_parents_states_number)
#print(keys)
#print(list_of_parents_states_number)
for indx, key in enumerate(keys):
self.sets_of_cims.append(
socim.SetOfCims(key, list_of_parents_states_number[indx], states_number_per_node[indx]))
@@ -23,6 +23,9 @@ class AmalgamatedCims:
def get_set_of_cims(self, node_id):
return self.sets_of_cims[node_id]
def get_cims_of_node(self, node_indx, cim_indx):
return self.sets_of_cims[node_indx].get_cim(cim_indx)
def get_vars_order(self, node):
return self.actual_cims[node][1]

@@ -3,10 +3,11 @@ import numpy as np
class ConditionalIntensityMatrix:
def __init__(self, dimension):
self.state_residence_times = np.zeros(shape=dimension)
self.state_transition_matrix = np.zeros(shape=(dimension, dimension), dtype=int)
self.cim = np.zeros(shape=(dimension, dimension), dtype=float)
def __init__(self, dimension, state_residence_times, state_transition_matrix):
self.state_residence_times = state_residence_times
self.state_transition_matrix = state_transition_matrix
#self.cim = np.zeros(shape=(dimension, dimension), dtype=float)
self.cim = self.state_transition_matrix.astype(np.float)
def update_state_transition_count(self, element_indx):
#print(element_indx)
@@ -18,11 +19,9 @@ class ConditionalIntensityMatrix:
self.state_residence_times[state] += time
def compute_cim_coefficients(self):
for i, row in enumerate(self.state_transition_matrix):
row_sum = 0.0
for j, elem in enumerate(row):
rate_coefficient = elem / self.state_residence_times[i]
self.cim[i][j] = rate_coefficient
row_sum = row_sum + rate_coefficient
self.cim[i][i] = -1 * row_sum
np.fill_diagonal(self.cim, self.cim.diagonal() * -1)
self.cim = ((self.cim.T + 1) / (self.state_residence_times + 1)).T
def __repr__(self):
return 'CIM:\n' + str(self.cim)
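The rewritten compute_cim_coefficients replaces the per-row Python loop with two vectorized operations: np.fill_diagonal negates the diagonal (which holds the total number of transitions out of each state), and a single broadcasted division turns counts into rates; the +1 offsets appear to guard against division by zero for never-visited states. A minimal standalone sketch of the same computation on made-up sufficient statistics (all values here are illustrative):

import numpy as np

# hypothetical sufficient statistics for one 3-state node:
# off-diagonal entries count transitions, the diagonal holds the total exits per state
transition_counts = np.array([[5., 2., 3.],
                              [1., 4., 3.],
                              [2., 2., 4.]])
residence_times = np.array([10.0, 8.0, 12.0])

cim = transition_counts.copy()
np.fill_diagonal(cim, cim.diagonal() * -1)        # q_ii becomes the negated total exit count
cim = ((cim.T + 1) / (residence_times + 1)).T     # divide every row i by residence_times[i]
print(cim)    # rate-matrix estimate; without the +1 offsets each row would sum to zero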

@@ -90,28 +90,31 @@ class JsonImporter(AbstractImporter):
"""
for sample_indx, sample in enumerate(raw_data[indx][trajectories_key]):
self.df_samples_list.append(pd.json_normalize(raw_data[indx][trajectories_key][sample_indx]))
#print(sample_indx, self.df_samples_list[sample_indx])
def compute_row_delta_sigle_samples_frame(self, sample_frame):
columns_header = list(sample_frame.columns.values)
# print(columns_header)
#print(columns_header)
for col_name in columns_header:
if col_name == 'Time':
sample_frame[col_name + 'Delta'] = sample_frame[col_name].diff()
else:
sample_frame[col_name + 'Delta'] = (sample_frame[col_name].diff().bfill() != 0).astype(int)
sample_frame[col_name + 'Delta'] = sample_frame[col_name]
sample_frame['Time'] = sample_frame['TimeDelta']
del sample_frame['TimeDelta']
sample_frame['Time'] = sample_frame['Time'].shift(-1)
columns_header = list(sample_frame.columns.values)
#print(columns_header[4:])
#print(columns_header[4:]) #TODO remove the direct dependency on 'Time' and 4
for column in columns_header[4:]:
sample_frame[column] = sample_frame[column].shift(1)
sample_frame[column] = sample_frame[column].fillna(0)
sample_frame[column] = sample_frame[column].shift(-1)
#sample_frame[column] = sample_frame[column].fillna(0)"""
sample_frame.drop(sample_frame.tail(1).index, inplace=True)
#print(sample_frame)
#print("After Time Delta",sample_frame)
def compute_row_delta_in_all_samples_frames(self):
for sample in self.df_samples_list:
for indx, sample in enumerate(self.df_samples_list):
#print(indx)
#print(self.df_samples_list[299])
self.compute_row_delta_sigle_samples_frame(sample)
self.concatenated_samples = pd.concat(self.df_samples_list)
@@ -139,6 +142,7 @@ class JsonImporter(AbstractImporter):
"""
for indx in range(len(self.df_samples_list)):
self.df_samples_list[indx] = self.df_samples_list[indx].iloc[0:0]
self.concatenated_samples = self.concatenated_samples.iloc[0:0]
"""ij = JsonImporter("../data")
@@ -146,6 +150,7 @@ ij.import_data()
#print(ij.df_samples_list[7])
print(ij.df_structure)
print(ij.df_variables)
print(ij.concatenated_samples)
#print((ij.build_list_of_samples_array(0)[1].size))
ij.compute_row_delta_in_all_samples_frames()
print(ij.df_samples_list[0])"""
#ij.compute_row_delta_in_all_samples_frames()
#print(ij.df_samples_list[0])"""
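The reworked compute_row_delta_sigle_samples_frame turns the absolute Time column into per-row deltas and shifts them, together with the trailing indicator columns, by one position, so that each row carries the sojourn time and the transition that leaves the state it describes; the last row, which has no successor, is dropped. A minimal pandas sketch of that diff/shift/drop pattern on a made-up one-variable frame (the column names here are illustrative, not the importer's):

import pandas as pd

df = pd.DataFrame({'Time':  [0.0, 1.2, 3.0, 3.5],
                   'X':     [0,   1,   1,   2],
                   'XFlag': [0,   1,   0,   1]})   # hypothetical per-variable change indicator

df['Time'] = df['Time'].diff()           # delta between consecutive time stamps
df['Time'] = df['Time'].shift(-1)        # row i now holds the time spent before row i+1
df['XFlag'] = df['XFlag'].shift(-1)      # align the indicator with the row the transition leaves
df.drop(df.tail(1).index, inplace=True)  # the final row has no successor
print(df)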

@@ -19,10 +19,18 @@ class NetworkGraph():
def __init__(self, graph_struct):
self.graph_struct = graph_struct
self.graph = nx.DiGraph()
self.scalar_indexing_structure = []
self.transition_scalar_indexing_structure = []
self.filtering_structure = []
self.transition_filtering = []
def init_graph(self):
self.add_nodes(self.graph_struct.list_of_nodes())
self.add_edges(self.graph_struct.list_of_edges())
self.build_scalar_indexing_structure()
self.build_columns_filtering_structure()
self.build_transition_scalar_indexing_structure()
self.build_transition_columns_filtering_structure()
def add_nodes(self, list_of_nodes):
for indx, id in enumerate(list_of_nodes):
@@ -77,6 +85,60 @@ class NetworkGraph():
index_structure.append(np.array(indexes_for_a_node, dtype=np.int))
return index_structure
def build_scalar_indexing_structure_for_a_node(self, node_id, parents_id):
print(parents_id)
T_vector = np.array([self.graph_struct.variables_frame.iloc[node_id, 1].astype(np.int)])
print(T_vector)
T_vector = np.append(T_vector, [self.graph_struct.variables_frame.iloc[x, 1] for x in parents_id])
print(T_vector)
T_vector = T_vector.cumprod().astype(np.int)
return T_vector
print(T_vector)
def build_scalar_indexing_structure(self):
parents_indexes_list = self.build_fancy_indexing_structure(0)
for node_indx, p_indxs in enumerate(parents_indexes_list):
if p_indxs.size == 0:
self.scalar_indexing_structure.append(np.array([self.get_states_number_by_indx(node_indx)], dtype=np.int))
else:
self.scalar_indexing_structure.append(
self.build_scalar_indexing_structure_for_a_node(node_indx, p_indxs))
def build_transition_scalar_indexing_structure_for_a_node(self, node_id, parents_id):
M_vector = np.array([self.graph_struct.variables_frame.iloc[node_id, 1],
self.graph_struct.variables_frame.iloc[node_id, 1].astype(np.int)])
M_vector = np.append(M_vector, [self.graph_struct.variables_frame.iloc[x, 1] for x in parents_id])
M_vector = M_vector.cumprod().astype(np.int)
return M_vector
def build_transition_scalar_indexing_structure(self):
parents_indexes_list = self.build_fancy_indexing_structure(0)
for node_indx, p_indxs in enumerate(parents_indexes_list):
"""if p_indxs.size == 0:
self.scalar_indexing_structure.append(
np.array([self.get_states_number_by_indx(node_indx)], dtype=np.int))
else:"""
self.transition_scalar_indexing_structure.append(
self.build_transition_scalar_indexing_structure_for_a_node(node_indx, p_indxs))
def build_columns_filtering_structure(self):
parents_indexes_list = self.build_fancy_indexing_structure(0)
for node_indx, p_indxs in enumerate(parents_indexes_list):
if p_indxs.size == 0:
self.filtering_structure.append(np.append(p_indxs, np.array([node_indx], dtype=np.int)))
else:
self.filtering_structure.append(np.append(np.array([node_indx], dtype=np.int), p_indxs))
def build_transition_columns_filtering_structure(self):
parents_indexes_list = self.build_fancy_indexing_structure(0)
nodes_number = len(parents_indexes_list)
for node_indx, p_indxs in enumerate(parents_indexes_list):
#if p_indxs.size == 0:
#self.filtering_structure.append(np.append(p_indxs, np.array([node_indx], dtype=np.int)))
#else:
self.transition_filtering.append(np.array([node_indx + nodes_number, node_indx, *p_indxs], dtype=np.int))
def get_nodes(self):
return list(self.graph.nodes)
@@ -89,6 +151,9 @@ class NetworkGraph():
def get_states_number(self, node_id):
return self.graph_struct.get_states_number(node_id)
def get_states_number_by_indx(self, node_indx):
return self.graph_struct.get_states_number_by_indx(node_indx)
def get_node_by_index(self, node_indx):
return self.graph_struct.get_node_id(node_indx)
@@ -118,8 +183,16 @@ for node in g1.get_parents_by_id('Z'):
# print(g1.get_node_by_index(node))
print(node)
print(g1.get_ordered_by_indx_parents_values_for_all_nodes())
print(g1.build_fancy_indexing_structure())
print(g1.get_states_number_of_all_nodes_sorted())"""
#print(g1.build_fancy_indexing_structure(0))
#print(g1.get_states_number_of_all_nodes_sorted())
g1.build_scalar_indexing_structure()
print(g1.scalar_indexing_structure)
print(g1.build_columns_filtering_structure())
g1.build_transition_scalar_indexing_structure()
print(g1.transition_scalar_indexing_structure)
g1.build_transition_columns_filtering_structure()
print(g1.transition_filtering)
[array([3, 9]), array([ 3, 9, 27]), array([ 3, 9, 27, 81])]
[array([3, 0]), array([4, 1, 2]), array([5, 2, 0, 1])]"""
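The new *_indexing and *_filtering structures are what make the vectorized estimator possible: for each node, the cumulative product of its cardinality and its parents' cardinalities (the vectors printed in the commented test output above) lets a joint (node state, parent states) assignment be collapsed into one scalar, which np.bincount can then count directly. A small worked example of that conversion, assuming a ternary node with one ternary parent as in the test network:

import numpy as np

scalar_indexing = np.array([3, 9])   # cumprod([3, 3]): node cardinality, then parent cardinality
values = np.array([2, 1])            # hypothetical row: node in state 2, parent in state 1

# same expression used later in compute_state_res_time_for_node
flat = int(np.sum(values * scalar_indexing / scalar_indexing[0]))
print(flat)                          # 2 + 1 * 3 = 5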

@@ -1,7 +1,6 @@
import os
import time as tm
from line_profiler import LineProfiler
from multiprocessing import Process
import numba as nb
import numpy as np
@@ -15,7 +14,10 @@ class ParametersEstimator:
def __init__(self, sample_path, net_graph):
self.sample_path = sample_path
self.net_graph = net_graph
self.fancy_indexing_structure = self.net_graph.build_fancy_indexing_structure(1)
self.scalar_indexes_converter = self.net_graph.scalar_indexing_structure
self.columns_filtering_structure = self.net_graph.filtering_structure
self.transition_scalar_index_converter = self.net_graph.transition_scalar_indexing_structure
self.transition_filtering = self.net_graph.transition_filtering
self.amalgamated_cims_struct = None
def init_amalgamated_cims_struct(self):
@@ -37,16 +39,21 @@ class ParametersEstimator:
row_length = trajectory.shape[1]
for indx, row in enumerate(trajectory[:-1]):
self.compute_sufficient_statistics_for_row(trajectory[indx], trajectory[indx + 1], row_length)
self.compute_sufficient_statistics_for_trajectory(trajectory.times, trajectory.actual_trajectory, trajectory.transitions, row_length)
def compute_sufficient_statistics_for_row(self, current_row, next_row, row_length):
def compute_sufficient_statistics_for_trajectory(self, times, traj_values, traj_transitions, row_length):
#time = self.compute_time_delta(current_row, next_row)
time = current_row[0]
for indx in range(1, row_length):
if current_row[indx] != next_row[indx] and next_row[indx] != -1:
transition = [indx - 1, (current_row[indx], next_row[indx])]
which_node = transition[0]
which_matrix = self.which_matrix_to_update(current_row, transition[0])
#time = current_row[0]
print(times)
print(traj_values)
print(traj_transitions)
for row in traj_transitions:
time = times[0]
for indx in range(0, row_length):
if row[indx] == 1:
which_node = indx
transition = [which_node, (traj_values[indx - 1], traj_values[indx])]
which_matrix = self.which_matrix_to_update(row, which_node)
which_element = transition[1]
self.amalgamated_cims_struct.update_state_transition_for_matrix(which_node, which_matrix, which_element)
which_element = transition[1][0]
@@ -54,9 +61,9 @@ class ParametersEstimator:
which_element,
time)
else:
which_node = indx - 1
which_matrix = self.which_matrix_to_update(current_row, which_node)
which_element = current_row[indx]
which_node = indx
which_matrix = self.which_matrix_to_update(row, which_node)
which_element = row[indx]
self.amalgamated_cims_struct.update_state_residence_time_for_matrix(
which_node, which_matrix, which_element, time)
@@ -181,6 +188,72 @@ class ParametersEstimator:
#t1 = tm.time() - t0
#print("Elapsed Time ", t1)
def compute_parameters(self):
for node_indx, set_of_cims in enumerate(self.amalgamated_cims_struct.sets_of_cims):
self.compute_state_res_time_for_node(node_indx, self.sample_path.trajectories[0].get_times(),
self.sample_path.trajectories[0].get_trajectory(),
self.columns_filtering_structure[node_indx],
self.scalar_indexes_converter[node_indx],
set_of_cims.state_residence_times)
self.compute_state_transitions_for_a_node(node_indx,
self.sample_path.trajectories[0].get_complete_trajectory(),
self.transition_filtering[node_indx],
self.transition_scalar_index_converter[node_indx],
set_of_cims.transition_matrices)
set_of_cims.build_cims(set_of_cims.state_residence_times, set_of_cims.transition_matrices)
def compute_state_res_time_for_node(self, node_indx, times, trajectory, cols_filter, scalar_indexes_struct, T):
#print(times)
#print(trajectory)
#print(cols_filter)
#print(scalar_indexes_struct)
#print(T)
T[:] = np.bincount(np.sum(trajectory[:, cols_filter] * scalar_indexes_struct / scalar_indexes_struct[0], axis=1)
.astype(np.int), \
times,
minlength=scalar_indexes_struct[-1]).reshape(-1, T.shape[1])
#print("Done This NODE", T)
def compute_state_residence_time_for_all_nodes(self):
for node_indx, set_of_cims in enumerate(self.amalgamated_cims_struct.sets_of_cims):
self.compute_state_res_time_for_node(node_indx, self.sample_path.trajectories[0].get_times(),
self.sample_path.trajectories[0].get_trajectory(), self.columns_filtering_structure[node_indx],
self.scalar_indexes_converter[node_indx], set_of_cims.state_residence_times)
def compute_state_transitions_for_a_node(self, node_indx, trajectory, cols_filter, scalar_indexing, M):
#print(node_indx)
#print(trajectory)
#print(cols_filter)
#print(scalar_indexing)
#print(M)
diag_indices = np.array([x * M.shape[1] + x % M.shape[1] for x in range(M.shape[0] * M.shape[1])],
dtype=np.int64)
trj_tmp = trajectory[trajectory[:, int(trajectory.shape[1] / 2) + node_indx].astype(np.int) >= 0]
#print(trj_tmp)
#print("Summing", np.sum(trj_tmp[:, cols_filter] * scalar_indexing / scalar_indexing[0], axis=1).astype(np.int))
#print(M.shape[1])
#print(M.shape[2])
M[:] = np.bincount(np.sum(trj_tmp[:, cols_filter] * scalar_indexing / scalar_indexing[0], axis=1).astype(np.int),
minlength=scalar_indexing[-1]).reshape(-1, M.shape[1], M.shape[2])
M_raveled = M.ravel()
M_raveled[diag_indices] = 0
#print(M_raveled)
M_raveled[diag_indices] = np.sum(M, axis=2).ravel()
#print(M_raveled)
#print(M)
def compute_state_transitions_for_all_nodes(self):
for node_indx, set_of_cims in enumerate(self.amalgamated_cims_struct.sets_of_cims):
self.compute_state_transitions_for_a_node(node_indx, self.sample_path.trajectories[0].get_complete_trajectory(),
self.transition_filtering[node_indx],
self.transition_scalar_index_converter[node_indx], set_of_cims.transition_matrices)
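compute_state_res_time_for_node and compute_state_transitions_for_a_node are the core of the speed-up: every trajectory row is mapped to a flat index as above, and a single np.bincount call accumulates residence times (using the time deltas as weights) or transition counts, which are then reshaped back into one matrix per parent configuration. A self-contained sketch of the residence-time half for a parentless 3-state node, with invented data:

import numpy as np

states = np.array([0, 1, 1, 2, 0])            # state occupied in each trajectory row
times  = np.array([0.5, 1.0, 0.2, 2.0, 0.3])  # time spent in that state (the shifted deltas)
scalar_indexing = np.array([3])               # cumprod of the cardinalities, here just [3]

flat = (states * scalar_indexing / scalar_indexing[0]).astype(np.int64)
T = np.bincount(flat, weights=times, minlength=scalar_indexing[-1]).reshape(-1, 3)
print(T)                                      # [[0.8, 1.2, 2.0]]: one row per parent configuration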
# Simple Test #
@@ -197,15 +270,25 @@ g1.init_graph()
pe = ParametersEstimator(s1, g1)
pe.init_amalgamated_cims_struct()
print(pe.amalgamated_cims_struct.get_set_of_cims(0).get_cims_number())
print(pe.amalgamated_cims_struct.get_set_of_cims(1).get_cims_number())
print(pe.amalgamated_cims_struct.get_set_of_cims(2).get_cims_number())
print(np.shape(s1.trajectories[0].transitions)[0])
#print(pe.amalgamated_cims_struct.get_set_of_cims(0).get_cims_number())
#print(pe.amalgamated_cims_struct.get_set_of_cims(1).get_cims_number())
#print(pe.amalgamated_cims_struct.get_set_of_cims(2).get_cims_number())
#print(np.shape(s1.trajectories[0].transitions)[0])
#print(pe.columns_filtering_structure)
#print(pe.scalar_indexes_converter)
#print(pe.amalgamated_cims_struct.sets_of_cims[1].state_residence_times)
#print(pe.amalgamated_cims_struct.sets_of_cims[2].state_residence_times)
#print(pe.amalgamated_cims_struct.sets_of_cims[2].transition_matrices)
#print(pe.amalgamated_cims_struct.sets_of_cims[1].transition_matrices)
#print(pe.amalgamated_cims_struct.sets_of_cims[0].transition_matrices)
#pe.compute_state_transitions_for_all_nodes()
lp = LineProfiler()
"""pe.compute_state_residence_time_for_all_nodes()
#pe.parameters_estimation_for_variable(0, pe.sample_path.trajectories[0].get_trajectory()[:, 0],
# pe.sample_path.trajectories[0].get_trajectory()[:, 1], [])
#pe.parameters_estimation_single_trajectory(pe.sample_path.trajectories[0].get_trajectory())
#pe.parameters_estimation()
lp = LineProfiler()
#lp.add_function(pe.compute_sufficient_statistics_for_row) # add additional function to profile
#lp_wrapper = lp(pe.parameters_estimation_single_trajectory)
#lp_wrapper = lp(pe.parameters_estimation)
@@ -218,7 +301,7 @@ lp = LineProfiler()
#pe.sample_path.trajectories[0].get_trajectory()[:, [0,1]])
"""lp_wrapper = lp(pe.parameters_estimation_for_variable_single_parent)
lp_wrapper = lp(pe.parameters_estimation_for_variable_single_parent)
lp_wrapper(1, pe.sample_path.trajectories[0].get_times(),
pe.sample_path.trajectories[0].get_trajectory()[:, 1],
pe.sample_path.trajectories[0].get_trajectory()[:, 2])
@@ -245,7 +328,64 @@ lp_wrapper(2, pe.sample_path.trajectories[0].get_times(), pe.sample_path.traject
pe.sample_path.trajectories[0].get_trajectory()[:,2], pe.sample_path.trajectories[0].get_trajectory()[:, [0,1]] )
lp.print_stats()"""
lp_wrapper = lp(pe.parameters_estimation_for_variable_single_parent_in_place)
"""lp_wrapper = lp(pe.parameters_estimation_for_variable_single_parent_in_place)
lp_wrapper(1, pe.sample_path.trajectories[0].get_times(), pe.sample_path.trajectories[0].transitions[:, 1],
pe.sample_path.trajectories[0].get_trajectory()[:,1], pe.sample_path.trajectories[0].get_trajectory()[:,2], (3,3,3) )
lp.print_stats()"""
"""lp_wrapper = lp(pe.compute_sufficient_statistics_for_trajectory)
lp_wrapper(pe.sample_path.trajectories[0].get_times(), pe.sample_path.trajectories[0].actual_trajectory,
pe.sample_path.trajectories[0].transitions, 3)
lp.print_stats()
lp_wrapper = lp(pe.compute_state_res_time_for_node)
lp_wrapper(0, pe.sample_path.trajectories[0].get_times(),
pe.sample_path.trajectories[0].actual_trajectory, [0], [3], np.zeros([3,3], dtype=np.float))
lp.print_stats()
#pe.compute_state_res_time_for_node(0, pe.sample_path.trajectories[0].get_times(),
#pe.sample_path.trajectories[0].actual_trajectory, [0], [3], np.zeros([3,3], dtype=np.float))"""
"""[[2999.2966 2749.2298 3301.5975]
[3797.1737 3187.8345 2939.2009]
[3432.224 3062.5402 4530.9028]]
[[ 827.6058 838.1515 686.1365]
[1426.384 2225.2093 1999.8528]
[ 745.3068 733.8129 746.2347]
[ 520.8113 690.9502 853.4022]
[1590.8609 1853.0021 1554.1874]
[ 637.5576 643.8822 654.9506]
[ 718.7632 742.2117 998.5844]
[1811.984 1598.0304 2547.988 ]
[ 770.8503 598.9588 984.3304]]
lp_wrapper = lp(pe.compute_state_residence_time_for_all_nodes)
lp_wrapper()
lp.print_stats()
#pe.compute_state_residence_time_for_all_nodes()
print(pe.amalgamated_cims_struct.sets_of_cims[0].state_residence_times)
[[[14472, 3552, 10920],
[12230, 25307, 13077],
[ 9707, 14408, 24115]],
[[22918, 6426, 16492],
[10608, 16072, 5464],
[10746, 11213, 21959]],
[[23305, 6816, 16489],
[ 3792, 19190, 15398],
[13718, 18243, 31961]]])
Raveled [14472 3552 10920 12230 25307 13077 9707 14408 24115 22918 6426 16492
10608 16072 5464 10746 11213 21959 23305 6816 16489 3792 19190 15398
13718 18243 31961]"""
lp_wrapper = lp(pe.compute_parameters)
lp_wrapper()
#for variable in pe.amalgamated_cims_struct.sets_of_cims:
#for cond in variable.get_cims():
#print(cond.cim)
print(pe.amalgamated_cims_struct.get_cims_of_node(1,[2]))
lp.print_stats()

@@ -38,4 +38,11 @@ class SamplePath:
return len(self.trajectories)
"""os.getcwd()
os.chdir('..')
path = os.getcwd() + '/data'
s1 = SamplePath(path)
s1.build_trajectories()
s1.build_structure()
print(s1.trajectories[0].get_complete_trajectory())"""

@@ -17,7 +17,9 @@ class SetOfCims:
self.node_id = node_id
self.parents_states_number = parents_states_number
self.node_states_number = node_states_number
self.actual_cims = None
self.actual_cims = []
self.state_residence_times = None
self.transition_matrices = None
self.build_actual_cims_structure()
def build_actual_cims_structure(self):
@@ -25,13 +27,19 @@ class SetOfCims:
#for state_number in self.parents_states_number:
#cims_number = cims_number * state_number
if not self.parents_states_number:
self.actual_cims = np.empty(1, dtype=cim.ConditionalIntensityMatrix)
self.actual_cims[0] = cim.ConditionalIntensityMatrix(self.node_states_number)
#self.actual_cims = np.empty(1, dtype=cim.ConditionalIntensityMatrix)
#self.actual_cims[0] = cim.ConditionalIntensityMatrix(self.node_states_number)
self.state_residence_times = np.zeros((1, self.node_states_number), dtype=np.float)
self.transition_matrices = np.zeros((1,self.node_states_number, self.node_states_number), dtype=np.int)
else:
self.actual_cims = np.empty(self.parents_states_number, dtype=cim.ConditionalIntensityMatrix)
self.build_actual_cims(self.actual_cims)
#self.actual_cims = np.empty(self.parents_states_number, dtype=cim.ConditionalIntensityMatrix)
#self.build_actual_cims(self.actual_cims)
#for indx, matrix in enumerate(self.actual_cims):
#self.actual_cims[indx] = cim.ConditionalIntensityMatrix(self.node_states_number)
self.state_residence_times = \
np.zeros((np.prod(self.parents_states_number), self.node_states_number), dtype=np.float)
self.transition_matrices = np.zeros([np.prod(self.parents_states_number), self.node_states_number,
self.node_states_number], dtype=np.int)
def update_state_transition(self, indexes, element_indx_tuple):
#matrix_indx = self.indexes_converter(indexes)
@@ -63,17 +71,30 @@ class SetOfCims:
def indexes_converter(self, indexes): # expects an array like [2,2], where
#print(type(indexes))
#print(indexes)
#print(type(bases))
assert len(indexes) == len(self.parents_states_number)
vector_index = 0
if indexes.size == 0:
if not indexes:
return vector_index
else:
for indx, value in enumerate(indexes):
vector_index = vector_index*self.parents_states_number[indx] + indexes[indx]
return vector_index
def build_cims(self, state_res_times, transition_matrices):
for state_res_time_vector, transition_matrix in zip(state_res_times, transition_matrices):
#print(state_res_time_vector, transition_matrix)
cim_to_add = cim.ConditionalIntensityMatrix(self.node_states_number,
state_res_time_vector, transition_matrix)
cim_to_add.compute_cim_coefficients()
#print(cim_to_add)
self.actual_cims.append(cim_to_add)
def get_cims(self):
return self.actual_cims
def get_cim(self, index):
flat_index = self.indexes_converter(index)
return self.actual_cims[flat_index]
"""sofc = SetOfCims('Z', [3, 3], 3)
@@ -82,6 +103,3 @@ print(sofc.actual_cims)
print(sofc.actual_cims[0,0])
print(sofc.actual_cims[1,2])
#print(sofc.indexes_converter([]))"""
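SetOfCims now keeps the raw sufficient statistics (state_residence_times and transition_matrices) and only materializes ConditionalIntensityMatrix objects in build_cims, while indexes_converter maps a vector of parent states to the position of the matching CIM with the usual mixed-radix rule. A standalone restatement of that conversion for parents_states_number = [3, 3], matching the commented test above:

# sketch of SetOfCims.indexes_converter for a node whose two parents have 3 states each
parents_states_number = [3, 3]

def flat_cim_index(indexes):
    vector_index = 0
    for indx, value in enumerate(indexes):
        vector_index = vector_index * parents_states_number[indx] + value
    return vector_index

print(flat_cim_index([0, 0]))   # 0
print(flat_cim_index([1, 2]))   # 1 * 3 + 2 = 5
print(flat_cim_index([2, 2]))   # 8, the last of the 3 * 3 = 9 CIMs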

@@ -31,3 +31,6 @@ class Structure:
def get_states_number(self, node):
return self.variables_frame['Value'][self.get_node_indx(node)]
def get_states_number_by_indx(self, node_indx):
return self.variables_frame['Value'][node_indx]

@@ -16,10 +16,12 @@ class Trajectory():
def __init__(self, list_of_columns):
self.actual_trajectory = np.array(list_of_columns[1:], dtype=np.int).T
self.transitions = np.array(list_of_columns[4:], dtype=np.int).T
self.times = np.array(list_of_columns[0], dtype=np.float)
def get_trajectory(self):
return self.actual_trajectory[:,:4]
def get_complete_trajectory(self):
return self.actual_trajectory
def get_times(self):
@@ -28,6 +30,4 @@ class Trajectory():
def size(self):
return self.actual_trajectory.shape[0]
def merge_columns(self, list_of_cols):
return np.vstack(list_of_cols).T
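Trajectory now exposes three views of the imported columns: the time deltas (column 0), the full integer trajectory (columns 1 onward, values plus the shifted transition indicators), and the transitions block alone (columns 4 onward, the same hard-coded 4 the importer's TODO flags). A toy construction mirroring the constructor, assuming three variables so that columns 1-3 hold values and 4-6 hold indicators (the layout is an assumption for illustration):

import numpy as np

# hypothetical list_of_columns: [Time, X, Y, Z, XFlag, YFlag, ZFlag], two rows each
list_of_columns = [
    [0.5, 1.0],                  # time deltas
    [0, 1], [1, 1], [2, 0],      # variable values
    [1, 0], [0, 0], [0, 1],      # transition indicators
]

times = np.array(list_of_columns[0], dtype=np.float64)
actual_trajectory = np.array(list_of_columns[1:], dtype=np.int64).T   # shape (2, 6)
transitions = np.array(list_of_columns[4:], dtype=np.int64).T         # shape (2, 3)
print(times.shape, actual_trajectory.shape, transitions.shape)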