
Refactor SamplePath class

parallel_struct_est
philpMartin 4 years ago
parent 7edfb6c962
commit 5c729eea55
1. main_package/classes/abstract_importer.py (23 changes)
2. main_package/classes/abstract_sample_path.py (37 changes)
3. main_package/classes/json_importer.py (26 changes)
4. main_package/classes/network_graph.py (16 changes)
5. main_package/classes/parameters_estimator.py (26 changes)
6. main_package/classes/sample_path.py (28 changes)
7. main_package/classes/set_of_cims.py (21 changes)
8. main_package/classes/structure.py (19 changes)
9. main_package/classes/structure_estimator.py (19 changes)
10. main_package/classes/trajectory.py (4 changes)
11. main_package/tests/test_json_importer.py (32 changes)
12. main_package/tests/test_networkgraph.py (7 changes)
13. main_package/tests/test_parameters_estimator.py (15 changes)
14. main_package/tests/test_sample_path.py (10 changes)
15. main_package/tests/test_structure.py (63 changes)
16. main_package/tests/test_structure_estimator.py (18 changes)

@@ -3,34 +3,21 @@ from abc import ABC, abstractmethod
class AbstractImporter(ABC):
"""
Interfaccia che espone i metodi necessari all'importing delle trajectories e della struttura della CTBN
Interface that exposes all the necessary methods to import the trajectories and the net structure.
:files_path: il path in cui sono presenti i/il file da importare
:file_path: the file path
"""
def __init__(self, files_path):
self.files_path = files_path
def __init__(self, file_path: str):
self.file_path = file_path
super().__init__()
@abstractmethod
def import_trajectories(self, raw_data):
"""
Builds the trajectories starting from the dataset raw_data
Parameters:
raw_data: the dataset from which to extract the trajectories
Returns:
void
"""
pass
@abstractmethod
def import_structure(self, raw_data):
"""
Builds the net structure starting from the dataset raw_data
Parameters:
raw_data: the dataset from which to extract the structure
Returns:
void
"""
pass
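Note: any concrete importer now receives a single file path and only has to implement the two abstract methods above. A minimal sketch of a hypothetical subclass (the CSV format and the column names are illustrative, not part of this commit):

import pandas as pd
import abstract_importer as ai

class CsvImporter(ai.AbstractImporter):
    # Hypothetical importer: one CSV file instead of the project's JSON format.
    def import_trajectories(self, raw_data):
        # raw_data is assumed to already be a pandas DataFrame with a time column
        self.trajectories = [raw_data]

    def import_structure(self, raw_data):
        # raw_data is assumed to be an edge list such as [('X', 'Y')]
        self.structure = pd.DataFrame(raw_data, columns=['From', 'To'])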

@@ -0,0 +1,37 @@
from abc import ABC, abstractmethod
import abstract_importer as ai
class AbstractSamplePath(ABC):
def __init__(self, importer: ai.AbstractImporter):
self.importer = importer
self._trajectories = None
self._structure = None
super().__init__()
@abstractmethod
def build_trajectories(self):
"""
Builds the Trajectory object that will contain all the trajectories.
Assigns the Trajectory object to the instance attribute _trajectories
Clears all the unused dataframes in Importer Object
Parameters:
void
Returns:
void
"""
pass
@abstractmethod
def build_structure(self):
"""
Builds the Structure object that aggregates all the info about the net.
Assigns the Structure object to the instance attribute _structure
Parameters:
void
Returns:
void
"""
pass

@@ -15,7 +15,7 @@ class JsonImporter(AbstractImporter):
|_ dyn.str
|_ samples
|_ variables
:files_path: the path that contains tha data to be imported
:file_path: the path of the file that contains the data to be imported
:samples_label: the reference key for the samples in the trajectories
:structure_label: the reference key for the structure of the network data
:variables_label: the reference key for the cardinalities of the nodes data
@@ -28,7 +28,7 @@ class JsonImporter(AbstractImporter):
:sorter: the column headers (excluding the time column) of the DataFrame concatenated_samples
"""
def __init__(self, files_path: str, samples_label: str, structure_label: str, variables_label: str, time_key: str,
def __init__(self, file_path: str, samples_label: str, structure_label: str, variables_label: str, time_key: str,
variables_key: str):
self.samples_label = samples_label
self.structure_label = structure_label
@@ -40,7 +40,7 @@ class JsonImporter(AbstractImporter):
self._df_variables = pd.DataFrame()
self._concatenated_samples = None
self.sorter = None
super(JsonImporter, self).__init__(files_path)
super(JsonImporter, self).__init__(file_path)
def import_data(self):
"""
@@ -110,15 +110,15 @@ class JsonImporter(AbstractImporter):
data: the contents of the json file
"""
try:
read_files = glob.glob(os.path.join(self.files_path, "*.json"))
if not read_files:
raise ValueError('No .json file found in the entered path!')
with open(read_files[0]) as f:
data = json.load(f)
return data
except ValueError as err:
print(err.args)
#try:
#read_files = glob.glob(os.path.join(self.files_path, "*.json"))
#if not read_files:
#raise ValueError('No .json file found in the entered path!')
with open(self.file_path) as f:
data = json.load(f)
return data
#except ValueError as err:
#print(err.args)
def one_level_normalizing(self, raw_data: typing.List, indx: int, key: str) -> pd.DataFrame:
"""
@@ -207,13 +207,11 @@ class JsonImporter(AbstractImporter):
def build_list_of_samples_array(self, data_frame: pd.DataFrame) -> typing.List:
"""
Costruisce una lista contenente le colonne presenti nel dataframe data_frame convertendole in numpy_array
Builds a list containing the columns of the dataframe, converting them to numpy arrays.
Parameters:
:data_frame: the dataframe from which the columns have to be extracted and converted
Returns:
:columns_list: the resulting list of numpy arrays
"""
columns_list = [data_frame[column].to_numpy() for column in data_frame]
#for column in data_frame:
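Note: with the glob lookup commented out, read_json_file opens self.file_path directly, so a missing file now raises FileNotFoundError instead of being swallowed by the old try/except; the updated test_read_json_file_not_found below relies on exactly that. A usage sketch mirroring the test suite (the '../data' directory is the one the tests assume):

import glob
import os
import json_importer as ji

read_files = glob.glob(os.path.join('../data', '*.json'))
j1 = ji.JsonImporter(read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name')
raw_data = j1.read_json_file()  # raises FileNotFoundError if the path is wrong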

@@ -136,7 +136,7 @@ class NetworkGraph:
fancy_indx = [i[1] for i in self.aggregated_info_about_nodes_parents]
return fancy_indx
def build_time_scalar_indexing_structure_for_a_node(self, node_id: str, parents_vals: typing.List):
def build_time_scalar_indexing_structure_for_a_node(self, node_id: str, parents_vals: typing.List) -> np.ndarray:
"""
Builds an indexing structure for the computation of state residence time values.
@@ -153,7 +153,7 @@
return T_vector
def build_transition_scalar_indexing_structure_for_a_node(self, node_id: str, parents_vals: typing.List):
def build_transition_scalar_indexing_structure_for_a_node(self, node_id: str, parents_vals: typing.List) -> np.ndarray:
"""
Builds an indexing structure for the computation of state transition values.
@@ -171,7 +171,7 @@
M_vector = M_vector.cumprod().astype(np.int)
return M_vector
def build_time_columns_filtering_for_a_node(self, node_indx: int, p_indxs: typing.List):
def build_time_columns_filtering_for_a_node(self, node_indx: int, p_indxs: typing.List) -> np.ndarray:
"""
Builds the necessary structure to filter the desired columns indicated by node_indx and p_indxs in the dataset.
This structure will be used in the computation of the state residence times.
@@ -183,7 +183,7 @@
"""
return np.append(np.array([node_indx], dtype=np.int), p_indxs).astype(np.int)
def build_transition_filtering_for_a_node(self, node_indx, p_indxs):
def build_transition_filtering_for_a_node(self, node_indx, p_indxs) -> np.ndarray:
"""
Builds the necessary structure to filter the desired columns indicated by node_indx and p_indxs in the dataset.
This structure will be used in the computation of the state transition values.
@@ -196,7 +196,7 @@
nodes_number = self.graph_struct.total_variables_number
return np.array([node_indx + nodes_number, node_indx, *p_indxs], dtype=np.int)
def build_p_comb_structure_for_a_node(self, parents_values: typing.List):
def build_p_comb_structure_for_a_node(self, parents_values: typing.List) -> np.ndarray:
"""
Builds the combinatorial structure that contains the combinations of all the values contained in parents_values.
@@ -252,12 +252,6 @@ class NetworkGraph:
def get_states_number(self, node_id):
return self.graph.nodes[node_id]['val']
"""
def get_states_number_by_indx(self, node_indx):
return self.graph_struct.get_states_number_by_indx(node_indx)
def get_node_by_index(self, node_indx):
return self.graph_struct.get_node_id(node_indx)"""
def get_node_indx(self, node_id):
return nx.get_node_attributes(self.graph, 'indx')[node_id]
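Note: the np.ndarray return annotations make the contract of the indexing builders explicit; each builder is a cumulative product over the node's cardinality and its parents' cardinalities. A worked sketch assuming a 3-state node with one 3-state parent (matching the [3, 9] and [3, 9, 27] vectors visible in the removed test output further down):

import numpy as np

# time indexing: cumprod over [node_states, *parents_vals]
T_vector = np.array([3, 3]).cumprod()     # -> array([3, 9])
# transition indexing: the node's cardinality appears twice (from-state and to-state)
M_vector = np.array([3, 3, 3]).cumprod()  # -> array([ 3,  9, 27])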

@@ -62,30 +62,8 @@ class ParametersEstimator:
self.net_graph.transition_scalar_indexing_structure[indx],
aggr[1].transition_matrices)
aggr[1].build_cims(aggr[1].state_residence_times, aggr[1].transition_matrices)
"""
def compute_parameters_for_node(self, node_id):
pos_index = self.net_graph.get_positional_node_indx(node_id)
node_indx = self.net_graph.get_node_indx(node_id)
state_res_times = self.sets_of_cims_struct.sets_of_cims[pos_index].state_residence_times
transition_matrices = self.sets_of_cims_struct.sets_of_cims[pos_index].transition_matrices
#print("Nodes", self.net_graph.get_nodes())
self.compute_state_res_time_for_node(node_indx, self.sample_path.trajectories.times,
self.sample_path.trajectories.trajectory,
self.net_graph.time_filtering[pos_index],
self.net_graph.time_scalar_indexing_strucure[pos_index],
state_res_times)
# print(self.net_graph.transition_filtering[indx])
# print(self.net_graph.transition_scalar_indexing_structure[indx])
self.compute_state_transitions_for_a_node(node_indx,
self.sample_path.trajectories.complete_trajectory,
self.net_graph.transition_filtering[pos_index],
self.net_graph.transition_scalar_indexing_structure[pos_index],
transition_matrices)
self.sets_of_cims_struct.sets_of_cims[pos_index].build_cims(
state_res_times,
transition_matrices) #TODO could return the set-of-cims object directly
"""
def compute_parameters_for_node(self, node_id: str):
def compute_parameters_for_node(self, node_id: str) -> sofc.SetOfCims:
"""
Computes the CIMs of the node identified by the label node_id.
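Note: returning the SetOfCims directly, instead of writing into sets_of_cims_struct as the removed block did, lets callers consume the result in one call. A sketch of the new flow, following the updated tests (whether init_sets_cims_container() is still required is not visible in this hunk; 'X' is an illustrative node label):

import network_graph as ng
import parameters_estimator as pe

g1 = ng.NetworkGraph(s1.structure)  # s1: a built SamplePath, see the sample_path hunk below
g1.init_graph()
p1 = pe.ParametersEstimator(s1, g1)
p1.init_sets_cims_container()
sofc1 = p1.compute_parameters_for_node('X')
print(sofc1.actual_cims)  # the estimated ConditionalIntensityMatrix objects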

@@ -1,41 +1,35 @@
import abstract_sample_path as asam
import json_importer as imp
import trajectory as tr
import structure as st
class SamplePath:
class SamplePath(asam.AbstractSamplePath):
"""
Aggregates all the information about the trajectories, the real structure of the sampled net and the variables'
cardinalities.
Has the task of creating the objects that will contain the mentioned data.
:files_path: the path that contains tha data to be imported
:samples_label: the reference key for the samples in the trajectories
:structure_label: the reference key for the structure of the network data
:variables_label: the reference key for the cardinalites of the nodes data
:time_key: the key used to identify the timestamps in each trajectory
:variables_key: the key used to identify the names of the variables in the net
:importer: the Importer object that will import and process the data
:trajectories: the Trajectory object that will contain all the concatenated trajectories
:structure: the Structure object that will contain all the structural info about the net
:total_variables_count: the number of variables in the net
"""
def __init__(self, files_path: str, samples_label: str, structure_label: str, variables_label: str, time_key: str,
variables_key: str):
self.importer = imp.JsonImporter(files_path, samples_label, structure_label,
variables_label, time_key, variables_key)
self._trajectories = None
self._structure = None
#def __init__(self, files_path: str, samples_label: str, structure_label: str, variables_label: str, time_key: str,
#variables_key: str):
def __init__(self, importer: imp.JsonImporter):
#self.importer =importer
super().__init__(importer)
#self._trajectories = None
#self._structure = None
self.total_variables_count = None
def build_trajectories(self):
"""
Builds the Trajectory object that will contain all the trajectories.
Clears all the unsed dataframes in Importer Object
Clears all the unused dataframes in Importer Object
Parameters:
void
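Note: this constructor change is the heart of the commit; SamplePath no longer builds its own JsonImporter but accepts one, which is what lets the tests share a single importer across cases. The new wiring, as the updated tests use it:

import glob
import os
import json_importer as imp
import sample_path as sp

read_files = glob.glob(os.path.join('../data', '*.json'))
importer = imp.JsonImporter(read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name')
s1 = sp.SamplePath(importer)
s1.build_trajectories()
s1.build_structure()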

@@ -46,6 +46,16 @@ class SetOfCims:
self.node_states_number], dtype=np.int)
def build_cims(self, state_res_times: typing.List, transition_matrices: typing.List):
"""
Builds the ConditionalIntensityMatrix objects given the state residence times and transition matrices.
Computes the CIM coefficients.
Parameters:
state_res_times: the state residence times matrix
transition_matrices: the transition matrices
Returns:
void
"""
for state_res_time_vector, transition_matrix in zip(state_res_times, transition_matrices):
cim_to_add = cim.ConditionalIntensityMatrix(state_res_time_vector, transition_matrix)
cim_to_add.compute_cim_coefficients()
@@ -54,7 +64,16 @@ class SetOfCims:
self.transition_matrices = None
self.state_residence_times = None
def filter_cims_with_mask(self, mask_arr: np.ndarray, comb: typing.List):
def filter_cims_with_mask(self, mask_arr: np.ndarray, comb: typing.List) -> np.ndarray:
"""
Filters the cims contained in the array actual_cims given the boolean mask mask_arr and the indexes comb.
Parameters:
mask_arr: the boolean mask
comb: the indexes of the selected cims
Returns:
Array of ConditionalIntensityMatrix
"""
if mask_arr.size <= 1:
return self.actual_cims
else:
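Note: build_cims above delegates the actual math to ConditionalIntensityMatrix.compute_cim_coefficients(), whose internals are not shown in this diff. For reference, a self-contained sketch of the standard CTBN maximum-likelihood estimate such a matrix typically encodes, q[s, s'] = M[s, s'] / T[s] with a negative diagonal:

import numpy as np

def compute_cim_coefficients(state_res_times, transition_matrix):
    # state_res_times: 1-D array T[s]; transition_matrix: 2-D count matrix M[s, s']
    # off-diagonal rates: observed transitions divided by time spent in the source state
    cim = transition_matrix.astype(np.float64) / state_res_times[:, np.newaxis]
    np.fill_diagonal(cim, 0.0)
    np.fill_diagonal(cim, -cim.sum(axis=1))  # each row of a CIM sums to zero
    return cim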

@@ -13,7 +13,7 @@ class Structure:
:total_variables_number: the total number of variables in the net
"""
def __init__(self, nodes_label_list: ty.List, node_indexes_arr: np.array, nodes_vals_arr: np.array,
def __init__(self, nodes_label_list: ty.List, node_indexes_arr: np.ndarray, nodes_vals_arr: np.ndarray,
edges_list: ty.List, total_variables_number: int):
self._nodes_labels_list = nodes_label_list
self._nodes_indexes_arr = node_indexes_arr
@@ -32,36 +32,31 @@ class Structure:
return self._nodes_labels_list
@property
def nodes_indexes(self):
def nodes_indexes(self) -> np.ndarray:
return self._nodes_indexes_arr
@property
def nodes_values(self):
def nodes_values(self) -> np.ndarray:
return self._nodes_vals_arr
@property
def total_variables_number(self):
return self._total_variables_number
def get_node_id(self, node_indx: int):
def get_node_id(self, node_indx: int) -> str:
return self._nodes_labels_list[node_indx]
def get_node_indx(self, node_id: str):
def get_node_indx(self, node_id: str) -> int:
pos_indx = self._nodes_labels_list.index(node_id)
return self._nodes_indexes_arr[pos_indx]
def get_positional_node_indx(self, node_id: str):
def get_positional_node_indx(self, node_id: str) -> int:
return self._nodes_labels_list.index(node_id)
def get_states_number(self, node: str):
def get_states_number(self, node: str) -> int:
pos_indx = self._nodes_labels_list.index(node)
return self._nodes_vals_arr[pos_indx]
def get_states_number_by_indx(self, node_indx: int):
#print(self.value_label)
#print("Node indx", node_indx)
return self._nodes_vals_arr[node_indx]
def __repr__(self):
return "Variables:\n" + str(self._nodes_labels_list) +"\nValues:\n"+ str(self._nodes_vals_arr) +\
"\nEdges: \n" + str(self._edges_list)

@@ -2,10 +2,13 @@
import numpy as np
import itertools
import networkx as nx
from networkx.readwrite import json_graph
import json
import typing
from scipy.stats import f as f_dist
from scipy.stats import chi2 as chi2_dist
import sample_path as sp
import structure as st
import network_graph as ng
@@ -289,6 +292,22 @@ class StructureEstimator:
total_vars_numb = self.sample_path.total_variables_count
[ctpc_algo(n, total_vars_numb) for n in self.nodes]
def save_results(self):
"""
Saves the estimated structure to a .json file
Parameters:
void
Returns:
void
"""
res = json_graph.node_link_data(self.complete_graph)
name = self.sample_path.importer.file_path.rsplit('/',1)[-1]
#print(name)
name = 'results_' + name
with open(name, 'w') as f:
json.dump(res, f)
def remove_diagonal_elements(self, matrix):
m = matrix.shape[0]
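Note: save_results serializes the estimated graph in networkx node-link format, deriving the output name from the importer's file_path (the rsplit('/', 1) assumes POSIX path separators). A sketch of reloading such a file; the file name here is hypothetical:

import json
from networkx.readwrite import json_graph

with open('results_esempio.json') as f:  # hypothetical output name
    estimated_graph = json_graph.node_link_graph(json.load(f))
print(estimated_graph.edges)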

@@ -22,7 +22,7 @@ class Trajectory:
self._times = np.array(list_of_columns[0], dtype=np.float)
@property
def trajectory(self):
def trajectory(self) -> np.ndarray:
"""
Parameters:
void
@@ -32,7 +32,7 @@ class Trajectory:
return self._actual_trajectory[:, :self.original_cols_number]
@property
def complete_trajectory(self):
def complete_trajectory(self) -> np.ndarray:
"""
Parameters:
void

@@ -1,6 +1,8 @@
import sys
sys.path.append("/Users/Zalum/Desktop/Tesi/CTBN_Project/main_package/classes/")
import unittest
import os
import glob
import numpy as np
import pandas as pd
import json_importer as ji
@@ -13,15 +15,18 @@ import json
class TestJsonImporter(unittest.TestCase):
@classmethod
def setUpClass(cls) -> None:
cls.read_files = glob.glob(os.path.join('../data', "*.json"))
def test_init(self):
path = os.getcwd()
j1 = ji.JsonImporter(path, 'samples', 'dyn.str', 'variables', 'Time', 'Name')
j1 = ji.JsonImporter(self.read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name')
self.assertEqual(j1.samples_label, 'samples')
self.assertEqual(j1.structure_label, 'dyn.str')
self.assertEqual(j1.variables_label, 'variables')
self.assertEqual(j1.time_key, 'Time')
self.assertEqual(j1.variables_key, 'Name')
self.assertEqual(j1.files_path, path)
self.assertEqual(j1.file_path, self.read_files[0])
self.assertFalse(j1.df_samples_list)
self.assertTrue(j1.variables.empty)
self.assertTrue(j1.structure.empty)
@@ -33,6 +38,7 @@ class TestJsonImporter(unittest.TestCase):
with open('data.json', 'w') as f:
json.dump(data_set, f)
path = os.getcwd()
path = path + '/data.json'
j1 = ji.JsonImporter(path, '', '', '', '', '')
imported_data = j1.read_json_file()
self.assertTrue(self.ordered(data_set) == self.ordered(imported_data))
@@ -40,11 +46,12 @@ class TestJsonImporter(unittest.TestCase):
def test_read_json_file_not_found(self):
path = os.getcwd()
path = path + '/data.json'
j1 = ji.JsonImporter(path, '', '', '', '', '')
self.assertIsNone(j1.read_json_file())
self.assertRaises(FileNotFoundError, j1.read_json_file)
def test_normalize_trajectories(self):
j1 = ji.JsonImporter('../data', 'samples', 'dyn.str', 'variables', 'Time', 'Name')
j1 = ji.JsonImporter(self.read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name')
raw_data = j1.read_json_file()
#print(raw_data)
j1.normalize_trajectories(raw_data, 0, j1.samples_label)
@@ -52,17 +59,17 @@ class TestJsonImporter(unittest.TestCase):
self.assertEqual(list(j1.df_samples_list[0].columns.values)[1:], j1.sorter)
def test_normalize_trajectories_wrong_indx(self):
j1 = ji.JsonImporter('../data', 'samples', 'dyn.str', 'variables', 'Time', 'Name')
j1 = ji.JsonImporter(self.read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name')
raw_data = j1.read_json_file()
self.assertRaises(IndexError, j1.normalize_trajectories, raw_data, 474, j1.samples_label)
def test_normalize_trajectories_wrong_key(self):
j1 = ji.JsonImporter('../data', 'sample', 'dyn.str', 'variables', 'Time', 'Name')
j1 = ji.JsonImporter(self.read_files[0], 'sample', 'dyn.str', 'variables', 'Time', 'Name')
raw_data = j1.read_json_file()
self.assertRaises(KeyError, j1.normalize_trajectories, raw_data, 0, j1.samples_label)
def test_compute_row_delta_single_samples_frame(self):
j1 = ji.JsonImporter('../data', 'samples', 'dyn.str', 'variables', 'Time', 'Name')
j1 = ji.JsonImporter(self.read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name')
raw_data = j1.read_json_file()
j1.normalize_trajectories(raw_data, 0, j1.samples_label)
sample_frame = j1.df_samples_list[0]
@@ -75,7 +82,7 @@ class TestJsonImporter(unittest.TestCase):
self.assertEqual(sample_frame.shape[0] - 1, new_sample_frame.shape[0])
def test_compute_row_delta_in_all_frames(self):
j1 = ji.JsonImporter('../data', 'samples', 'dyn.str', 'variables', 'Time', 'Name')
j1 = ji.JsonImporter(self.read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name')
raw_data = j1.read_json_file()
j1.import_trajectories(raw_data)
j1.compute_row_delta_in_all_samples_frames(j1.time_key)
@@ -83,7 +90,7 @@ class TestJsonImporter(unittest.TestCase):
self.assertEqual(list(j1.concatenated_samples.columns.values)[0], j1.time_key)
def test_clear_data_frame_list(self):
j1 = ji.JsonImporter('../data', 'samples', 'dyn.str', 'variables', 'Time', 'Name')
j1 = ji.JsonImporter(self.read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name')
raw_data = j1.read_json_file()
j1.import_trajectories(raw_data)
j1.compute_row_delta_in_all_samples_frames(j1.time_key)
@@ -96,6 +103,7 @@ class TestJsonImporter(unittest.TestCase):
with open('data.json', 'w') as f:
json.dump(data_set, f)
path = os.getcwd()
path = path + '/data.json'
j1 = ji.JsonImporter(path, '', '', '', '', '')
raw_data = j1.read_json_file()
frame = pd.DataFrame(raw_data)
@@ -108,14 +116,14 @@ class TestJsonImporter(unittest.TestCase):
os.remove('data.json')
def test_import_variables(self):
j1 = ji.JsonImporter('../data', 'samples', 'dyn.str', 'variables', 'Time', 'Name')
j1 = ji.JsonImporter(self.read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name')
sorter = ['X', 'Y', 'Z']
raw_data = [{'variables':{"Name": ['Z', 'Y', 'X'], "value": [3, 3, 3]}}]
j1.import_variables(raw_data, sorter)
self.assertEqual(list(j1.variables[j1.variables_key]), sorter)
def test_import_data(self):
j1 = ji.JsonImporter('../data', 'samples', 'dyn.str', 'variables', 'Time', 'Name')
j1 = ji.JsonImporter(self.read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name')
lp = LineProfiler()
lp_wrapper = lp(j1.import_data)

@@ -1,4 +1,6 @@
import unittest
import glob
import os
import networkx as nx
import numpy as np
import itertools
@@ -6,13 +8,16 @@ from line_profiler import LineProfiler
import sample_path as sp
import network_graph as ng
import json_importer as ji
class TestNetworkGraph(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.s1 = sp.SamplePath('../data', 'samples', 'dyn.str', 'variables', 'Time', 'Name')
cls.read_files = glob.glob(os.path.join('../data', "*.json"))
cls.importer = ji.JsonImporter(cls.read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name')
cls.s1 = sp.SamplePath(cls.importer)
cls.s1.build_trajectories()
cls.s1.build_structure()

@@ -1,20 +1,23 @@
import unittest
import numpy as np
import glob
import os
from line_profiler import LineProfiler
import network_graph as ng
import sample_path as sp
import set_of_cims as sofc
import sets_of_cims_container as scc
import parameters_estimator as pe
import json_importer as ji
#TODO find a way to test the methods that estimate the times and the transitions for the single nodes
class TestParametersEstimatior(unittest.TestCase):
@classmethod
def setUpClass(cls) -> None:
cls.s1 = sp.SamplePath('../data', 'samples', 'dyn.str', 'variables', 'Time', 'Name')
cls.read_files = glob.glob(os.path.join('../data', "*.json"))
cls.importer = ji.JsonImporter(cls.read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name')
cls.s1 = sp.SamplePath(cls.importer)
cls.s1.build_trajectories()
cls.s1.build_structure()
cls.g1 = ng.NetworkGraph(cls.s1.structure)
@@ -41,12 +44,12 @@ class TestParametersEstimatior(unittest.TestCase):
sofc1 = p1.compute_parameters_for_node(node)
sampled_cims = self.aux_import_sampled_cims('dyn.cims')
sc = list(sampled_cims.values())
print(sc[indx])
#print(sc[indx])
self.equality_of_cims_of_node(sc[indx], sofc1.actual_cims)
def equality_of_cims_of_node(self, sampled_cims, estimated_cims):
#print(sampled_cims)
print(estimated_cims)
#print(estimated_cims)
self.assertEqual(len(sampled_cims), len(estimated_cims))
for c1, c2 in zip(sampled_cims, estimated_cims):
self.cim_equality_test(c1, c2.cim)
@@ -56,7 +59,7 @@ class TestParametersEstimatior(unittest.TestCase):
self.assertTrue(np.all(np.isclose(r1, r2, 1e-01, 1e-01) == True))
def aux_import_sampled_cims(self, cims_label):
i1 = ji.JsonImporter('../data', '', '', '', '', '')
i1 = ji.JsonImporter(self.read_files[0], '', '', '', '', '')
raw_data = i1.read_json_file()
return i1.import_sampled_cims(raw_data, 0, cims_label)

@@ -1,4 +1,7 @@
import unittest
import glob
import os
import json_importer as ji
import sample_path as sp
import trajectory as tr
import structure as st
@@ -6,8 +9,13 @@ import structure as st
class TestSamplePath(unittest.TestCase):
@classmethod
def setUpClass(cls) -> None:
cls.read_files = glob.glob(os.path.join('../data', "*.json"))
cls.importer = ji.JsonImporter(cls.read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name')
def test_init(self):
s1 = sp.SamplePath('../data', 'samples', 'dyn.str', 'variables', 'Time', 'Name')
s1 = sp.SamplePath(self.importer)
s1.build_trajectories()
self.assertIsNotNone(s1.trajectories)
self.assertIsInstance(s1.trajectories, tr.Trajectory)

@@ -1,13 +1,8 @@
import sys
sys.path.append("/Users/Zalum/Desktop/Tesi/CTBN_Project/main_package/classes/")
import unittest
import pandas as pd
import numpy as np
import sample_path as sp
import structure as st
import network_graph as ng
import parameters_estimator as pe
class TestStructure(unittest.TestCase):
@@ -22,7 +17,9 @@ class TestStructure(unittest.TestCase):
def test_init(self):
s1 = st.Structure(self.labels, self.indxs, self.vals, self.edges, self.vars_numb)
self.assertListEqual(self.labels,s1.nodes_labels)
self.assertIsInstance(s1.nodes_indexes, np.ndarray)
self.assertTrue(np.array_equal(self.indxs, s1.nodes_indexes))
self.assertIsInstance(s1.nodes_values, np.ndarray)
self.assertTrue(np.array_equal(self.vals, s1.nodes_values))
self.assertListEqual(self.edges, s1.edges)
self.assertEqual(self.vars_numb, s1.total_variables_number)
@@ -70,62 +67,6 @@ class TestStructure(unittest.TestCase):
s1 = st.Structure(l2, i2, v2, e2, n2)
for val, node in zip(v2, l2):
self.assertEqual(val, s1.get_states_number(node))
#TODO MAYBE THIS TEST IS NOT needed; check whether this method is really useful
"""def test_get_states_numeber_by_indx(self):
s1 = st.Structure(self.structure_frame, self.variables_frame, len(self.variables_frame.index))
for indx, row in self.variables_frame.iterrows():
self.assertEqual(row[1], s1.get_states_number_by_indx(indx))
def test_new_init(self):
#self.variables_frame.drop(self.variables_frame[(self.variables_frame['Name'] == 'Y')].index, inplace=True)
labels = self.variables_frame['Name'].to_list()
indxs = self.variables_frame.index.to_numpy()
vals = self.variables_frame['Value'].to_numpy()
edges = list(self.structure_frame.to_records(index=False))
print(labels)
print(indxs)
print(vals)
print(edges)
s1 = st.Structure(labels, indxs, vals, edges, len(self.variables_frame.index))
#print(s1.get_node_id(2))
print(s1.get_node_indx('Z'))
print(s1.get_positional_node_indx('Z'))
print(s1.get_states_number('Z'))
print(s1.get_states_number_by_indx(1))
[CIM:
[[-4.82318981 1.18421625 3.63997346]
[ 4.44726473 -9.20141291 4.755239 ]
[ 2.93950444 4.36292948 -7.30152554]], CIM:
[[-6.0336893 1.69212904 4.34235011]
[ 3.32692085 -5.03977237 1.7137923 ]
[ 3.65519241 3.81402509 -7.46819716]], CIM:
[[-6.78778897 1.98559721 4.80306557]
[ 1.23811008 -6.26366842 5.0265376 ]
[ 3.02720526 4.0256821 -7.05222539]]]
array([ 3, 9, 27])
array([3, 9])
array([1, 2])
array([4, 1, 2])
sp1 = sp.SamplePath('../data', 'samples', 'dyn.str', 'variables', 'Time', 'Name')
sp1.build_trajectories()
sp1.build_structure()
st1 = st.Structure(['X','Y','Z'], np.array([0,1,2]), np.array([3,3,3]), [('Z','X'),('Y', 'X')], sp1.total_variables_count)
g1 = ng.NetworkGraph(st1)
g1.init_graph()
print("M Vector",g1.transition_scalar_indexing_structure)
print("Time Vecotr",g1.time_scalar_indexing_strucure)
print("Time Filter",g1.time_filtering)
print("M Filter",g1.transition_filtering)
print(g1.p_combs)
print("AGG STR", g1.aggregated_info_about_nodes_parents)
p1 = pe.ParametersEstimator(sp1,g1)
p1.init_sets_cims_container()
p1.compute_parameters_for_node('X')
#print(p1.sets_of_cims_struct.get_cims_of_node(0,[1,0]))
print(p1.sets_of_cims_struct.sets_of_cims[1].actual_cims)
#print(p1.sets_of_cims_struct.sets_of_cims[2].get_cims_where_parents_except_last_are_in_state(np.array([0])))
#print(p1.sets_of_cims_struct.sets_of_cims[0].p_combs)"""
if __name__ == '__main__':

@@ -1,9 +1,13 @@
import unittest
import numpy as np
import networkx as nx
import glob
import os
import math
from line_profiler import LineProfiler
import psutil
import json_importer as ji
import sample_path as sp
import structure_estimator as se
import cache as ch
@@ -13,7 +17,9 @@ class TestStructureEstimator(unittest.TestCase):
@classmethod
def setUpClass(cls) -> None:
cls.s1 = sp.SamplePath('../data', 'samples', 'dyn.str', 'variables', 'Time', 'Name')
cls.read_files = glob.glob(os.path.join('../data', "*.json"))
cls.importer = ji.JsonImporter(cls.read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name')
cls.s1 = sp.SamplePath(cls.importer)
cls.s1.build_trajectories()
cls.s1.build_structure()
@@ -58,7 +64,7 @@ class TestStructureEstimator(unittest.TestCase):
for sset in sets2:
self.assertFalse(node in sset)
def test_one_iteration(self):
def test_time(self):
se1 = se.StructureEstimator(self.s1, 0.1, 0.1)
lp = LineProfiler()
lp.add_function(se1.complete_test)
@@ -76,8 +82,16 @@ class TestStructureEstimator(unittest.TestCase):
for ed in se1.complete_graph.edges:
if not(ed in tuples_edges):
spurious_edges.append(ed)
print("Spurious Edges:",spurious_edges)
se1.save_results()
def test_memory(self):
se1 = se.StructureEstimator(self.s1, 0.1, 0.1)
se1.ctpc_algorithm()
current_process = psutil.Process(os.getpid())
mem = current_process.memory_info().rss
print("Average Memory Usage in MB:", mem / 10**6)
if __name__ == '__main__':
unittest.main()