
Updated all dependencies to their latest versions (except for `networkx`, updated only to `2.8.8`) and updated the code to reflect the changes

master
Meliurwen 2 years ago
parent 0bfca02a61
commit df3225c288
Signed by: meliurwen
GPG Key ID: 818A8B35E9F1CE10
13 changed files:

  1. doc-requirements.txt (2 changed lines)
  2. lint-requirements.txt (4 changed lines)
  3. setup-requirements.txt (12 changed lines)
  4. src/pyctbn/legacy/estimators/parameters_estimator.py (9 changed lines)
  5. src/pyctbn/legacy/optimizers/tabu_search.py (2 changed lines)
  6. src/pyctbn/legacy/structure_graph/conditional_intensity_matrix.py (3 changed lines)
  7. src/pyctbn/legacy/structure_graph/network_graph.py (10 changed lines)
  8. src/pyctbn/legacy/structure_graph/set_of_cims.py (8 changed lines)
  9. src/pyctbn/legacy/utility/json_importer.py (30 changed lines)
  10. test-requirements.txt (10 changed lines)
  11. tests/estimators/test_structure_estimator.py (4 changed lines)
  12. tests/structure_graph/test_cim.py (4 changed lines)
  13. tests/structure_graph/test_setofcims.py (2 changed lines)

doc-requirements.txt
@@ -1,2 +1,2 @@
-sphinx==3.3.1
+sphinx==6.1.3
 sphinx-pdj-theme==0.2.1

lint-requirements.txt
@@ -1,2 +1,2 @@
-pylint==2.7.2
-flake8==3.9.0
+pylint==2.16.0
+flake8==6.0.0

setup-requirements.txt
@@ -1,6 +1,6 @@
-matplotlib==3.3.3
-networkx==2.5
-numpy==1.19.4
-pandas==1.1.5
-scipy==1.5.4
-tqdm==4.54.1
+matplotlib==3.6.3
+networkx==2.8.8
+numpy==1.24.1
+pandas==1.5.3
+scipy==1.10.0
+tqdm==4.64.1

src/pyctbn/legacy/estimators/parameters_estimator.py
@@ -5,6 +5,7 @@
 import sys
 sys.path.append('../')
 import numpy as np
+from numpy import int64
 from ..structure_graph.network_graph import NetworkGraph
 from ..structure_graph.set_of_cims import SetOfCims
@@ -83,7 +84,7 @@ class ParametersEstimator(object):
         :type T: numpy.ndArray
         """
         T[:] = np.bincount(np.sum(trajectory[:, cols_filter] * scalar_indexes_struct / scalar_indexes_struct[0], axis=1)
-                           .astype(np.int), \
+                           .astype(int), \
                            times,
                            minlength=scalar_indexes_struct[-1]).reshape(-1, T.shape[1])
@@ -104,9 +105,9 @@ class ParametersEstimator(object):
         :type M: numpy.ndArray
         """
         diag_indices = np.array([x * M.shape[1] + x % M.shape[1] for x in range(M.shape[0] * M.shape[1])],
-                                dtype=np.int64)
-        trj_tmp = trajectory[trajectory[:, int(trajectory.shape[1] / 2) + node_indx].astype(np.int) >= 0]
-        M[:] = np.bincount(np.sum(trj_tmp[:, cols_filter] * scalar_indexing / scalar_indexing[0], axis=1).astype(np.int)
+                                dtype=int64)
+        trj_tmp = trajectory[trajectory[:, int(trajectory.shape[1] / 2) + node_indx].astype(int) >= 0]
+        M[:] = np.bincount(np.sum(trj_tmp[:, cols_filter] * scalar_indexing / scalar_indexing[0], axis=1).astype(int)
                            , minlength=scalar_indexing[-1]).reshape(-1, M.shape[1], M.shape[2])
         M_raveled = M.ravel()
         M_raveled[diag_indices] = 0
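The move from `np.int` to the builtin `int` follows from the dependency bump: the `np.int` and `np.float` aliases, deprecated since NumPy 1.20, were removed in NumPy 1.24, which this commit now pins. A minimal standalone sketch of the weighted-bincount pattern used above, with made-up toy arrays (names and values are illustrative, not taken from the library):

    import numpy as np

    # Toy stand-ins: a flattened state index per trajectory row and the time
    # spent in that row's state (illustrative values only).
    codes = np.array([2, 0, 1, 2, 1])
    times = np.array([0.5, 1.0, 0.25, 0.75, 0.5])

    # bincount with weights sums the residence times per state bucket; the
    # indices must be a plain integer array, hence .astype(int) instead of the
    # removed np.int alias (NumPy >= 1.24 raises AttributeError on np.int).
    T = np.bincount(codes.astype(int), weights=times, minlength=3)
    print(T)  # [1.   0.75 1.25]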

src/pyctbn/legacy/optimizers/tabu_search.py
@@ -122,7 +122,7 @@ class TabuSearch(Optimizer):
                 'choose a new random edge according to tabu restiction'
                 if(len(current_possible_nodes) > 0):
-                    current_new_parent = sample(current_possible_nodes,k=1)[0]
+                    current_new_parent = sample(list(current_possible_nodes),k=1)[0]
                 else:
                     current_new_parent = tabu_queue.get()
                     tabu_set.remove(current_new_parent)
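Wrapping `current_possible_nodes` in `list(...)` is needed because `random.sample` no longer accepts a set: passing one was deprecated in Python 3.9 and raises a `TypeError` from Python 3.11 onward. A small illustration with a hypothetical candidate set:

    from random import sample

    current_possible_nodes = {"X1", "X2", "X3"}  # hypothetical candidate parents

    # sample() requires a sequence; converting the set to a list keeps the
    # original "pick one candidate at random" behaviour on newer Python versions.
    current_new_parent = sample(list(current_possible_nodes), k=1)[0]
    print(current_new_parent)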

src/pyctbn/legacy/structure_graph/conditional_intensity_matrix.py
@@ -3,6 +3,7 @@
 # License: MIT License
 import numpy as np
+from numpy import float64
 class ConditionalIntensityMatrix(object):
@@ -24,7 +25,7 @@ class ConditionalIntensityMatrix(object):
         if cim is not None:
             self._cim = cim
         else:
-            self._cim = self.state_transition_matrix.astype(np.float64)
+            self._cim = self.state_transition_matrix.astype(float64)
     def compute_cim_coefficients(self) -> None:
         """Compute the coefficients of the matrix _cim by using the following equality q_xx' = M[x, x'] / T[x].

src/pyctbn/legacy/structure_graph/network_graph.py
@@ -164,7 +164,7 @@ class NetworkGraph(object):
         """
         T_vector = np.array([node_states])
         T_vector = np.append(T_vector, parents_vals)
-        T_vector = T_vector.cumprod().astype(np.int)
+        T_vector = T_vector.cumprod().astype(int)
         return T_vector
     @staticmethod
@@ -182,7 +182,7 @@ class NetworkGraph(object):
         M_vector = np.array([node_states_number,
                              node_states_number])
         M_vector = np.append(M_vector, parents_vals)
-        M_vector = M_vector.cumprod().astype(np.int)
+        M_vector = M_vector.cumprod().astype(int)
         return M_vector
     @staticmethod
@@ -198,7 +198,7 @@ class NetworkGraph(object):
         :return: The filtering structure for times estimation
         :rtype: numpy.ndArray
         """
-        return np.append(np.array([node_indx], dtype=np.int), p_indxs).astype(np.int)
+        return np.append(np.array([node_indx], dtype=int), p_indxs).astype(int)
     @staticmethod
     def build_transition_filtering_for_a_node(node_indx: int, p_indxs: typing.List, nodes_number: int) \
@@ -215,7 +215,7 @@ class NetworkGraph(object):
         :return: The filtering structure for transitions estimation
         :rtype: numpy.ndArray
         """
-        return np.array([node_indx + nodes_number, node_indx, *p_indxs], dtype=np.int)
+        return np.array([node_indx + nodes_number, node_indx, *p_indxs], dtype=int)
     @staticmethod
     def build_p_comb_structure_for_a_node(parents_values: typing.List) -> np.ndarray:
@@ -238,7 +238,7 @@ class NetworkGraph(object):
             parents_comb[:, 1] = parents_comb[:, 0].copy()
             parents_comb[:, 0] = tmp_comb
         else:
-            parents_comb = np.array([[]], dtype=np.int)
+            parents_comb = np.array([[]], dtype=int)
         return parents_comb
     def get_parents_by_id(self, node_id) -> typing.List:

src/pyctbn/legacy/structure_graph/set_of_cims.py
@@ -48,13 +48,13 @@ class SetOfCims(object):
         """Initializes at the correct dimensions the state residence times matrix and the state transition matrices.
         """
         if not self._parents_states_number:
-            self._state_residence_times = np.zeros((1, self._node_states_number), dtype=np.float)
-            self._transition_matrices = np.zeros((1, self._node_states_number, self._node_states_number), dtype=np.int)
+            self._state_residence_times = np.zeros((1, self._node_states_number), dtype=float)
+            self._transition_matrices = np.zeros((1, self._node_states_number, self._node_states_number), dtype=int)
         else:
             self._state_residence_times = \
-                np.zeros((np.prod(self._parents_states_number), self._node_states_number), dtype=np.float)
+                np.zeros((np.prod(self._parents_states_number), self._node_states_number), dtype=float)
             self._transition_matrices = np.zeros([np.prod(self._parents_states_number), self._node_states_number,
-                                                  self._node_states_number], dtype=np.int)
+                                                  self._node_states_number], dtype=int)
     def build_cims(self, state_res_times: np.ndarray, transition_matrices: np.ndarray) -> None:
         """Build the ``ConditionalIntensityMatrix`` objects given the state residence times and transitions matrices.

src/pyctbn/legacy/utility/json_importer.py
@@ -10,20 +10,6 @@ import pandas as pd
 from .abstract_importer import AbstractImporter
-def read_json_file(file_path) -> typing.List:
-    """Reads the JSON file in the path self.filePath.
-    :return: The contents of the json file
-    :rtype: List
-    """
-    with open(file_path) as f:
-        data = json.load(f)
-        if (isinstance(data,list)):
-            return data
-        else:
-            return [data]
 class JsonImporter(AbstractImporter):
     """Implements the abstracts methods of AbstractImporter and adds all the necessary methods to process and prepare
     the data in json extension.
@@ -65,7 +51,7 @@ class JsonImporter(AbstractImporter):
         self._df_samples_list = None
         self._array_indx = None
         super(JsonImporter, self).__init__(file_path)
-        self._raw_data = read_json_file(file_path)
+        self._raw_data = self.read_json_file()
     def import_data(self, indx: int = 0) -> None:
         """Implements the abstract method of :class:`AbstractImporter`.
@@ -114,6 +100,20 @@
         """
         return self.one_level_normalizing(raw_data, self._array_indx, self._variables_label)
+    def read_json_file(self) -> typing.List:
+        """Reads the JSON file in the path self.filePath.
+        :return: The contents of the json file
+        :rtype: List
+        """
+        with open(self._file_path) as f:
+            data = json.load(f)
+            if (isinstance(data,list)):
+                return data
+            else:
+                return [data]
     def one_level_normalizing(self, raw_data: typing.List, indx: int, key: str) -> pd.DataFrame:
         """Extracts the one-level nested data in the list ``raw_data`` at the index ``indx`` at the key ``key``.

test-requirements.txt
@@ -1,5 +1,5 @@
-pytest-xdist==2.2.1
-setuptools==54.1.2
-pytest==6.2.2
-pytest-cov==2.11.1
-psutil==5.9.0
+pytest-xdist==3.1.0
+setuptools==67.1.0
+pytest==7.2.1
+pytest-cov==4.0.0
+psutil==5.9.4

tests/estimators/test_structure_estimator.py
@@ -92,7 +92,7 @@ class TestStructureEstimator(unittest.TestCase):
     def test_save_results(self):
         se1 = StructureConstraintBasedEstimator(self.s1, 0.1, 0.1)
         se1.ctpc_algorithm()
-        file_name = './PyCTBN/tests/estimators/test_save.json'
+        file_name = './tests/estimators/test_save.json'
         se1.save_results(file_name)
         with open(file_name) as f:
             js_graph = json.load(f)
@@ -109,7 +109,7 @@ class TestStructureEstimator(unittest.TestCase):
     def test_save_plot_estimated_graph(self):
         se1 = StructureConstraintBasedEstimator(self.s1, 0.1, 0.1)
         edges = se1.estimate_structure(disable_multiprocessing=True)
-        file_name = './PyCTBN/tests/estimators/test_plot.png'
+        file_name = './tests/estimators/test_plot.png'
         se1.save_plot_estimated_structure_graph(file_name)
         os.remove(file_name)

tests/structure_graph/test_cim.py
@@ -25,13 +25,13 @@ class TestConditionalIntensityMatrix(unittest.TestCase):
                                         state_transition_matrix = self.state_transition_matrix)
         self.assertTrue(np.array_equal(self.state_res_times, c1.state_residence_times))
         self.assertTrue(np.array_equal(self.state_transition_matrix, c1.state_transition_matrix))
-        self.assertEqual(c1.cim.dtype, np.float)
+        self.assertEqual(c1.cim.dtype, float)
         self.assertEqual(self.state_transition_matrix.shape, c1.cim.shape)
     def test_compute_cim_coefficients(self):
         c1 = ConditionalIntensityMatrix(state_residence_times = self.state_res_times,
                                         state_transition_matrix = self.state_transition_matrix)
-        c2 = self.state_transition_matrix.astype(np.float)
+        c2 = self.state_transition_matrix.astype(float)
         np.fill_diagonal(c2, c2.diagonal() * -1)
         for i in range(0, len(self.state_res_times)):
             for j in range(0, len(self.state_res_times)):

tests/structure_graph/test_setofcims.py
@@ -124,7 +124,7 @@ class TestSetOfCims(unittest.TestCase):
             parents_comb[:, 1] = parents_comb[:, 0].copy()
             parents_comb[:, 0] = tmp_comb
         else:
-            parents_comb = np.array([[]], dtype=np.int)
+            parents_comb = np.array([[]], dtype=int)
         return parents_comb
     def another_filtering_method(self,p_combs, mask, parent_value):