diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..3e16c78 --- /dev/null +++ b/.gitignore @@ -0,0 +1,8 @@ +__pycache__ +.vscode +**/__pycache__ +**/data +**/PyCTBN.egg-info +**/dist +**/results_data +**/.scannerwork \ No newline at end of file diff --git a/CTBN_project_dominio.pdf b/CTBN_project_dominio.pdf new file mode 100644 index 0000000..6bceea1 Binary files /dev/null and b/CTBN_project_dominio.pdf differ diff --git a/PyCTBN/PyCTBN/__init__.py b/PyCTBN/PyCTBN/__init__.py new file mode 100644 index 0000000..023c0f1 --- /dev/null +++ b/PyCTBN/PyCTBN/__init__.py @@ -0,0 +1,8 @@ +import PyCTBN.estimators +from PyCTBN.estimators import * +import PyCTBN.optimizers +from PyCTBN.optimizers import * +import PyCTBN.structure_graph +from PyCTBN.structure_graph import * +import PyCTBN.utility +from PyCTBN.utility import * \ No newline at end of file diff --git a/PyCTBN/PyCTBN/estimators/__init__.py b/PyCTBN/PyCTBN/estimators/__init__.py new file mode 100644 index 0000000..112086f --- /dev/null +++ b/PyCTBN/PyCTBN/estimators/__init__.py @@ -0,0 +1,5 @@ +from .fam_score_calculator import FamScoreCalculator +from .parameters_estimator import ParametersEstimator +from .structure_estimator import StructureEstimator +from .structure_constraint_based_estimator import StructureConstraintBasedEstimator +from .structure_score_based_estimator import StructureScoreBasedEstimator diff --git a/PyCTBN/PyCTBN/estimators/fam_score_calculator.py b/PyCTBN/PyCTBN/estimators/fam_score_calculator.py new file mode 100644 index 0000000..5b0b591 --- /dev/null +++ b/PyCTBN/PyCTBN/estimators/fam_score_calculator.py @@ -0,0 +1,272 @@ + +import itertools +import json +import typing + +import networkx as nx +import numpy as np +from networkx.readwrite import json_graph + +from math import log + +from scipy.special import loggamma +from random import choice + +from ..structure_graph.set_of_cims import SetOfCims +from ..structure_graph.network_graph import NetworkGraph +from ..structure_graph.conditional_intensity_matrix import ConditionalIntensityMatrix + + +''' + +''' + + +class FamScoreCalculator: + """ + Has the task of calculating the FamScore of a node by using a Bayesian score function + """ + + def __init__(self): + #np.seterr('raise') + pass + + # region theta + + def marginal_likelihood_theta(self, + cims: ConditionalIntensityMatrix, + alpha_xu: float, + alpha_xxu: float): + """ + Calculate the FamScore value of the node identified by the label node_id + + :param cims: np.array with all the node's cims + :type cims: np.array + :param alpha_xu: hyperparameter over the CTBN’s q parameters, default to 0.1 + :type alpha_xu: float + :param alpha_xxu: distribuited hyperparameter over the CTBN’s theta parameters + :type alpha_xxu: float + + :return: the value of the marginal likelihood over theta + :rtype: float + """ + return np.sum( + [self.variable_cim_xu_marginal_likelihood_theta(cim, + alpha_xu, + alpha_xxu) + for cim in cims]) + + def variable_cim_xu_marginal_likelihood_theta(self, + cim: ConditionalIntensityMatrix, + alpha_xu: float, + alpha_xxu: float): + """ + Calculate the value of the marginal likelihood over theta given a cim + + :param cim: A conditional_intensity_matrix object with the sufficient statistics + :type cim: class:'ConditionalIntensityMatrix' + :param alpha_xu: hyperparameter over the CTBN’s q parameters, default to 0.1 + :type alpha_xu: float + :param alpha_xxu: distribuited hyperparameter over the CTBN’s theta parameters + :type alpha_xxu: float + + :return: the value of the 
marginal likelihood over theta
+        :rtype: float
+        """
+
+        'get cim length'
+        values = len(cim._state_residence_times)
+
+        'compute the marginal likelihood for the current cim'
+        return np.sum([
+            self.single_cim_xu_marginal_likelihood_theta(
+                index,
+                cim,
+                alpha_xu,
+                alpha_xxu)
+            for index in range(values)])
+
+    def single_cim_xu_marginal_likelihood_theta(self,
+                                                index: int,
+                                                cim: ConditionalIntensityMatrix,
+                                                alpha_xu: float,
+                                                alpha_xxu: float):
+        """
+        Calculate the marginal likelihood over theta of the node when it assumes a specific value
+        and a specific parents' assignment
+
+        :param index: the index of the state assumed by the node
+        :type index: int
+        :param cim: A conditional_intensity_matrix object with the sufficient statistics
+        :type cim: class:'ConditionalIntensityMatrix'
+        :param alpha_xu: hyperparameter over the CTBN’s q parameters
+        :type alpha_xu: float
+        :param alpha_xxu: distributed hyperparameter over the CTBN’s theta parameters
+        :type alpha_xxu: float
+
+        :return: the value of the marginal likelihood over theta when the node assumes a specific value
+        :rtype: float
+        """
+
+        values = list(range(len(cim._state_residence_times)))
+
+        'remove the index because of the x != x^ condition in the summation '
+        values.remove(index)
+
+        'uncomment for alpha xx not uniform'
+        #alpha_xxu = alpha_xu * cim.state_transition_matrix[index,index_x_first] / cim.state_transition_matrix[index, index])
+
+        return (loggamma(alpha_xu) - loggamma(alpha_xu + cim.state_transition_matrix[index, index])) \
+            + \
+            np.sum([self.single_internal_cim_xxu_marginal_likelihood_theta(
+                cim.state_transition_matrix[index,index_x_first],
+                alpha_xxu)
+                for index_x_first in values])
+
+
+    def single_internal_cim_xxu_marginal_likelihood_theta(self,
+                                                          M_xxu_suff_stats: float,
+                                                          alpha_xxu: float=1):
+        """Calculate the second part of the marginal likelihood over theta formula
+
+        :param M_xxu_suff_stats: value of the sufficient statistic M[xx'|u]
+        :type M_xxu_suff_stats: float
+        :param alpha_xxu: distributed hyperparameter over the CTBN’s theta parameters
+        :type alpha_xxu: float
+
+        :return: the value of the marginal likelihood over theta when the node assumes a specific value
+        :rtype: float
+        """
+        return loggamma(alpha_xxu+M_xxu_suff_stats) - loggamma(alpha_xxu)
+
+    # endregion
+
+    # region q
+
+    def marginal_likelihood_q(self,
+                              cims: np.array,
+                              tau_xu: float=0.1,
+                              alpha_xu: float=1):
+        """
+        Calculate the value of the marginal likelihood over q of the node identified by the label node_id
+
+        :param cims: np.array with all the node's cims
+        :type cims: np.array
+        :param tau_xu: hyperparameter over the CTBN’s q parameters
+        :type tau_xu: float
+        :param alpha_xu: hyperparameter over the CTBN’s q parameters
+        :type alpha_xu: float
+
+        :return: the value of the marginal likelihood over q
+        :rtype: float
+        """
+
+        return np.sum([self.variable_cim_xu_marginal_likelihood_q(cim, tau_xu, alpha_xu) for cim in cims])
+
+    def variable_cim_xu_marginal_likelihood_q(self,
+                                              cim: ConditionalIntensityMatrix,
+                                              tau_xu: float=0.1,
+                                              alpha_xu: float=1):
+        """
+        Calculate the value of the marginal likelihood over q given a cim
+
+        :param cim: A conditional_intensity_matrix object with the sufficient statistics
+        :type cim: class:'ConditionalIntensityMatrix'
+        :param tau_xu: hyperparameter over the CTBN’s q parameters
+        :type tau_xu: float
+        :param alpha_xu: hyperparameter over the CTBN’s q parameters
+        :type alpha_xu: float
+
+        :return: the value of the marginal likelihood over q
+        :rtype: float
+        """
+
+        'get cim length'
+        values = len(cim._state_residence_times)
+
+        'compute the marginal likelihood for
the current cim' + return np.sum([ + self.single_cim_xu_marginal_likelihood_q( + cim.state_transition_matrix[index, index], + cim._state_residence_times[index], + tau_xu, + alpha_xu) + for index in range(values)]) + + + def single_cim_xu_marginal_likelihood_q(self, + M_xu_suff_stats: float, + T_xu_suff_stats: float, + tau_xu: float=0.1, + alpha_xu: float=1): + """ + Calculate the marginal likelihood on q of the node when assumes a specif value + and a specif parents's assignment + + :param M_xu_suff_stats: value of the suffucient statistic M[x|u] + :type M_xxu_suff_stats: float + :param T_xu_suff_stats: value of the suffucient statistic T[x|u] + :type T_xu_suff_stats: float + :param cim: A conditional_intensity_matrix object with the sufficient statistics + :type cim: class:'ConditionalIntensityMatrix' + :param tau_xu: hyperparameter over the CTBN’s q parameters + :type tau_xu: float + :param alpha_xu: hyperparameter over the CTBN’s q parameters + :type alpha_xu: float + + + :return: the value of the marginal likelihood of the node when assumes a specif value + :rtype: float + """ + return ( + loggamma(alpha_xu + M_xu_suff_stats + 1) + + (log(tau_xu) + * + (alpha_xu+1)) + ) \ + - \ + (loggamma(alpha_xu + 1)+( + log(tau_xu + T_xu_suff_stats) + * + (alpha_xu + M_xu_suff_stats + 1)) + ) + + # end region + + def get_fam_score(self, + cims: np.array, + tau_xu: float=0.1, + alpha_xu: float=1): + """ + Calculate the FamScore value of the node + + + :param cims: np.array with all the node's cims + :type cims: np.array + :param tau_xu: hyperparameter over the CTBN’s q parameters, default to 0.1 + :type tau_xu: float, optional + :param alpha_xu: hyperparameter over the CTBN’s q parameters, default to 1 + :type alpha_xu: float, optional + + + :return: the FamScore value of the node + :rtype: float + """ + #print("------") + #print(self.marginal_likelihood_q(cims, + # tau_xu, + # alpha_xu)) + + #print(self.marginal_likelihood_theta(cims, + # alpha_xu, + # alpha_xxu)) + 'calculate alpha_xxu as a uniform distribution' + alpha_xxu = alpha_xu /(len(cims[0]._state_residence_times) - 1) + + return self.marginal_likelihood_q(cims, + tau_xu, + alpha_xu) \ + + \ + self.marginal_likelihood_theta(cims, + alpha_xu, + alpha_xxu) diff --git a/PyCTBN/PyCTBN/estimators/parameters_estimator.py b/PyCTBN/PyCTBN/estimators/parameters_estimator.py new file mode 100644 index 0000000..4754d58 --- /dev/null +++ b/PyCTBN/PyCTBN/estimators/parameters_estimator.py @@ -0,0 +1,143 @@ +import sys +sys.path.append('../') +import numpy as np + +from ..structure_graph.network_graph import NetworkGraph +from ..structure_graph.set_of_cims import SetOfCims +from ..structure_graph.trajectory import Trajectory + + +class ParametersEstimator(object): + """Has the task of computing the cims of particular node given the trajectories and the net structure + in the graph ``_net_graph``. + + :param trajectories: the trajectories + :type trajectories: Trajectory + :param net_graph: the net structure + :type net_graph: NetworkGraph + :_single_set_of_cims: the set of cims object that will hold the cims of the node + """ + + def __init__(self, trajectories: Trajectory, net_graph: NetworkGraph): + """Constructor Method + """ + self._trajectories = trajectories + self._net_graph = net_graph + self._single_set_of_cims = None + + def fast_init(self, node_id: str) -> None: + """Initializes all the necessary structures for the parameters estimation for the node ``node_id``. 
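+        This method has to be called before ``compute_parameters_for_node``.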
+ + :param node_id: the node label + :type node_id: string + """ + p_vals = self._net_graph._aggregated_info_about_nodes_parents[2] + node_states_number = self._net_graph.get_states_number(node_id) + self._single_set_of_cims = SetOfCims(node_id, p_vals, node_states_number, self._net_graph.p_combs) + + def compute_parameters_for_node(self, node_id: str) -> SetOfCims: + """Compute the CIMS of the node identified by the label ``node_id``. + + :param node_id: the node label + :type node_id: string + :return: A SetOfCims object filled with the computed CIMS + :rtype: SetOfCims + """ + node_indx = self._net_graph.get_node_indx(node_id) + state_res_times = self._single_set_of_cims._state_residence_times + transition_matrices = self._single_set_of_cims._transition_matrices + ParametersEstimator.compute_state_res_time_for_node(self._trajectories.times, + self._trajectories.trajectory, + self._net_graph.time_filtering, + self._net_graph.time_scalar_indexing_strucure, + state_res_times) + ParametersEstimator.compute_state_transitions_for_a_node(node_indx, self._trajectories.complete_trajectory, + self._net_graph.transition_filtering, + self._net_graph.transition_scalar_indexing_structure, + transition_matrices) + self._single_set_of_cims.build_cims(state_res_times, transition_matrices) + return self._single_set_of_cims + + @staticmethod + def compute_state_res_time_for_node(times: np.ndarray, trajectory: np.ndarray, + cols_filter: np.ndarray, scalar_indexes_struct: np.ndarray, + T: np.ndarray) -> None: + """Compute the state residence times for a node and fill the matrix ``T`` with the results + + :param node_indx: the index of the node + :type node_indx: int + :param times: the times deltas vector + :type times: numpy.array + :param trajectory: the trajectory + :type trajectory: numpy.ndArray + :param cols_filter: the columns filtering structure + :type cols_filter: numpy.array + :param scalar_indexes_struct: the indexing structure + :type scalar_indexes_struct: numpy.array + :param T: the state residence times vectors + :type T: numpy.ndArray + """ + T[:] = np.bincount(np.sum(trajectory[:, cols_filter] * scalar_indexes_struct / scalar_indexes_struct[0], axis=1) + .astype(np.int), \ + times, + minlength=scalar_indexes_struct[-1]).reshape(-1, T.shape[1]) + + @staticmethod + def compute_state_transitions_for_a_node(node_indx: int, trajectory: np.ndarray, cols_filter: np.ndarray, + scalar_indexing: np.ndarray, M: np.ndarray) -> None: + """Compute the state residence times for a node and fill the matrices ``M`` with the results. 
+ + :param node_indx: the index of the node + :type node_indx: int + :param trajectory: the trajectory + :type trajectory: numpy.ndArray + :param cols_filter: the columns filtering structure + :type cols_filter: numpy.array + :param scalar_indexing: the indexing structure + :type scalar_indexing: numpy.array + :param M: the state transitions matrices + :type M: numpy.ndArray + """ + diag_indices = np.array([x * M.shape[1] + x % M.shape[1] for x in range(M.shape[0] * M.shape[1])], + dtype=np.int64) + trj_tmp = trajectory[trajectory[:, int(trajectory.shape[1] / 2) + node_indx].astype(np.int) >= 0] + M[:] = np.bincount(np.sum(trj_tmp[:, cols_filter] * scalar_indexing / scalar_indexing[0], axis=1).astype(np.int) + , minlength=scalar_indexing[-1]).reshape(-1, M.shape[1], M.shape[2]) + M_raveled = M.ravel() + M_raveled[diag_indices] = 0 + M_raveled[diag_indices] = np.sum(M, axis=2).ravel() + + def init_sets_cims_container(self): + self.sets_of_cims_struct = acims.SetsOfCimsContainer(self.net_graph.nodes, + self.net_graph.nodes_values, + self.net_graph.get_ordered_by_indx_parents_values_for_all_nodes(), + self.net_graph.p_combs) + + def compute_parameters(self): + #print(self.net_graph.get_nodes()) + #print(self.amalgamated_cims_struct.sets_of_cims) + #enumerate(zip(self.net_graph.get_nodes(), self.amalgamated_cims_struct.sets_of_cims)) + for indx, aggr in enumerate(zip(self.net_graph.nodes, self.sets_of_cims_struct.sets_of_cims)): + #print(self.net_graph.time_filtering[indx]) + #print(self.net_graph.time_scalar_indexing_strucure[indx]) + self.compute_state_res_time_for_node(self.net_graph.get_node_indx(aggr[0]), self.sample_path.trajectories.times, + self.sample_path.trajectories.trajectory, + self.net_graph.time_filtering[indx], + self.net_graph.time_scalar_indexing_strucure[indx], + aggr[1]._state_residence_times) + #print(self.net_graph.transition_filtering[indx]) + #print(self.net_graph.transition_scalar_indexing_structure[indx]) + self.compute_state_transitions_for_a_node(self.net_graph.get_node_indx(aggr[0]), + self.sample_path.trajectories.complete_trajectory, + self.net_graph.transition_filtering[indx], + self.net_graph.transition_scalar_indexing_structure[indx], + aggr[1]._transition_matrices) + aggr[1].build_cims(aggr[1]._state_residence_times, aggr[1]._transition_matrices) + + + + + + + + diff --git a/PyCTBN/PyCTBN/estimators/structure_constraint_based_estimator.py b/PyCTBN/PyCTBN/estimators/structure_constraint_based_estimator.py new file mode 100644 index 0000000..7d5721e --- /dev/null +++ b/PyCTBN/PyCTBN/estimators/structure_constraint_based_estimator.py @@ -0,0 +1,238 @@ + +import itertools +import json +import typing + +import networkx as nx +import numpy as np +from networkx.readwrite import json_graph +import os +from scipy.stats import chi2 as chi2_dist +from scipy.stats import f as f_dist +from tqdm import tqdm + +from ..utility.cache import Cache +from ..structure_graph.conditional_intensity_matrix import ConditionalIntensityMatrix +from ..structure_graph.network_graph import NetworkGraph +from .parameters_estimator import ParametersEstimator +from .structure_estimator import StructureEstimator +from ..structure_graph.sample_path import SamplePath +from ..structure_graph.structure import Structure +from ..optimizers.constraint_based_optimizer import ConstraintBasedOptimizer + +import concurrent.futures + + + +import multiprocessing +from multiprocessing import Pool + + +class StructureConstraintBasedEstimator(StructureEstimator): + """ + Has the task of estimating the 
network structure given the trajectories in samplepath by using a constraint-based approach. + + :param sample_path: the _sample_path object containing the trajectories and the real structure + :type sample_path: SamplePath + :param exp_test_alfa: the significance level for the exponential Hp test + :type exp_test_alfa: float + :param chi_test_alfa: the significance level for the chi Hp test + :type chi_test_alfa: float + :_nodes: the nodes labels + :_nodes_vals: the nodes cardinalities + :_nodes_indxs: the nodes indexes + :_complete_graph: the complete directed graph built using the nodes labels in ``_nodes`` + :_cache: the Cache object + """ + + def __init__(self, sample_path: SamplePath, exp_test_alfa: float, chi_test_alfa: float,known_edges: typing.List= [],thumb_threshold:int = 25): + super().__init__(sample_path,known_edges) + self._exp_test_sign = exp_test_alfa + self._chi_test_alfa = chi_test_alfa + self._thumb_threshold = thumb_threshold + self._cache = Cache() + + def complete_test(self, test_parent: str, test_child: str, parent_set: typing.List, child_states_numb: int, + tot_vars_count: int, parent_indx, child_indx) -> bool: + """Performs a complete independence test on the directed graphs G1 = {test_child U parent_set} + G2 = {G1 U test_parent} (added as an additional parent of the test_child). + Generates all the necessary structures and datas to perform the tests. + + :param test_parent: the node label of the test parent + :type test_parent: string + :param test_child: the node label of the child + :type test_child: string + :param parent_set: the common parent set + :type parent_set: List + :param child_states_numb: the cardinality of the ``test_child`` + :type child_states_numb: int + :param tot_vars_count: the total number of variables in the net + :type tot_vars_count: int + :return: True iff test_child and test_parent are independent given the sep_set parent_set. 
False otherwise + :rtype: bool + """ + p_set = parent_set[:] + complete_info = parent_set[:] + complete_info.append(test_child) + + parents = np.array(parent_set) + parents = np.append(parents, test_parent) + sorted_parents = self._nodes[np.isin(self._nodes, parents)] + cims_filter = sorted_parents != test_parent + + p_set.insert(0, test_parent) + sofc2 = self._cache.find(set(p_set)) + + if not sofc2: + complete_info.append(test_parent) + bool_mask2 = np.isin(self._nodes, complete_info) + l2 = list(self._nodes[bool_mask2]) + indxs2 = self._nodes_indxs[bool_mask2] + vals2 = self._nodes_vals[bool_mask2] + eds2 = list(itertools.product(p_set, test_child)) + s2 = Structure(l2, indxs2, vals2, eds2, tot_vars_count) + g2 = NetworkGraph(s2) + g2.fast_init(test_child) + p2 = ParametersEstimator(self._sample_path.trajectories, g2) + p2.fast_init(test_child) + sofc2 = p2.compute_parameters_for_node(test_child) + self._cache.put(set(p_set), sofc2) + + del p_set[0] + sofc1 = self._cache.find(set(p_set)) + if not sofc1: + g2.remove_node(test_parent) + g2.fast_init(test_child) + p2 = ParametersEstimator(self._sample_path.trajectories, g2) + p2.fast_init(test_child) + sofc1 = p2.compute_parameters_for_node(test_child) + self._cache.put(set(p_set), sofc1) + thumb_value = 0.0 + if child_states_numb > 2: + parent_val = self._sample_path.structure.get_states_number(test_parent) + bool_mask_vals = np.isin(self._nodes, parent_set) + parents_vals = self._nodes_vals[bool_mask_vals] + thumb_value = self.compute_thumb_value(parent_val, child_states_numb, parents_vals) + for cim1, p_comb in zip(sofc1.actual_cims, sofc1.p_combs): + cond_cims = sofc2.filter_cims_with_mask(cims_filter, p_comb) + for cim2 in cond_cims: + if not self.independence_test(child_states_numb, cim1, cim2, thumb_value, parent_indx, child_indx): + return False + return True + + def independence_test(self, child_states_numb: int, cim1: ConditionalIntensityMatrix, + cim2: ConditionalIntensityMatrix, thumb_value: float, parent_indx, child_indx) -> bool: + """Compute the actual independence test using two cims. + It is performed first the exponential test and if the null hypothesis is not rejected, + it is performed also the chi_test. + + :param child_states_numb: the cardinality of the test child + :type child_states_numb: int + :param cim1: a cim belonging to the graph without test parent + :type cim1: ConditionalIntensityMatrix + :param cim2: a cim belonging to the graph with test parent + :type cim2: ConditionalIntensityMatrix + :return: True iff both tests do NOT reject the null hypothesis of independence. False otherwise. 
+ :rtype: bool + """ + M1 = cim1.state_transition_matrix + M2 = cim2.state_transition_matrix + r1s = M1.diagonal() + r2s = M2.diagonal() + C1 = cim1.cim + C2 = cim2.cim + if child_states_numb > 2: + if (np.sum(np.diagonal(M1)) / thumb_value) < self._thumb_threshold: + self._removable_edges_matrix[parent_indx][child_indx] = False + return False + F_stats = C2.diagonal() / C1.diagonal() + exp_alfa = self._exp_test_sign + for val in range(0, child_states_numb): + if F_stats[val] < f_dist.ppf(exp_alfa / 2, r1s[val], r2s[val]) or \ + F_stats[val] > f_dist.ppf(1 - exp_alfa / 2, r1s[val], r2s[val]): + return False + M1_no_diag = M1[~np.eye(M1.shape[0], dtype=bool)].reshape(M1.shape[0], -1) + M2_no_diag = M2[~np.eye(M2.shape[0], dtype=bool)].reshape( + M2.shape[0], -1) + chi_2_quantile = chi2_dist.ppf(1 - self._chi_test_alfa, child_states_numb - 1) + Ks = np.sqrt(r1s / r2s) + Ls = np.sqrt(r2s / r1s) + for val in range(0, child_states_numb): + Chi = np.sum(np.power(Ks[val] * M2_no_diag[val] - Ls[val] *M1_no_diag[val], 2) / + (M1_no_diag[val] + M2_no_diag[val])) + if Chi > chi_2_quantile: + return False + return True + + def compute_thumb_value(self, parent_val, child_val, parent_set_vals): + """Compute the value to test against the thumb_threshold. + + :param parent_val: test parent's variable cardinality + :type parent_val: int + :param child_val: test child's variable cardinality + :type child_val: int + :param parent_set_vals: the cardinalities of the nodes in the current sep-set + :type parent_set_vals: List + :return: the thumb value for the current independence test + :rtype: int + """ + df = (child_val - 1) ** 2 + df = df * parent_val + for v in parent_set_vals: + df = df * v + return df + + def one_iteration_of_CTPC_algorithm(self, var_id: str, tot_vars_count: int)-> typing.List: + """Performs an iteration of the CTPC algorithm using the node ``var_id`` as ``test_child``. + + :param var_id: the node label of the test child + :type var_id: string + """ + optimizer_obj = ConstraintBasedOptimizer( + node_id = var_id, + structure_estimator = self, + tot_vars_count = tot_vars_count) + return optimizer_obj.optimize_structure() + + + def ctpc_algorithm(self,disable_multiprocessing:bool= False ): + """Compute the CTPC algorithm over the entire net. 
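+
+        :param disable_multiprocessing: True to run the estimation sequentially on a single process, default to False
+        :type disable_multiprocessing: bool, optional
+        :return: the set of the estimated edges
+        :rtype: typing.Set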
+ """ + ctpc_algo = self.one_iteration_of_CTPC_algorithm + total_vars_numb = self._sample_path.total_variables_count + + n_nodes= len(self._nodes) + + total_vars_numb_array = [total_vars_numb] * n_nodes + + 'get the number of CPU' + cpu_count = multiprocessing.cpu_count() + + + + 'Remove all the edges from the structure' + self._sample_path.structure.clean_structure_edges() + + 'Estimate the best parents for each node' + #with multiprocessing.Pool(processes=cpu_count) as pool: + #with get_context("spawn").Pool(processes=cpu_count) as pool: + if disable_multiprocessing: + print("DISABILITATO") + cpu_count = 1 + list_edges_partial = [ctpc_algo(n,total_vars_numb) for n in self._nodes] + else: + with concurrent.futures.ProcessPoolExecutor(max_workers=cpu_count) as executor: + list_edges_partial = executor.map(ctpc_algo, + self._nodes, + total_vars_numb_array) + #list_edges_partial = [ctpc_algo(n,total_vars_numb) for n in self._nodes] + + return set(itertools.chain.from_iterable(list_edges_partial)) + + + def estimate_structure(self,disable_multiprocessing:bool=False): + return self.ctpc_algorithm(disable_multiprocessing=disable_multiprocessing) + + + + diff --git a/PyCTBN/PyCTBN/estimators/structure_estimator.py b/PyCTBN/PyCTBN/estimators/structure_estimator.py new file mode 100644 index 0000000..fbf8ea9 --- /dev/null +++ b/PyCTBN/PyCTBN/estimators/structure_estimator.py @@ -0,0 +1,187 @@ + +import itertools +import json +import typing + +import matplotlib.pyplot as plt +import networkx as nx +import numpy as np +from networkx.readwrite import json_graph + +from abc import ABC + +import abc + +from ..utility.cache import Cache +from ..structure_graph.conditional_intensity_matrix import ConditionalIntensityMatrix +from ..structure_graph.network_graph import NetworkGraph +from .parameters_estimator import ParametersEstimator +from ..structure_graph.sample_path import SamplePath +from ..structure_graph.structure import Structure + + +class StructureEstimator(object): + """Has the task of estimating the network structure given the trajectories in ``samplepath``. 
+
+    :param sample_path: the _sample_path object containing the trajectories and the real structure
+    :type sample_path: SamplePath
+    :_nodes: the nodes labels
+    :_nodes_vals: the nodes cardinalities
+    :_nodes_indxs: the nodes indexes
+    :_complete_graph: the complete directed graph built using the nodes labels in ``_nodes``
+    """
+
+    def __init__(self, sample_path: SamplePath, known_edges: typing.List = None):
+        self._sample_path = sample_path
+        self._nodes = np.array(self._sample_path.structure.nodes_labels)
+        self._nodes_vals = self._sample_path.structure.nodes_values
+        self._nodes_indxs = self._sample_path.structure.nodes_indexes
+        self._removable_edges_matrix = self.build_removable_edges_matrix(known_edges)
+        self._complete_graph = StructureEstimator.build_complete_graph(self._sample_path.structure.nodes_labels)
+
+    def build_removable_edges_matrix(self, known_edges: typing.List):
+        """Builds a boolean matrix that shows whether each edge can be removed, based on the given prior knowledge:
+
+        :param known_edges: the list of known edges
+        :type known_edges: List
+        :return: a boolean matrix
+        :rtype: np.ndarray
+        """
+        tot_vars_count = self._sample_path.total_variables_count
+        complete_adj_matrix = np.full((tot_vars_count, tot_vars_count), True)
+        if known_edges:
+            for edge in known_edges:
+                i = self._sample_path.structure.get_node_indx(edge[0])
+                j = self._sample_path.structure.get_node_indx(edge[1])
+                complete_adj_matrix[i][j] = False
+        return complete_adj_matrix
+
+    @staticmethod
+    def build_complete_graph(node_ids: typing.List) -> nx.DiGraph:
+        """Builds a complete directed graph (no self loops) given the nodes labels in the list ``node_ids``:
+
+        :param node_ids: the list of nodes labels
+        :type node_ids: List
+        :return: a complete DiGraph object
+        :rtype: networkx.DiGraph
+        """
+        complete_graph = nx.DiGraph()
+        complete_graph.add_nodes_from(node_ids)
+        complete_graph.add_edges_from(itertools.permutations(node_ids, 2))
+        return complete_graph
+
+    @staticmethod
+    def generate_possible_sub_sets_of_size(u: typing.List, size: int, parent_label: str):
+        """Creates a list containing all possible subsets of the list ``u`` of size ``size``
+        that do not contain the node identified by ``parent_label``.
+
+        :param u: the list of nodes
+        :type u: List
+        :param size: the size of the subsets
+        :type size: int
+        :param parent_label: the node to exclude in the subsets generation
+        :type parent_label: string
+        :return: an Iterator object over the subsets, each returned as a list
+        :rtype: Iterator
+        """
+        list_without_test_parent = u[:]
+        list_without_test_parent.remove(parent_label)
+        return map(list, itertools.combinations(list_without_test_parent, size))
+
+    def save_results(self) -> None:
+        """Save the estimated structure to a .json file in the current working directory.
+        The file is named after the input dataset, with the prefix `results_` prepended.
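+        The graph is serialized in the node-link JSON format provided by ``networkx``.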
+ """ + res = json_graph.node_link_data(self._complete_graph) + name = self._sample_path._importer.file_path.rsplit('/', 1)[-1] + name = name.split('.', 1)[0] + name += '_' + str(self._sample_path._importer.dataset_id()) + name += '.json' + file_name = 'results_' + name + with open(file_name, 'w') as f: + json.dump(res, f) + + + def remove_diagonal_elements(self, matrix): + m = matrix.shape[0] + strided = np.lib.stride_tricks.as_strided + s0, s1 = matrix.strides + return strided(matrix.ravel()[1:], shape=(m - 1, m), strides=(s0 + s1, s1)).reshape(m, -1) + + + @abc.abstractmethod + def estimate_structure(self) -> typing.List: + """Abstract method to estimate the structure + + :return: List of estimated edges + :rtype: Typing.List + """ + pass + + + def adjacency_matrix(self) -> np.ndarray: + """Converts the estimated structure ``_complete_graph`` to a boolean adjacency matrix representation. + + :return: The adjacency matrix of the graph ``_complete_graph`` + :rtype: numpy.ndArray + """ + return nx.adj_matrix(self._complete_graph).toarray().astype(bool) + + def spurious_edges(self) -> typing.List: + """Return the spurious edges present in the estimated structure, if a prior net structure is present in + ``_sample_path.structure``. + + :return: A list containing the spurious edges + :rtype: List + """ + if not self._sample_path.has_prior_net_structure: + raise RuntimeError("Can not compute spurious edges with no prior net structure!") + real_graph = nx.DiGraph() + real_graph.add_nodes_from(self._sample_path.structure.nodes_labels) + real_graph.add_edges_from(self._sample_path.structure.edges) + return nx.difference(real_graph, self._complete_graph).edges + + def save_plot_estimated_structure_graph(self) -> None: + """Plot the estimated structure in a graphical model style. + Spurious edges are colored in red. 
+ """ + graph_to_draw = nx.DiGraph() + spurious_edges = self.spurious_edges() + non_spurious_edges = list(set(self._complete_graph.edges) - set(spurious_edges)) + print(non_spurious_edges) + edges_colors = ['red' if edge in spurious_edges else 'black' for edge in self._complete_graph.edges] + graph_to_draw.add_edges_from(spurious_edges) + graph_to_draw.add_edges_from(non_spurious_edges) + pos = nx.spring_layout(graph_to_draw, k=0.5*1/np.sqrt(len(graph_to_draw.nodes())), iterations=50,scale=10) + options = { + "node_size": 2000, + "node_color": "white", + "edgecolors": "black", + 'linewidths':2, + "with_labels":True, + "font_size":13, + 'connectionstyle': 'arc3, rad = 0.1', + "arrowsize": 15, + "arrowstyle": '<|-', + "width": 1, + "edge_color":edges_colors, + } + + nx.draw(graph_to_draw, pos, **options) + ax = plt.gca() + ax.margins(0.20) + plt.axis("off") + name = self._sample_path._importer.file_path.rsplit('/', 1)[-1] + name = name.split('.', 1)[0] + name += '_' + str(self._sample_path._importer.dataset_id()) + name += '.png' + plt.savefig(name) + plt.clf() + print("Estimated Structure Plot Saved At: ", os.path.abspath(name)) + + + + + diff --git a/PyCTBN/PyCTBN/estimators/structure_score_based_estimator.py b/PyCTBN/PyCTBN/estimators/structure_score_based_estimator.py new file mode 100644 index 0000000..2903db3 --- /dev/null +++ b/PyCTBN/PyCTBN/estimators/structure_score_based_estimator.py @@ -0,0 +1,244 @@ + +import itertools +import json +import typing + +import networkx as nx +import numpy as np +from networkx.readwrite import json_graph + +from random import choice + +import concurrent.futures + +import copy + +from ..structure_graph.conditional_intensity_matrix import ConditionalIntensityMatrix +from ..structure_graph.network_graph import NetworkGraph +from .parameters_estimator import ParametersEstimator +from .structure_estimator import StructureEstimator +from ..structure_graph.sample_path import SamplePath +from ..structure_graph.structure import Structure +from .fam_score_calculator import FamScoreCalculator +from ..optimizers.hill_climbing_search import HillClimbing +from ..optimizers.tabu_search import TabuSearch + + +import multiprocessing +from multiprocessing import Pool + + + + +class StructureScoreBasedEstimator(StructureEstimator): + """ + Has the task of estimating the network structure given the trajectories in samplepath by + using a score based approach. + + :param sample_path: the _sample_path object containing the trajectories and the real structure + :type sample_path: SamplePath + :param tau_xu: hyperparameter over the CTBN’s q parameters, default to 0.1 + :type tau_xu: float, optional + :param alpha_xu: hyperparameter over the CTBN’s q parameters, default to 1 + :type alpha_xu: float, optional + :param known_edges: List of known edges, default to [] + :type known_edges: List, optional + + """ + + def __init__(self, sample_path: SamplePath, tau_xu:int=0.1, alpha_xu:int = 1,known_edges: typing.List= []): + super().__init__(sample_path,known_edges) + self.tau_xu=tau_xu + self.alpha_xu=alpha_xu + + + def estimate_structure(self, max_parents:int = None, iterations_number:int= 40, + patience:int = None, tabu_length:int = None, tabu_rules_duration:int = None, + optimizer: str = 'tabu',disable_multiprocessing:bool= False ): + """ + Compute the score-based algorithm to find the optimal structure + + :param max_parents: maximum number of parents for each variable. 
If None, disabled, default to None
+        :type max_parents: int, optional
+        :param iterations_number: maximum number of iterations of the optimization algorithm, default to 40
+        :type iterations_number: int, optional
+        :param patience: number of iterations without any improvement before stopping the search. If None, disabled, default to None
+        :type patience: int, optional
+        :param tabu_length: maximum length of the data structures used in the optimization process, default to None
+        :type tabu_length: int, optional
+        :param tabu_rules_duration: number of iterations in which each rule keeps its value, default to None
+        :type tabu_rules_duration: int, optional
+        :param optimizer: name of the optimizer algorithm. Possible values: 'hill' (Hill climbing), 'tabu' (tabu search), default to 'tabu'
+        :type optimizer: string, optional
+        :param disable_multiprocessing: True to disable the multiprocessing operations, default to False
+        :type disable_multiprocessing: Boolean, optional
+
+        :return: the set of the estimated edges
+        :rtype: typing.Set
+        """
+        'Save the true edges structure in tuples'
+        true_edges = copy.deepcopy(self._sample_path.structure.edges)
+        true_edges = set(map(tuple, true_edges))
+
+        'Remove all the edges from the structure'
+        self._sample_path.structure.clean_structure_edges()
+
+        estimate_parents = self.estimate_parents
+
+        n_nodes = len(self._nodes)
+
+        l_max_parents = [max_parents] * n_nodes
+        l_iterations_number = [iterations_number] * n_nodes
+        l_patience = [patience] * n_nodes
+        l_tabu_length = [tabu_length] * n_nodes
+        l_tabu_rules_duration = [tabu_rules_duration] * n_nodes
+        l_optimizer = [optimizer] * n_nodes
+
+        'get the number of CPU'
+        cpu_count = multiprocessing.cpu_count()
+        print(f"CPU COUNT: {cpu_count}")
+
+        if disable_multiprocessing:
+            cpu_count = 1
+
+        #with get_context("spawn").Pool(processes=cpu_count) as pool:
+        #with multiprocessing.Pool(processes=cpu_count) as pool:
+
+        'Estimate the best parents for each node'
+        if disable_multiprocessing:
+            list_edges_partial = [estimate_parents(n,max_parents,iterations_number,patience,tabu_length,tabu_rules_duration,optimizer) for n in self._nodes]
+        else:
+            with concurrent.futures.ProcessPoolExecutor(max_workers=cpu_count) as executor:
+                list_edges_partial = executor.map(estimate_parents,
+                                                  self._nodes,
+                                                  l_max_parents,
+                                                  l_iterations_number,
+                                                  l_patience,
+                                                  l_tabu_length,
+                                                  l_tabu_rules_duration,
+                                                  l_optimizer)
+
+        #list_edges_partial = p.map(estimate_parents, self._nodes)
+        #list_edges_partial= estimate_parents('Q',max_parents,iterations_number,patience,tabu_length,tabu_rules_duration,optimizer)
+
+        'Concatenate all the edges lists'
+        set_list_edges = set(itertools.chain.from_iterable(list_edges_partial))
+
+        #print('-------------------------')
+
+        'calculate precision and recall'
+        n_missing_edges = 0
+        n_added_fake_edges = 0
+
+        try:
+            n_added_fake_edges = len(set_list_edges.difference(true_edges))
+
+            n_missing_edges = len(true_edges.difference(set_list_edges))
+
+            n_true_positive = len(true_edges) - n_missing_edges
+
+            precision = n_true_positive / (n_true_positive + n_added_fake_edges)
+
+            recall = n_true_positive / (n_true_positive + n_missing_edges)
+
+            # print(f"number of real edges not found: {n_missing_edges}")
+            # print(f"number of spurious edges added: {n_added_fake_edges}")
+            print(true_edges)
+            print(set_list_edges)
+            print(f"precision: {precision} ")
+            print(f"recall: {recall} ")
+        except Exception as e:
+            print(f"error: {e}")
+
+        return set_list_edges
+
+    def estimate_parents(self, node_id:str, max_parents:int = None, iterations_number:int= 40,
+
patience:int = 10, tabu_length:int = None, tabu_rules_duration:int=5, + optimizer:str = 'hill' ): + """ + Use the FamScore of a node in order to find the best parent nodes + + :param node_id: current node's id + :type node_id: string + :param max_parents: maximum number of parents for each variable. If None, disabled, default to None + :type max_parents: int, optional + :param iterations_number: maximum number of optimization algorithm's iteration, default to 40 + :type iterations_number: int, optional + :param patience: number of iteration without any improvement before to stop the search.If None, disabled, default to None + :type patience: int, optional + :param tabu_length: maximum lenght of the data structures used in the optimization process, default to None + :type tabu_length: int, optional + :param tabu_rules_duration: number of iterations in which each rule keeps its value, default to None + :type tabu_rules_duration: int, optional + :param optimizer: name of the optimizer algorithm. Possible values: 'hill' (Hill climbing),'tabu' (tabu search), defualt to 'tabu' + :type optimizer: string, optional + + :return: A list of the best edges for the currente node + :rtype: List + """ + + "choose the optimizer algotithm" + if optimizer == 'tabu': + optimizer = TabuSearch( + node_id = node_id, + structure_estimator = self, + max_parents = max_parents, + iterations_number = iterations_number, + patience = patience, + tabu_length = tabu_length, + tabu_rules_duration = tabu_rules_duration) + else: #if optimizer == 'hill': + optimizer = HillClimbing( + node_id = node_id, + structure_estimator = self, + max_parents = max_parents, + iterations_number = iterations_number, + patience = patience) + + "call the optmizer's function that calculates the current node's parents" + return optimizer.optimize_structure() + + + def get_score_from_graph(self, + graph: NetworkGraph, + node_id:str): + """ + Get the FamScore of a node + + :param node_id: current node's id + :type node_id: string + :param graph: current graph to be computed + :type graph: class:'NetworkGraph' + + + :return: The FamSCore for this graph structure + :rtype: float + """ + + 'inizialize the graph for a single node' + graph.fast_init(node_id) + + params_estimation = ParametersEstimator(self._sample_path.trajectories, graph) + + 'Inizialize and compute parameters for node' + params_estimation.fast_init(node_id) + SoCims = params_estimation.compute_parameters_for_node(node_id) + + 'calculate the FamScore for the node' + fam_score_obj = FamScoreCalculator() + + score = fam_score_obj.get_fam_score(SoCims.actual_cims,tau_xu = self.tau_xu,alpha_xu=self.alpha_xu) + + #print(f" lo score per {node_id} risulta: {score} ") + return score + + + + diff --git a/PyCTBN/PyCTBN/optimizers/__init__.py b/PyCTBN/PyCTBN/optimizers/__init__.py new file mode 100644 index 0000000..4162bf1 --- /dev/null +++ b/PyCTBN/PyCTBN/optimizers/__init__.py @@ -0,0 +1,4 @@ +from .optimizer import Optimizer +from .tabu_search import TabuSearch +from .hill_climbing_search import HillClimbing +from .constraint_based_optimizer import ConstraintBasedOptimizer \ No newline at end of file diff --git a/PyCTBN/PyCTBN/optimizers/constraint_based_optimizer.py b/PyCTBN/PyCTBN/optimizers/constraint_based_optimizer.py new file mode 100644 index 0000000..65bc19c --- /dev/null +++ b/PyCTBN/PyCTBN/optimizers/constraint_based_optimizer.py @@ -0,0 +1,87 @@ + +import itertools +import json +import typing + +import networkx as nx +import numpy as np + +from random import choice + +from abc 
import ABC + +import copy + + +from .optimizer import Optimizer +from ..estimators.structure_estimator import StructureEstimator +from ..structure_graph.network_graph import NetworkGraph + + +class ConstraintBasedOptimizer(Optimizer): + """ + Optimizer class that implement a CTPC Algorithm + + :param node_id: current node's id + :type node_id: string + :param structure_estimator: a structure estimator object with the information about the net + :type structure_estimator: class:'StructureEstimator' + :param tot_vars_count: number of variables in the dataset + :type tot_vars_count: int + """ + def __init__(self, + node_id:str, + structure_estimator: StructureEstimator, + tot_vars_count:int + ): + """ + Constructor + """ + super().__init__(node_id, structure_estimator) + self.tot_vars_count = tot_vars_count + + + + def optimize_structure(self): + """ + Compute Optimization process for a structure_estimator by using a CTPC Algorithm + + :return: the estimated structure for the node + :rtype: List + """ + print("##################TESTING VAR################", self.node_id) + + graph = NetworkGraph(self.structure_estimator._sample_path.structure) + + other_nodes = [node for node in self.structure_estimator._sample_path.structure.nodes_labels if node != self.node_id] + + for possible_parent in other_nodes: + graph.add_edges([(possible_parent,self.node_id)]) + + + u = other_nodes + #tests_parents_numb = len(u) + #complete_frame = self.complete_graph_frame + #test_frame = complete_frame.loc[complete_frame['To'].isin([self.node_id])] + child_states_numb = self.structure_estimator._sample_path.structure.get_states_number(self.node_id) + b = 0 + while b < len(u): + parent_indx = 0 + while parent_indx < len(u): + removed = False + test_parent = u[parent_indx] + i = self.structure_estimator._sample_path.structure.get_node_indx(test_parent) + j = self.structure_estimator._sample_path.structure.get_node_indx(self.node_id) + if self.structure_estimator._removable_edges_matrix[i][j]: + S = StructureEstimator.generate_possible_sub_sets_of_size(u, b, test_parent) + for parents_set in S: + if self.structure_estimator.complete_test(test_parent, self.node_id, parents_set, child_states_numb, self.tot_vars_count,i,j): + graph.remove_edges([(test_parent, self.node_id)]) + u.remove(test_parent) + removed = True + break + if not removed: + parent_indx += 1 + b += 1 + self.structure_estimator._cache.clear() + return graph.edges \ No newline at end of file diff --git a/PyCTBN/PyCTBN/optimizers/hill_climbing_search.py b/PyCTBN/PyCTBN/optimizers/hill_climbing_search.py new file mode 100644 index 0000000..6783be0 --- /dev/null +++ b/PyCTBN/PyCTBN/optimizers/hill_climbing_search.py @@ -0,0 +1,135 @@ + +import itertools +import json +import typing + +import networkx as nx +import numpy as np + +from random import choice + +from abc import ABC + + +from .optimizer import Optimizer +from ..estimators.structure_estimator import StructureEstimator +from ..structure_graph.network_graph import NetworkGraph + + +class HillClimbing(Optimizer): + """ + Optimizer class that implement Hill Climbing Search + + + :param node_id: current node's id + :type node_id: string + :param structure_estimator: a structure estimator object with the information about the net + :type structure_estimator: class:'StructureEstimator' + :param max_parents: maximum number of parents for each variable. 
If None, disabled, default to None + :type max_parents: int, optional + :param iterations_number: maximum number of optimization algorithm's iteration, default to 40 + :type iterations_number: int, optional + :param patience: number of iteration without any improvement before to stop the search.If None, disabled, default to None + :type patience: int, optional + + + + """ + def __init__(self, + node_id:str, + structure_estimator: StructureEstimator, + max_parents:int = None, + iterations_number:int= 40, + patience:int = None + ): + """ + Constructor + """ + super().__init__(node_id, structure_estimator) + self.max_parents = max_parents + self.iterations_number = iterations_number + self.patience = patience + + + + def optimize_structure(self) -> typing.List: + """ + Compute Optimization process for a structure_estimator by using a Hill Climbing Algorithm + + :return: the estimated structure for the node + :rtype: List + """ + + #'Create the graph for the single node' + graph = NetworkGraph(self.structure_estimator._sample_path.structure) + + 'get the index for the current node' + node_index = self.structure_estimator._sample_path._structure.get_node_indx(self.node_id) + + 'list of prior edges' + prior_parents = set() + + 'Add the edges from prior knowledge' + for i in range(len(self.structure_estimator._removable_edges_matrix)): + if not self.structure_estimator._removable_edges_matrix[i][node_index]: + parent_id= self.structure_estimator._sample_path._structure.get_node_id(i) + prior_parents.add(parent_id) + + 'Add the node to the starting structure' + graph.add_edges([(parent_id, self.node_id)]) + + + + 'get all the possible parents' + other_nodes = [node for node in + self.structure_estimator._sample_path.structure.nodes_labels if + node != self.node_id and + not prior_parents.__contains__(node)] + + actual_best_score = self.structure_estimator.get_score_from_graph(graph,self.node_id) + + patince_count = 0 + for i in range(self.iterations_number): + 'choose a new random edge' + current_new_parent = choice(other_nodes) + current_edge = (current_new_parent,self.node_id) + added = False + parent_removed = None + + + if graph.has_edge(current_edge): + graph.remove_edges([current_edge]) + else: + 'check the max_parents constraint' + if self.max_parents is not None: + parents_list = graph.get_parents_by_id(self.node_id) + if len(parents_list) >= self.max_parents : + parent_removed = (choice(parents_list), self.node_id) + graph.remove_edges([parent_removed]) + graph.add_edges([current_edge]) + added = True + #print('**************************') + current_score = self.structure_estimator.get_score_from_graph(graph,self.node_id) + + + if current_score > actual_best_score: + 'update current best score' + actual_best_score = current_score + patince_count = 0 + else: + 'undo the last update' + if added: + graph.remove_edges([current_edge]) + 'If a parent was removed, add it again to the graph' + if parent_removed is not None: + graph.add_edges([parent_removed]) + else: + graph.add_edges([current_edge]) + 'update patience count' + patince_count += 1 + + if self.patience is not None and patince_count > self.patience: + break + + print(f"finito variabile: {self.node_id}") + return graph.edges \ No newline at end of file diff --git a/PyCTBN/PyCTBN/optimizers/optimizer.py b/PyCTBN/PyCTBN/optimizers/optimizer.py new file mode 100644 index 0000000..36445c0 --- /dev/null +++ b/PyCTBN/PyCTBN/optimizers/optimizer.py @@ -0,0 +1,39 @@ + +import itertools +import json +import typing + +import networkx as nx 
+import numpy as np
+
+import abc
+
+from ..estimators.structure_estimator import StructureEstimator
+
+
+class Optimizer(abc.ABC):
+    """
+    Interface class for all the optimizers in PyCTBN
+
+    :param node_id: the node label
+    :type node_id: string
+    :param structure_estimator: a StructureEstimator object used to predict the structure
+    :type structure_estimator: class:'StructureEstimator'
+
+    """
+
+    def __init__(self, node_id:str, structure_estimator: StructureEstimator):
+        self.node_id = node_id
+        self.structure_estimator = structure_estimator
+
+    @abc.abstractmethod
+    def optimize_structure(self) -> typing.List:
+        """
+        Compute the optimization process for a structure_estimator
+
+        :return: the estimated structure for the node
+        :rtype: List
+        """
+        pass
diff --git a/PyCTBN/PyCTBN/optimizers/tabu_search.py b/PyCTBN/PyCTBN/optimizers/tabu_search.py
new file mode 100644
index 0000000..e15dd40
--- /dev/null
+++ b/PyCTBN/PyCTBN/optimizers/tabu_search.py
@@ -0,0 +1,199 @@
+
+import itertools
+import json
+import typing
+
+import networkx as nx
+import numpy as np
+
+from random import choice,sample
+
+from abc import ABC
+
+
+from .optimizer import Optimizer
+from ..estimators.structure_estimator import StructureEstimator
+from ..structure_graph.network_graph import NetworkGraph
+
+import queue
+
+
+class TabuSearch(Optimizer):
+    """
+    Optimizer class that implements Tabu Search
+
+    :param node_id: current node's id
+    :type node_id: string
+    :param structure_estimator: a structure estimator object with the information about the net
+    :type structure_estimator: class:'StructureEstimator'
+    :param max_parents: maximum number of parents for each variable. If None, disabled, default to None
+    :type max_parents: int, optional
+    :param iterations_number: maximum number of iterations of the optimization algorithm, default to 40
+    :type iterations_number: int, optional
+    :param patience: number of iterations without any improvement before stopping the search. If None, disabled, default to None
+    :type patience: int, optional
+    :param tabu_length: maximum length of the data structures used in the optimization process, default to None
+    :type tabu_length: int, optional
+    :param tabu_rules_duration: number of iterations in which each rule keeps its value, default to None
+    :type tabu_rules_duration: int, optional
+
+    """
+    def __init__(self,
+                 node_id:str,
+                 structure_estimator: StructureEstimator,
+                 max_parents:int = None,
+                 iterations_number:int= 40,
+                 patience:int = None,
+                 tabu_length:int = None,
+                 tabu_rules_duration = None
+                 ):
+        """
+        Constructor
+        """
+        super().__init__(node_id, structure_estimator)
+        self.max_parents = max_parents
+        self.iterations_number = iterations_number
+        self.patience = patience
+        self.tabu_length = tabu_length
+        self.tabu_rules_duration = tabu_rules_duration
+
+    def optimize_structure(self) -> typing.List:
+        """
+        Compute the optimization process for a structure_estimator by using a Tabu Search Algorithm
+
+        :return: the estimated structure for the node
+        :rtype: List
+        """
+        print(f"tabu search is processing the structure of {self.node_id}")
+
+        'Create the graph for the single node'
+        graph = NetworkGraph(self.structure_estimator._sample_path.structure)
+
+        'get the index for the current node'
+        node_index = self.structure_estimator._sample_path._structure.get_node_indx(self.node_id)
+
+        'list of prior edges'
+        prior_parents = set()
+
+        'Add the edges from prior knowledge'
+        for i in range(len(self.structure_estimator._removable_edges_matrix)):
+            if not self.structure_estimator._removable_edges_matrix[i][node_index]:
+                parent_id = self.structure_estimator._sample_path._structure.get_node_id(i)
+                prior_parents.add(parent_id)
+
+                'Add the node to the starting structure'
+                graph.add_edges([(parent_id, self.node_id)])
+
+        'get all the possible parents'
+        other_nodes = set([node for node in
+                           self.structure_estimator._sample_path.structure.nodes_labels if
+                           node != self.node_id and
+                           not prior_parents.__contains__(node)])
+
+        'calculate the score for the node without parents'
+        actual_best_score = self.structure_estimator.get_score_from_graph(graph, self.node_id)
+
+        'initialize tabu_length and tabu_rules_duration if None'
+        if self.tabu_length is None:
+            self.tabu_length = len(other_nodes)
+
+        if self.tabu_rules_duration is None:
+            self.tabu_rules_duration = len(other_nodes)
+
+        'initialize the data structures'
+        tabu_set = set()
+        tabu_queue = queue.Queue()
+
+        patience_count = 0
+        tabu_count = 0
+        for i in range(self.iterations_number):
+
+            current_possible_nodes = other_nodes.difference(tabu_set)
+
+            'choose a new random edge according to the tabu restriction'
+            if(len(current_possible_nodes) > 0):
+                current_new_parent = sample(current_possible_nodes, k=1)[0]
+            else:
+                current_new_parent = tabu_queue.get()
+                tabu_set.remove(current_new_parent)
+
+            current_edge = (current_new_parent, self.node_id)
+            added = False
+            parent_removed = None
+
+            if graph.has_edge(current_edge):
+                graph.remove_edges([current_edge])
+            else:
+                'check the max_parents constraint'
+                if self.max_parents is not None:
+                    parents_list = graph.get_parents_by_id(self.node_id)
+                    if len(parents_list) >= self.max_parents :
+                        parent_removed = (choice(parents_list), self.node_id)
+                        graph.remove_edges([parent_removed])
+                graph.add_edges([current_edge])
+                added = True
+            #print('**************************')
+            current_score = self.structure_estimator.get_score_from_graph(graph, self.node_id)
+
+            # print("-------------------------------------------")
+            # print(f"Current new parent: {current_new_parent}")
+            # print(f"Current score: {current_score}")
+            # print(f"Current best score: {actual_best_score}")
+            # print(f"tabu list : {str(tabu_set)} length: {len(tabu_set)}")
+            # print(f"tabu queue : {str(tabu_queue)} length: {tabu_queue.qsize()}")
+            # print(f"graph edges: {graph.edges}")
+            # print("-------------------------------------------")
+            # input()
+            if current_score > actual_best_score:
+                'update current best score'
+                actual_best_score = current_score
+                patience_count = 0
+                'update tabu list'
+
+            else:
+                'undo the last update'
+                if added:
+                    graph.remove_edges([current_edge])
+                    'If a parent was removed, add it again to the graph'
+                    if parent_removed is not None:
+                        graph.add_edges([parent_removed])
+                else:
+                    graph.add_edges([current_edge])
+                'update patience count'
+                patience_count += 1
+
+            if tabu_queue.qsize() >= self.tabu_length:
+                current_removed = tabu_queue.get()
+                tabu_set.remove(current_removed)
+            'Add the node on the tabu list'
+            tabu_queue.put(current_new_parent)
+            tabu_set.add(current_new_parent)
+
+            tabu_count += 1
+
+            'Every tabu_rules_duration steps remove an item from the tabu list'
+            if tabu_count % self.tabu_rules_duration == 0:
+                if tabu_queue.qsize() > 0:
+                    current_removed = tabu_queue.get()
+                    tabu_set.remove(current_removed)
+                    tabu_count = 0
+                else:
+                    tabu_count = 0
+
+            if self.patience is not None and patience_count > self.patience:
+                break
+
+        print(f"finished variable: {self.node_id}")
+        return graph.edges
\ No newline at end of file
diff --git 
a/PyCTBN/PyCTBN/structure_graph/__init__.py b/PyCTBN/PyCTBN/structure_graph/__init__.py new file mode 100644 index 0000000..85f18a2 --- /dev/null +++ b/PyCTBN/PyCTBN/structure_graph/__init__.py @@ -0,0 +1,6 @@ +from .conditional_intensity_matrix import ConditionalIntensityMatrix +from .network_graph import NetworkGraph +from .sample_path import SamplePath +from .set_of_cims import SetOfCims +from .structure import Structure +from .trajectory import Trajectory \ No newline at end of file diff --git a/PyCTBN/PyCTBN/structure_graph/conditional_intensity_matrix.py b/PyCTBN/PyCTBN/structure_graph/conditional_intensity_matrix.py new file mode 100644 index 0000000..4abfdd0 --- /dev/null +++ b/PyCTBN/PyCTBN/structure_graph/conditional_intensity_matrix.py @@ -0,0 +1,42 @@ +import numpy as np + + +class ConditionalIntensityMatrix(object): + """Abstracts the Conditional Intesity matrix of a node as aggregation of the state residence times vector + and state transition matrix and the actual CIM matrix. + + :param state_residence_times: state residence times vector + :type state_residence_times: numpy.array + :param state_transition_matrix: the transitions count matrix + :type state_transition_matrix: numpy.ndArray + :_cim: the actual cim of the node + """ + def __init__(self, state_residence_times: np.array, state_transition_matrix: np.array): + """Constructor Method + """ + self._state_residence_times = state_residence_times + self._state_transition_matrix = state_transition_matrix + self._cim = self.state_transition_matrix.astype(np.float64) + + def compute_cim_coefficients(self) -> None: + """Compute the coefficients of the matrix _cim by using the following equality q_xx' = M[x, x'] / T[x]. + The class member ``_cim`` will contain the computed cim + """ + np.fill_diagonal(self._cim, self._cim.diagonal() * -1) + self._cim = ((self._cim.T + 1) / (self._state_residence_times + 1)).T + + @property + def state_residence_times(self) -> np.ndarray: + return self._state_residence_times + + @property + def state_transition_matrix(self) -> np.ndarray: + return self._state_transition_matrix + + @property + def cim(self) -> np.ndarray: + return self._cim + + def __repr__(self): + return 'CIM:\n' + str(self.cim) + diff --git a/PyCTBN/PyCTBN/structure_graph/network_graph.py b/PyCTBN/PyCTBN/structure_graph/network_graph.py new file mode 100644 index 0000000..623981d --- /dev/null +++ b/PyCTBN/PyCTBN/structure_graph/network_graph.py @@ -0,0 +1,293 @@ + +import typing + +import networkx as nx +import numpy as np + +from .structure import Structure + + +class NetworkGraph(object): + """Abstracts the infos contained in the Structure class in the form of a directed graph. + Has the task of creating all the necessary filtering and indexing structures for parameters estimation + + :param graph_struct: the ``Structure`` object from which infos about the net will be extracted + :type graph_struct: Structure + :_graph: directed graph + :_aggregated_info_about_nodes_parents: a structure that contains all the necessary infos + about every parents of the node of which all the indexing and filtering structures will be constructed. 
+ :_time_scalar_indexing_structure: the indexing structure for state res time estimation + :_transition_scalar_indexing_structure: the indexing structure for transition computation + :_time_filtering: the columns filtering structure used in the computation of the state res times + :_transition_filtering: the columns filtering structure used in the computation of the transition + from one state to another + :_p_combs_structure: all the possible parents states combination for the node of interest + """ + + def __init__(self, graph_struct: Structure): + """Constructor Method + """ + self._graph_struct = graph_struct + self._graph = nx.DiGraph() + self._aggregated_info_about_nodes_parents = None + self._time_scalar_indexing_structure = None + self._transition_scalar_indexing_structure = None + self._time_filtering = None + self._transition_filtering = None + self._p_combs_structure = None + + def init_graph(self): + self.add_nodes(self._nodes_labels) + self.add_edges(self.graph_struct.edges) + self.aggregated_info_about_nodes_parents = self.get_ord_set_of_par_of_all_nodes() + self._fancy_indexing = self.build_fancy_indexing_structure(0) + self.build_scalar_indexing_structures() + self.build_time_columns_filtering_structure() + self.build_transition_columns_filtering_structure() + self._p_combs_structure = self.build_p_combs_structure() + + def fast_init(self, node_id: str) -> None: + """Initializes all the necessary structures for parameters estimation of the node identified by the label + node_id + + :param node_id: the label of the node + :type node_id: string + """ + self.add_nodes(self._graph_struct.nodes_labels) + self.add_edges(self._graph_struct.edges) + self._aggregated_info_about_nodes_parents = self.get_ordered_by_indx_set_of_parents(node_id) + p_indxs = self._aggregated_info_about_nodes_parents[1] + p_vals = self._aggregated_info_about_nodes_parents[2] + node_states = self.get_states_number(node_id) + node_indx = self.get_node_indx(node_id) + cols_number = self._graph_struct.total_variables_number + self._time_scalar_indexing_structure = NetworkGraph.\ + build_time_scalar_indexing_structure_for_a_node(node_states, p_vals) + self._transition_scalar_indexing_structure = NetworkGraph.\ + build_transition_scalar_indexing_structure_for_a_node(node_states, p_vals) + self._time_filtering = NetworkGraph.build_time_columns_filtering_for_a_node(node_indx, p_indxs) + self._transition_filtering = NetworkGraph.build_transition_filtering_for_a_node(node_indx, p_indxs, cols_number) + self._p_combs_structure = NetworkGraph.build_p_comb_structure_for_a_node(p_vals) + + def add_nodes(self, list_of_nodes: typing.List) -> None: + """Adds the nodes to the ``_graph`` contained in the list of nodes ``list_of_nodes``. 
+ Sets all the properties that identify a nodes (index, positional index, cardinality) + + :param list_of_nodes: the nodes to add to ``_graph`` + :type list_of_nodes: List + """ + nodes_indxs = self._graph_struct.nodes_indexes + nodes_vals = self._graph_struct.nodes_values + pos = 0 + for id, node_indx, node_val in zip(list_of_nodes, nodes_indxs, nodes_vals): + self._graph.add_node(id, indx=node_indx, val=node_val, pos_indx=pos) + pos += 1 + + def has_edge(self,edge:tuple)-> bool: + """ + Check if the graph contains a specific edge + + Parameters: + edge: a tuple that rappresents the edge + Returns: + bool + """ + return self._graph.has_edge(edge[0],edge[1]) + + def add_edges(self, list_of_edges: typing.List) -> None: + """Add the edges to the ``_graph`` contained in the list ``list_of_edges``. + + :param list_of_edges: the list containing of tuples containing the edges + :type list_of_edges: List + """ + self._graph.add_edges_from(list_of_edges) + + def remove_node(self, node_id: str) -> None: + """Remove the node ``node_id`` from all the class members. + Initialize all the filtering/indexing structures. + """ + self._graph.remove_node(node_id) + self._graph_struct.remove_node(node_id) + self.clear_indexing_filtering_structures() + + def clear_indexing_filtering_structures(self) -> None: + """Initialize all the filtering/indexing structures. + """ + self._aggregated_info_about_nodes_parents = None + self._time_scalar_indexing_structure = None + self._transition_scalar_indexing_structure = None + self._time_filtering = None + self._transition_filtering = None + self._p_combs_structure = None + + def get_ordered_by_indx_set_of_parents(self, node: str) -> typing.Tuple: + """Builds the aggregated structure that holds all the infos relative to the parent set of the node, namely + (parents_labels, parents_indexes, parents_cardinalities). + + :param node: the label of the node + :type node: string + :return: a tuple containing all the parent set infos + :rtype: Tuple + """ + parents = self.get_parents_by_id(node) + nodes = self._graph_struct.nodes_labels + d = {v: i for i, v in enumerate(nodes)} + sorted_parents = sorted(parents, key=lambda v: d[v]) + get_node_indx = self.get_node_indx + p_indxes = [get_node_indx(node) for node in sorted_parents] + p_values = [self.get_states_number(node) for node in sorted_parents] + return sorted_parents, p_indxes, p_values + + def remove_edges(self, list_of_edges: typing.List) -> None: + """Remove the edges to the graph contained in the list list_of_edges. + + :param list_of_edges: The edges to remove from the graph + :type list_of_edges: List + """ + self._graph.remove_edges_from(list_of_edges) + + @staticmethod + def build_time_scalar_indexing_structure_for_a_node(node_states: int, + parents_vals: typing.List) -> np.ndarray: + """Builds an indexing structure for the computation of state residence times values. + + :param node_states: the node cardinality + :type node_states: int + :param parents_vals: the caridinalites of the node's parents + :type parents_vals: List + :return: The time indexing structure + :rtype: numpy.ndArray + """ + T_vector = np.array([node_states]) + T_vector = np.append(T_vector, parents_vals) + T_vector = T_vector.cumprod().astype(np.int) + return T_vector + + @staticmethod + def build_transition_scalar_indexing_structure_for_a_node(node_states_number: int, parents_vals: typing.List) \ + -> np.ndarray: + """Builds an indexing structure for the computation of state transitions values. 
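+ For example, with ``node_states_number = 3`` and ``parents_vals = [2, 2]`` the returned vector is the cumulative product of ``[3, 3, 2, 2]``, i.e. ``[3, 9, 18, 36]``.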
+ + :param node_states_number: the node cardinality + :type node_states_number: int + :param parents_vals: the caridinalites of the node's parents + :type parents_vals: List + :return: The transition indexing structure + :rtype: numpy.ndArray + """ + M_vector = np.array([node_states_number, + node_states_number]) + M_vector = np.append(M_vector, parents_vals) + M_vector = M_vector.cumprod().astype(np.int) + return M_vector + + @staticmethod + def build_time_columns_filtering_for_a_node(node_indx: int, p_indxs: typing.List) -> np.ndarray: + """ + Builds the necessary structure to filter the desired columns indicated by ``node_indx`` and ``p_indxs`` + in the dataset. + This structute will be used in the computation of the state res times. + :param node_indx: the index of the node + :type node_indx: int + :param p_indxs: the indexes of the node's parents + :type p_indxs: List + :return: The filtering structure for times estimation + :rtype: numpy.ndArray + """ + return np.append(np.array([node_indx], dtype=np.int), p_indxs).astype(np.int) + + @staticmethod + def build_transition_filtering_for_a_node(node_indx: int, p_indxs: typing.List, nodes_number: int) \ + -> np.ndarray: + """Builds the necessary structure to filter the desired columns indicated by ``node_indx`` and ``p_indxs`` + in the dataset. + This structure will be used in the computation of the state transitions values. + :param node_indx: the index of the node + :type node_indx: int + :param p_indxs: the indexes of the node's parents + :type p_indxs: List + :param nodes_number: the total number of nodes in the dataset + :type nodes_number: int + :return: The filtering structure for transitions estimation + :rtype: numpy.ndArray + """ + return np.array([node_indx + nodes_number, node_indx, *p_indxs], dtype=np.int) + + @staticmethod + def build_p_comb_structure_for_a_node(parents_values: typing.List) -> np.ndarray: + """ + Builds the combinatorial structure that contains the combinations of all the values contained in + ``parents_values``. 
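+ For example, ``parents_values = [2, 3]`` produces the grid ``[[0, 0], [1, 0], [2, 0], [0, 1], [1, 1], [2, 1]]``.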
+ + :param parents_values: the cardinalities of the nodes + :type parents_values: List + :return: A numpy matrix containing a grid of the combinations + :rtype: numpy.ndArray + """ + tmp = [] + for val in parents_values: + tmp.append([x for x in range(val)]) + if len(parents_values) > 0: + parents_comb = np.array(np.meshgrid(*tmp)).T.reshape(-1, len(parents_values)) + if len(parents_values) > 1: + tmp_comb = parents_comb[:, 1].copy() + parents_comb[:, 1] = parents_comb[:, 0].copy() + parents_comb[:, 0] = tmp_comb + else: + parents_comb = np.array([[]], dtype=np.int) + return parents_comb + + def get_parents_by_id(self, node_id) -> typing.List: + """Returns a list of labels of the parents of the node ``node_id`` + + :param node_id: the node label + :type node_id: string + :return: a List of labels of the parents + :rtype: List + """ + return list(self._graph.predecessors(node_id)) + + def get_states_number(self, node_id) -> int: + return self._graph.nodes[node_id]['val'] + + def get_node_indx(self, node_id) -> int: + return nx.get_node_attributes(self._graph, 'indx')[node_id] + + def get_positional_node_indx(self, node_id) -> int: + return self._graph.nodes[node_id]['pos_indx'] + + @property + def nodes(self) -> typing.List: + return self._graph_struct.nodes_labels + + @property + def edges(self) -> typing.List: + return list(self._graph.edges) + + @property + def nodes_indexes(self) -> np.ndarray: + return self._graph_struct.nodes_indexes + + @property + def nodes_values(self) -> np.ndarray: + return self._graph_struct.nodes_values + + @property + def time_scalar_indexing_strucure(self) -> np.ndarray: + return self._time_scalar_indexing_structure + + @property + def time_filtering(self) -> np.ndarray: + return self._time_filtering + + @property + def transition_scalar_indexing_structure(self) -> np.ndarray: + return self._transition_scalar_indexing_structure + + @property + def transition_filtering(self) -> np.ndarray: + return self._transition_filtering + + @property + def p_combs(self) -> np.ndarray: + return self._p_combs_structure diff --git a/PyCTBN/PyCTBN/structure_graph/sample_path.py b/PyCTBN/PyCTBN/structure_graph/sample_path.py new file mode 100644 index 0000000..80b51d9 --- /dev/null +++ b/PyCTBN/PyCTBN/structure_graph/sample_path.py @@ -0,0 +1,91 @@ + + +import numpy as np +import pandas as pd + +from .structure import Structure +from .trajectory import Trajectory +from ..utility.abstract_importer import AbstractImporter + + + +class SamplePath(object): + """Aggregates all the informations about the trajectories, the real structure of the sampled net and variables + cardinalites. Has the task of creating the objects ``Trajectory`` and ``Structure`` that will + contain the mentioned data. 
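+ A minimal usage sketch (assuming an already constructed importer such as a ``JsonImporter``, as in ``basic_main.py``): + + .. code-block:: python + + s1 = SamplePath(importer) + s1.build_trajectories() + s1.build_structure() + print(s1.structure)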
+ + :param importer: the Importer object which contains the imported and processed data + :type importer: AbstractImporter + :_trajectories: the ``Trajectory`` object that will contain all the concatenated trajectories + :_structure: the ``Structure`` Object that will contain all the structural infos about the net + :_total_variables_count: the number of variables in the net + """ + def __init__(self, importer: AbstractImporter): + """Constructor Method + """ + self._importer = importer + if self._importer._df_variables is None or self._importer._concatenated_samples is None: + raise RuntimeError('The importer object has to contain the all processed data!') + if self._importer._df_variables.empty: + raise RuntimeError('The importer object has to contain the all processed data!') + if isinstance(self._importer._concatenated_samples, pd.DataFrame): + if self._importer._concatenated_samples.empty: + raise RuntimeError('The importer object has to contain the all processed data!') + if isinstance(self._importer._concatenated_samples, np.ndarray): + if self._importer._concatenated_samples.size == 0: + raise RuntimeError('The importer object has to contain the all processed data!') + self._trajectories = None + self._structure = None + self._total_variables_count = None + + def build_trajectories(self) -> None: + """Builds the Trajectory object that will contain all the trajectories. + Clears all the unused dataframes in ``_importer`` Object + """ + self._trajectories = \ + Trajectory(self._importer.build_list_of_samples_array(self._importer.concatenated_samples), + len(self._importer.sorter) + 1) + self._importer.clear_concatenated_frame() + + def build_structure(self) -> None: + """ + Builds the ``Structure`` object that aggregates all the infos about the net. + """ + if self._importer.sorter != self._importer.variables.iloc[:, 0].to_list(): + raise RuntimeError("The Dataset columns order have to match the order of labels in the variables Frame!") + + self._total_variables_count = len(self._importer.sorter) + labels = self._importer.variables.iloc[:, 0].to_list() + indxs = self._importer.variables.index.to_numpy() + vals = self._importer.variables.iloc[:, 1].to_numpy() + if self._importer.structure is None or self._importer.structure.empty: + edges = [] + else: + edges = list(self._importer.structure.to_records(index=False)) + self._structure = Structure(labels, indxs, vals, edges, + self._total_variables_count) + + def clear_memory(self): + self._importer._raw_data = [] + + @property + def trajectories(self) -> Trajectory: + return self._trajectories + + @property + def structure(self) -> Structure: + return self._structure + + @property + def total_variables_count(self) -> int: + return self._total_variables_count + + @property + def has_prior_net_structure(self) -> bool: + return bool(self._structure.edges) + + + + + + diff --git a/PyCTBN/PyCTBN/structure_graph/set_of_cims.py b/PyCTBN/PyCTBN/structure_graph/set_of_cims.py new file mode 100644 index 0000000..81caff5 --- /dev/null +++ b/PyCTBN/PyCTBN/structure_graph/set_of_cims.py @@ -0,0 +1,97 @@ + + +import typing + +import numpy as np + +from .conditional_intensity_matrix import ConditionalIntensityMatrix + + +class SetOfCims(object): + """Aggregates all the CIMS of the node identified by the label _node_id. 
+ + :param node_id: the node label + :type node_ind: string + :param parents_states_number: the cardinalities of the parents + :type parents_states_number: List + :param node_states_number: the caridinality of the node + :type node_states_number: int + :param p_combs: the p_comb structure bound to this node + :type p_combs: numpy.ndArray + :_state_residence_time: matrix containing all the state residence time vectors for the node + :_transition_matrices: matrix containing all the transition matrices for the node + :_actual_cims: the cims of the node + """ + + def __init__(self, node_id: str, parents_states_number: typing.List, node_states_number: int, p_combs: np.ndarray): + """Constructor Method + """ + self._node_id = node_id + self._parents_states_number = parents_states_number + self._node_states_number = node_states_number + self._actual_cims = [] + self._state_residence_times = None + self._transition_matrices = None + self._p_combs = p_combs + self.build_times_and_transitions_structures() + + def build_times_and_transitions_structures(self) -> None: + """Initializes at the correct dimensions the state residence times matrix and the state transition matrices. + """ + if not self._parents_states_number: + self._state_residence_times = np.zeros((1, self._node_states_number), dtype=np.float) + self._transition_matrices = np.zeros((1, self._node_states_number, self._node_states_number), dtype=np.int) + else: + self._state_residence_times = \ + np.zeros((np.prod(self._parents_states_number), self._node_states_number), dtype=np.float) + self._transition_matrices = np.zeros([np.prod(self._parents_states_number), self._node_states_number, + self._node_states_number], dtype=np.int) + + def build_cims(self, state_res_times: np.ndarray, transition_matrices: np.ndarray) -> None: + """Build the ``ConditionalIntensityMatrix`` objects given the state residence times and transitions matrices. + Compute the cim coefficients.The class member ``_actual_cims`` will contain the computed cims. + + :param state_res_times: the state residence times matrix + :type state_res_times: numpy.ndArray + :param transition_matrices: the transition matrices + :type transition_matrices: numpy.ndArray + """ + for state_res_time_vector, transition_matrix in zip(state_res_times, transition_matrices): + cim_to_add = ConditionalIntensityMatrix(state_res_time_vector, transition_matrix) + cim_to_add.compute_cim_coefficients() + self._actual_cims.append(cim_to_add) + self._actual_cims = np.array(self._actual_cims) + self._transition_matrices = None + self._state_residence_times = None + + def filter_cims_with_mask(self, mask_arr: np.ndarray, comb: typing.List) -> np.ndarray: + """Filter the cims contained in the array ``_actual_cims`` given the boolean mask ``mask_arr`` and the index + ``comb``. 
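+ For example, if ``_p_combs`` is the grid ``[[0, 0], [1, 0], [2, 0], [0, 1], [1, 1], [2, 1]]``, then ``mask_arr = np.array([True, False])`` and ``comb = [1]`` select the cims at positions 1 and 4.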
+ + :param mask_arr: the boolean mask that indicates which parent to consider + :type mask_arr: numpy.array + :param comb: the state/s of the filtered parents + :type comb: numpy.array + :return: Array of ``ConditionalIntensityMatrix`` objects + :rtype: numpy.array + """ + if mask_arr.size <= 1: + return self._actual_cims + else: + flat_indxs = np.argwhere(np.all(self._p_combs[:, mask_arr] == comb, axis=1)).ravel() + return self._actual_cims[flat_indxs] + + @property + def actual_cims(self) -> np.ndarray: + return self._actual_cims + + @property + def p_combs(self) -> np.ndarray: + return self._p_combs + + def get_cims_number(self): + return len(self._actual_cims) + + + + diff --git a/PyCTBN/PyCTBN/structure_graph/structure.py b/PyCTBN/PyCTBN/structure_graph/structure.py new file mode 100644 index 0000000..a9d60cc --- /dev/null +++ b/PyCTBN/PyCTBN/structure_graph/structure.py @@ -0,0 +1,124 @@ + +import typing as ty + +import numpy as np + + +class Structure(object): + """Contains all the infos about the network structure(nodes labels, nodes caridinalites, edges, indexes) + + :param nodes_labels_list: the symbolic names of the variables + :type nodes_labels_list: List + :param nodes_indexes_arr: the indexes of the nodes + :type nodes_indexes_arr: numpy.ndArray + :param nodes_vals_arr: the cardinalites of the nodes + :type nodes_vals_arr: numpy.ndArray + :param edges_list: the edges of the network + :type edges_list: List + :param total_variables_number: the total number of variables in the dataset + :type total_variables_number: int + """ + + def __init__(self, nodes_labels_list: ty.List, nodes_indexes_arr: np.ndarray, nodes_vals_arr: np.ndarray, + edges_list: ty.List, total_variables_number: int): + """Constructor Method + """ + self._nodes_labels_list = nodes_labels_list + self._nodes_indexes_arr = nodes_indexes_arr + self._nodes_vals_arr = nodes_vals_arr + self._edges_list = edges_list + self._total_variables_number = total_variables_number + + def remove_node(self, node_id: str) -> None: + """Remove the node ``node_id`` from all the class members. + The class member ``_total_variables_number`` since it refers to the total number of variables in the dataset. + """ + node_positional_indx = self._nodes_labels_list.index(node_id) + del self._nodes_labels_list[node_positional_indx] + self._nodes_indexes_arr = np.delete(self._nodes_indexes_arr, node_positional_indx) + self._nodes_vals_arr = np.delete(self._nodes_vals_arr, node_positional_indx) + self._edges_list = [(from_node, to_node) for (from_node, to_node) in self._edges_list if (from_node != node_id + and to_node != node_id)] + + @property + def edges(self) -> ty.List: + return self._edges_list + + @property + def nodes_labels(self) -> ty.List: + return self._nodes_labels_list + + @property + def nodes_indexes(self) -> np.ndarray: + return self._nodes_indexes_arr + + @property + def nodes_values(self) -> np.ndarray: + return self._nodes_vals_arr + + @property + def total_variables_number(self) -> int: + return self._total_variables_number + + def get_node_id(self, node_indx: int) -> str: + """Given the ``node_index`` returns the node label. 
+ + :param node_indx: the node index + :type node_indx: int + :return: the node label + :rtype: string + """ + return self._nodes_labels_list[node_indx] + + def clean_structure_edges(self): + self._edges_list = list() + + def add_edge(self, edge: tuple): + self._edges_list.append(edge) + print(self._edges_list) + + def remove_edge(self, edge: tuple): + self._edges_list.remove(edge) + + def contains_edge(self, edge: tuple) -> bool: + return edge in self._edges_list + + def get_node_indx(self, node_id: str) -> int: + """Given the node label ``node_id`` returns the node index. + + :param node_id: the node label + :type node_id: string + :return: the node index + :rtype: int + """ + pos_indx = self._nodes_labels_list.index(node_id) + return self._nodes_indexes_arr[pos_indx] + + def get_positional_node_indx(self, node_id: str) -> int: + return self._nodes_labels_list.index(node_id) + + def get_states_number(self, node: str) -> int: + """Given the node label ``node`` returns the cardinality of the node. + + :param node: the node label + :type node: string + :return: the node cardinality + :rtype: int + """ + pos_indx = self._nodes_labels_list.index(node) + return self._nodes_vals_arr[pos_indx] + + def __repr__(self): + return "Variables:\n" + str(self._nodes_labels_list) +"\nValues:\n"+ str(self._nodes_vals_arr) +\ + "\nEdges: \n" + str(self._edges_list) + + def __eq__(self, other): + """Overrides the default implementation""" + if isinstance(other, Structure): + return set(self._nodes_labels_list) == set(other._nodes_labels_list) and \ + np.array_equal(self._nodes_vals_arr, other._nodes_vals_arr) and \ + np.array_equal(self._nodes_indexes_arr, other._nodes_indexes_arr) and \ + self._edges_list == other._edges_list + + return False + diff --git a/PyCTBN/PyCTBN/structure_graph/trajectory.py b/PyCTBN/PyCTBN/structure_graph/trajectory.py new file mode 100644 index 0000000..36899b3 --- /dev/null +++ b/PyCTBN/PyCTBN/structure_graph/trajectory.py @@ -0,0 +1,45 @@ + +import typing + +import numpy as np + + +class Trajectory(object): + """ Abstracts the information about a complete set of trajectories, represented as a numpy array of doubles + (the time deltas) and a numpy matrix of ints (the changes of states). 
+ + :param list_of_columns: the list containing the times array and values matrix + :type list_of_columns: List + :param original_cols_number: total number of cols in the data + :type original_cols_number: int + :_actual_trajectory: the trajectory containing also the duplicated/shifted values + :_times: the array containing the time deltas + """ + + def __init__(self, list_of_columns: typing.List, original_cols_number: int): + """Constructor Method + """ + self._times = list_of_columns[0] + self._actual_trajectory = list_of_columns[1] + self._original_cols_number = original_cols_number + + @property + def trajectory(self) -> np.ndarray: + return self._actual_trajectory[:, :self._original_cols_number - 1] + + @property + def complete_trajectory(self) -> np.ndarray: + return self._actual_trajectory + + @property + def times(self): + return self._times + + def size(self): + return self._actual_trajectory.shape[0] + + def __repr__(self): + return "Complete Trajectory Rows: " + str(self.size()) + "\n" + self.complete_trajectory.__repr__() + \ + "\nTimes Rows:" + str(self.times.size) + "\n" + self.times.__repr__() + + diff --git a/PyCTBN/PyCTBN/utility/__init__.py b/PyCTBN/PyCTBN/utility/__init__.py new file mode 100644 index 0000000..f79749c --- /dev/null +++ b/PyCTBN/PyCTBN/utility/__init__.py @@ -0,0 +1,4 @@ +from .abstract_importer import AbstractImporter +from .cache import Cache +from .json_importer import JsonImporter +from .sample_importer import SampleImporter \ No newline at end of file diff --git a/PyCTBN/PyCTBN/utility/abstract_importer.py b/PyCTBN/PyCTBN/utility/abstract_importer.py new file mode 100644 index 0000000..1cad352 --- /dev/null +++ b/PyCTBN/PyCTBN/utility/abstract_importer.py @@ -0,0 +1,164 @@ + +import typing +from abc import ABC, abstractmethod + +import numpy as np +import pandas as pd + +import copy + +#from sklearn.utils import resample + + +class AbstractImporter(ABC): + """Abstract class that exposes all the necessary methods to process the trajectories and the net structure. + + :param file_path: the file path, or dataset name if you import already processed data + :type file_path: str + :param trajectory_list: Dataframe or numpy array containing the concatenation of all the processed trajectories + :type trajectory_list: typing.Union[pandas.DataFrame, numpy.ndarray] + :param variables: Dataframe containing the nodes labels and cardinalities + :type variables: pandas.DataFrame + :prior_net_structure: Dataframe containing the structure of the network (edges) + :type prior_net_structure: pandas.DataFrame + :_sorter: A list containing the variables labels in the SAME order as the columns in ``concatenated_samples`` + + .. warning:: + The parameters ``variables`` and ``prior_net_structure`` HAVE to be properly constructed + as Pandas Dataframes with the following structure: + Header of _df_structure = [From_Node | To_Node] + Header of _df_variables = [Variable_Label | Variable_Cardinality] + See the tutorial on how to construct a correct ``concatenated_samples`` Dataframe/ndarray. + + .. 
note:: + See :class:``JsonImporter`` for an example implementation + + """ + + def __init__(self, file_path: str = None, trajectory_list: typing.Union[pd.DataFrame, np.ndarray] = None, + variables: pd.DataFrame = None, prior_net_structure: pd.DataFrame = None): + """Constructor + """ + self._file_path = file_path + self._df_samples_list = trajectory_list + self._concatenated_samples = [] + self._df_variables = variables + self._df_structure = prior_net_structure + self._sorter = None + super().__init__() + + @abstractmethod + def build_sorter(self, trajecory_header: object) -> typing.List: + """Initializes the ``_sorter`` class member from a trajectory dataframe, exctracting the header of the frame + and keeping ONLY the variables symbolic labels, cutting out the time label in the header. + + :param trajecory_header: an object that will be used to define the header + :type trajecory_header: object + :return: A list containing the processed header. + :rtype: List + """ + pass + + def compute_row_delta_sigle_samples_frame(self, sample_frame: pd.DataFrame, + columns_header: typing.List, shifted_cols_header: typing.List) \ + -> pd.DataFrame: + """Computes the difference between each value present in th time column. + Copies and shift by one position up all the values present in the remaining columns. + + :param sample_frame: the traj to be processed + :type sample_frame: pandas.Dataframe + :param columns_header: the original header of sample_frame + :type columns_header: List + :param shifted_cols_header: a copy of columns_header with changed names of the contents + :type shifted_cols_header: List + :return: The processed dataframe + :rtype: pandas.Dataframe + + .. warning:: + the Dataframe ``sample_frame`` has to follow the column structure of this header: + Header of sample_frame = [Time | Variable values] + """ + sample_frame = copy.deepcopy(sample_frame) + sample_frame.iloc[:, 0] = sample_frame.iloc[:, 0].diff().shift(-1) + shifted_cols = sample_frame[columns_header].shift(-1).fillna(0).astype('int32') + shifted_cols.columns = shifted_cols_header + sample_frame = sample_frame.assign(**shifted_cols) + sample_frame.drop(sample_frame.tail(1).index, inplace=True) + return sample_frame + + def compute_row_delta_in_all_samples_frames(self, df_samples_list: typing.List) -> None: + """Calls the method ``compute_row_delta_sigle_samples_frame`` on every dataframe present in the list + ``df_samples_list``. + Concatenates the result in the dataframe ``concatanated_samples`` + + :param df_samples_list: the datframe's list to be processed and concatenated + :type df_samples_list: List + + .. warning:: + The Dataframe sample_frame has to follow the column structure of this header: + Header of sample_frame = [Time | Variable values] + The class member self._sorter HAS to be properly INITIALIZED (See class members definition doc) + .. 
note:: + After the call of this method the class member ``concatanated_samples`` will contain all processed + and merged trajectories + """ + if not self._sorter: + raise RuntimeError("The class member self._sorter has to be INITIALIZED!") + shifted_cols_header = [s + "S" for s in self._sorter] + compute_row_delta = self.compute_row_delta_sigle_samples_frame + proc_samples_list = [compute_row_delta(sample, self._sorter, shifted_cols_header) + for sample in df_samples_list] + self._concatenated_samples = pd.concat(proc_samples_list) + + complete_header = self._sorter[:] + complete_header.insert(0,'Time') + complete_header.extend(shifted_cols_header) + self._concatenated_samples = self._concatenated_samples[complete_header] + + def build_list_of_samples_array(self, concatenated_sample: pd.DataFrame) -> typing.List: + """Builds a List containing the the delta times numpy array, and the complete transitions matrix + + :param concatenated_sample: the dataframe/array from which the time, and transitions matrix have to be extracted + and converted + :type concatenated_sample: pandas.Dataframe + :return: the resulting list of numpy arrays + :rtype: List + """ + + concatenated_array = concatenated_sample.to_numpy() + columns_list = [concatenated_array[:, 0], concatenated_array[:, 1:].astype(int)] + + return columns_list + + def clear_concatenated_frame(self) -> None: + """Removes all values in the dataframe concatenated_samples. + """ + if isinstance(self._concatenated_samples, pd.DataFrame): + self._concatenated_samples = self._concatenated_samples.iloc[0:0] + + @abstractmethod + def dataset_id(self) -> object: + """If the original dataset contains multiple dataset, this method returns a unique id to identify the current + dataset + """ + pass + + @property + def concatenated_samples(self) -> pd.DataFrame: + return self._concatenated_samples + + @property + def variables(self) -> pd.DataFrame: + return self._df_variables + + @property + def structure(self) -> pd.DataFrame: + return self._df_structure + + @property + def sorter(self) -> typing.List: + return self._sorter + + @property + def file_path(self) -> str: + return self._file_path diff --git a/PyCTBN/PyCTBN/utility/cache.py b/PyCTBN/PyCTBN/utility/cache.py new file mode 100644 index 0000000..8e0369b --- /dev/null +++ b/PyCTBN/PyCTBN/utility/cache.py @@ -0,0 +1,58 @@ + +import typing + +from ..structure_graph.set_of_cims import SetOfCims + + +class Cache: + """This class acts as a cache of ``SetOfCims`` objects for a node. + + :__list_of_sets_of_parents: a list of ``Sets`` objects of the parents to which the cim in cache at SAME + index is related + :__actual_cache: a list of setOfCims objects + """ + + def __init__(self): + """Constructor Method + """ + self._list_of_sets_of_parents = [] + self._actual_cache = [] + + def find(self, parents_comb: typing.Set): #typing.Union[typing.Set, str] + """ + Tries to find in cache given the symbolic parents combination ``parents_comb`` the ``SetOfCims`` + related to that ``parents_comb``. + + :param parents_comb: the parents related to that ``SetOfCims`` + :type parents_comb: Set + :return: A ``SetOfCims`` object if the ``parents_comb`` index is found in ``__list_of_sets_of_parents``. + None otherwise. 
+ :rtype: SetOfCims + """ + try: + #print("Cache State:", self.list_of_sets_of_indxs) + #print("Look For:", parents_comb) + result = self._actual_cache[self._list_of_sets_of_parents.index(parents_comb)] + #print("CACHE HIT!!!!", parents_comb) + return result + except ValueError: + return None + + def put(self, parents_comb: typing.Set, socim: SetOfCims): + """Place in cache the ``SetOfCims`` object, and the related symbolic index ``parents_comb`` in + ``__list_of_sets_of_parents``. + + :param parents_comb: the symbolic set index + :type parents_comb: Set + :param socim: the related SetOfCims object + :type socim: SetOfCims + """ + #print("Putting in cache:", parents_comb) + self._list_of_sets_of_parents.append(parents_comb) + self._actual_cache.append(socim) + + def clear(self): + """Clear the contents both of ``__actual_cache`` and ``__list_of_sets_of_parents``. + """ + del self._list_of_sets_of_parents[:] + del self._actual_cache[:] \ No newline at end of file diff --git a/PyCTBN/PyCTBN/utility/json_importer.py b/PyCTBN/PyCTBN/utility/json_importer.py new file mode 100644 index 0000000..edff212 --- /dev/null +++ b/PyCTBN/PyCTBN/utility/json_importer.py @@ -0,0 +1,176 @@ +import json +import typing + +import pandas as pd + + +from .abstract_importer import AbstractImporter + + +class JsonImporter(AbstractImporter): + """Implements the abstracts methods of AbstractImporter and adds all the necessary methods to process and prepare + the data in json extension. + + :param file_path: the path of the file that contains tha data to be imported + :type file_path: string + :param samples_label: the reference key for the samples in the trajectories + :type samples_label: string + :param structure_label: the reference key for the structure of the network data + :type structure_label: string + :param variables_label: the reference key for the cardinalites of the nodes data + :type variables_label: string + :param time_key: the key used to identify the timestamps in each trajectory + :type time_key: string + :param variables_key: the key used to identify the names of the variables in the net + :type variables_key: string + :_array_indx: the index of the outer JsonArray to extract the data from + :type _array_indx: int + :_df_samples_list: a Dataframe list in which every dataframe contains a trajectory + :_raw_data: The raw contents of the json file to import + :type _raw_data: List + """ + + def __init__(self, file_path: str, samples_label: str, structure_label: str, variables_label: str, time_key: str, + variables_key: str): + """Constructor method + + .. note:: + This constructor calls also the method ``read_json_file()``, so after the construction of the object + the class member ``_raw_data`` will contain the raw imported json data. + + """ + self._samples_label = samples_label + self._structure_label = structure_label + self._variables_label = variables_label + self._time_key = time_key + self._variables_key = variables_key + self._df_samples_list = None + self._array_indx = None + super(JsonImporter, self).__init__(file_path) + self._raw_data = self.read_json_file() + + def import_data(self, indx: int) -> None: + """Implements the abstract method of :class:`AbstractImporter`. 
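+ A minimal usage sketch (the file path is hypothetical; the key names mirror the example in ``basic_main.py``): + + .. code-block:: python + + importer = JsonImporter('./data/example.json', 'samples', 'dyn.str', 'variables', 'Time', 'Name') + importer.import_data(0) + print(importer.concatenated_samples)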
+ + :param indx: the index of the outer JsonArray to extract the data from + :type indx: int + """ + self._array_indx = indx + self._df_samples_list = self.import_trajectories(self._raw_data) + self._sorter = self.build_sorter(self._df_samples_list[0]) + self.compute_row_delta_in_all_samples_frames(self._df_samples_list) + self.clear_data_frame_list() + self._df_structure = self.import_structure(self._raw_data) + self._df_variables = self.import_variables(self._raw_data) + + def import_trajectories(self, raw_data: typing.List) -> typing.List: + """Imports the trajectories from the list of dicts ``raw_data``. + + :param raw_data: List of Dicts + :type raw_data: List + :return: List of dataframes containing all the trajectories + :rtype: List + """ + return self.normalize_trajectories(raw_data, self._array_indx, self._samples_label) + + def import_structure(self, raw_data: typing.List) -> pd.DataFrame: + """Imports in a dataframe the data in the list raw_data at the key ``_structure_label`` + + :param raw_data: List of Dicts + :type raw_data: List + :return: Dataframe containg the starting node a ending node of every arc of the network + :rtype: pandas.Dataframe + """ + return self.one_level_normalizing(raw_data, self._array_indx, self._structure_label) + + def import_variables(self, raw_data: typing.List) -> pd.DataFrame: + """Imports the data in ``raw_data`` at the key ``_variables_label``. + + :param raw_data: List of Dicts + :type raw_data: List + :return: Datframe containg the variables simbolic labels and their cardinalities + :rtype: pandas.Dataframe + """ + return self.one_level_normalizing(raw_data, self._array_indx, self._variables_label) + + def read_json_file(self) -> typing.List: + """Reads the JSON file in the path self.filePath. + + :return: The contents of the json file + :rtype: List + """ + with open(self._file_path) as f: + data = json.load(f) + return data + + def one_level_normalizing(self, raw_data: typing.List, indx: int, key: str) -> pd.DataFrame: + """Extracts the one-level nested data in the list ``raw_data`` at the index ``indx`` at the key ``key``. + + :param raw_data: List of Dicts + :type raw_data: List + :param indx: The index of the array from which the data have to be extracted + :type indx: int + :param key: the key for the Dicts from which exctract data + :type key: string + :return: A normalized dataframe + :rtype: pandas.Datframe + """ + return pd.DataFrame(raw_data[indx][key]) + + def normalize_trajectories(self, raw_data: typing.List, indx: int, trajectories_key: str) -> typing.List: + """ + Extracts the trajectories in ``raw_data`` at the index ``index`` at the key ``trajectories key``. + + :param raw_data: List of Dicts + :type raw_data: List + :param indx: The index of the array from which the data have to be extracted + :type indx: int + :param trajectories_key: the key of the trajectories objects + :type trajectories_key: string + :return: A list of daframes containg the trajectories + :rtype: List + """ + dataframe = pd.DataFrame + smps = raw_data[indx][trajectories_key] + df_samples_list = [dataframe(sample) for sample in smps] + return df_samples_list + + def build_sorter(self, sample_frame: pd.DataFrame) -> typing.List: + """Implements the abstract method build_sorter of the :class:`AbstractImporter` for this dataset. 
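+ + :param sample_frame: the dataframe whose header is used to extract the ordered variables labels + :type sample_frame: pandas.DataFrame + :return: the header columns of ``sample_frame`` with the time column removed + :rtype: List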
+ """ + columns_header = list(sample_frame.columns.values) + columns_header.remove(self._time_key) + return columns_header + + def clear_data_frame_list(self) -> None: + """Removes all values present in the dataframes in the list ``_df_samples_list``. + """ + for indx in range(len(self._df_samples_list)): + self._df_samples_list[indx] = self._df_samples_list[indx].iloc[0:0] + + def dataset_id(self) -> object: + return self._array_indx + + def import_sampled_cims(self, raw_data: typing.List, indx: int, cims_key: str) -> typing.Dict: + """Imports the synthetic CIMS in the dataset in a dictionary, using variables labels + as keys for the set of CIMS of a particular node. + + :param raw_data: List of Dicts + :type raw_data: List + :param indx: The index of the array from which the data have to be extracted + :type indx: int + :param cims_key: the key where the json object cims are placed + :type cims_key: string + :return: a dictionary containing the sampled CIMS for all the variables in the net + :rtype: Dictionary + """ + cims_for_all_vars = {} + for var in raw_data[indx][cims_key]: + sampled_cims_list = [] + cims_for_all_vars[var] = sampled_cims_list + for p_comb in raw_data[indx][cims_key][var]: + cims_for_all_vars[var].append(pd.DataFrame(raw_data[indx][cims_key][var][p_comb]).to_numpy()) + return cims_for_all_vars + + + diff --git a/PyCTBN/PyCTBN/utility/sample_importer.py b/PyCTBN/PyCTBN/utility/sample_importer.py new file mode 100644 index 0000000..05073c8 --- /dev/null +++ b/PyCTBN/PyCTBN/utility/sample_importer.py @@ -0,0 +1,65 @@ +import json +import typing + +import pandas as pd +import numpy as np + +from .abstract_importer import AbstractImporter + + + +class SampleImporter(AbstractImporter): + """Implements the abstracts methods of AbstractImporter and adds all the necessary methods to process and prepare + the data loaded directly by using DataFrame + + :param trajectory_list: the data that describes the trajectories + :type trajectory_list: typing.Union[pd.DataFrame, np.ndarray, typing.List] + :param variables: the data that describes the variables with name and cardinality + :type variables: typing.Union[pd.DataFrame, np.ndarray, typing.List] + :param prior_net_structure: the data of the real structure, if it exists + :type prior_net_structure: typing.Union[pd.DataFrame, np.ndarray, typing.List] + + :_df_samples_list: a Dataframe list in which every dataframe contains a trajectory + :_raw_data: The raw contents of the json file to import + :type _raw_data: List + """ + + def __init__(self, + trajectory_list: typing.Union[pd.DataFrame, np.ndarray, typing.List] = None, + variables: typing.Union[pd.DataFrame, np.ndarray, typing.List] = None, + prior_net_structure: typing.Union[pd.DataFrame, np.ndarray,typing.List] = None): + + 'If the data are not DataFrame, it will be converted' + if isinstance(variables,list) or isinstance(variables,np.ndarray): + variables = pd.DataFrame(variables) + if isinstance(variables,list) or isinstance(variables,np.ndarray): + prior_net_structure=pd.DataFrame(prior_net_structure) + + super(SampleImporter, self).__init__(trajectory_list =trajectory_list, + variables= variables, + prior_net_structure=prior_net_structure) + + def import_data(self, header_column = None): + + if header_column is not None: + self._sorter = header_column + else: + self._sorter = self.build_sorter(self._df_samples_list[0]) + + samples_list= self._df_samples_list + + if isinstance(samples_list, np.ndarray): + samples_list = samples_list.tolist() + + 
self.compute_row_delta_in_all_samples_frames(samples_list) + + def build_sorter(self, sample_frame: pd.DataFrame) -> typing.List: + """Implements the abstract method build_sorter of the :class:`AbstractImporter` in order to get the ordered variables list. + """ + columns_header = list(sample_frame.columns.values) + del columns_header[0] + return columns_header + + + def dataset_id(self) -> object: + pass \ No newline at end of file diff --git a/PyCTBN/basic_main.py b/PyCTBN/basic_main.py new file mode 100644 index 0000000..b1288db --- /dev/null +++ b/PyCTBN/basic_main.py @@ -0,0 +1,39 @@ +import glob +import os + +import sys +sys.path.append("./PyCTBN/") + +import structure_graph.network_graph as ng +import structure_graph.sample_path as sp +import structure_graph.set_of_cims as sofc +import estimators.parameters_estimator as pe +import utility.json_importer as ji + + +def main(): + read_files = glob.glob(os.path.join('./data', "*.json")) #Take all json files in this dir + #import data + importer = ji.JsonImporter(read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name') + #Create a SamplePath Obj + s1 = sp.SamplePath(importer) + #Build The trajectries and the structural infos + s1.build_trajectories() + s1.build_structure() + #From The Structure Object build the Graph + g = ng.NetworkGraph(s1.structure) + #Select a node you want to estimate the parameters + node = g.nodes[1] + #Init the graph specifically for THIS node + g.fast_init(node) + #Use SamplePath and Grpah to create a ParametersEstimator Object + p1 = pe.ParametersEstimator(s1, g) + #Init the peEst specifically for THIS node + p1.fast_init(node) + #Compute the parameters + sofc1 = p1.compute_parameters_for_node(node) + #The est CIMS are inside the resultant SetOfCIms Obj + print(sofc1.actual_cims) + +if __name__ == "__main__": + main() diff --git a/PyCTBN/build/lib/PyCTBN/__init__.py b/PyCTBN/build/lib/PyCTBN/__init__.py new file mode 100644 index 0000000..023c0f1 --- /dev/null +++ b/PyCTBN/build/lib/PyCTBN/__init__.py @@ -0,0 +1,8 @@ +import PyCTBN.estimators +from PyCTBN.estimators import * +import PyCTBN.optimizers +from PyCTBN.optimizers import * +import PyCTBN.structure_graph +from PyCTBN.structure_graph import * +import PyCTBN.utility +from PyCTBN.utility import * \ No newline at end of file diff --git a/PyCTBN/build/lib/PyCTBN/estimators/__init__.py b/PyCTBN/build/lib/PyCTBN/estimators/__init__.py new file mode 100644 index 0000000..112086f --- /dev/null +++ b/PyCTBN/build/lib/PyCTBN/estimators/__init__.py @@ -0,0 +1,5 @@ +from .fam_score_calculator import FamScoreCalculator +from .parameters_estimator import ParametersEstimator +from .structure_estimator import StructureEstimator +from .structure_constraint_based_estimator import StructureConstraintBasedEstimator +from .structure_score_based_estimator import StructureScoreBasedEstimator diff --git a/PyCTBN/build/lib/PyCTBN/estimators/fam_score_calculator.py b/PyCTBN/build/lib/PyCTBN/estimators/fam_score_calculator.py new file mode 100644 index 0000000..5b0b591 --- /dev/null +++ b/PyCTBN/build/lib/PyCTBN/estimators/fam_score_calculator.py @@ -0,0 +1,272 @@ + +import itertools +import json +import typing + +import networkx as nx +import numpy as np +from networkx.readwrite import json_graph + +from math import log + +from scipy.special import loggamma +from random import choice + +from ..structure_graph.set_of_cims import SetOfCims +from ..structure_graph.network_graph import NetworkGraph +from ..structure_graph.conditional_intensity_matrix import 
ConditionalIntensityMatrix + + +''' + +''' + + +class FamScoreCalculator: + """ + Has the task of calculating the FamScore of a node by using a Bayesian score function + """ + + def __init__(self): + #np.seterr('raise') + pass + + # region theta + + def marginal_likelihood_theta(self, + cims: ConditionalIntensityMatrix, + alpha_xu: float, + alpha_xxu: float): + """ + Calculate the FamScore value of the node identified by the label node_id + + :param cims: np.array with all the node's cims + :type cims: np.array + :param alpha_xu: hyperparameter over the CTBN’s q parameters, default to 0.1 + :type alpha_xu: float + :param alpha_xxu: distribuited hyperparameter over the CTBN’s theta parameters + :type alpha_xxu: float + + :return: the value of the marginal likelihood over theta + :rtype: float + """ + return np.sum( + [self.variable_cim_xu_marginal_likelihood_theta(cim, + alpha_xu, + alpha_xxu) + for cim in cims]) + + def variable_cim_xu_marginal_likelihood_theta(self, + cim: ConditionalIntensityMatrix, + alpha_xu: float, + alpha_xxu: float): + """ + Calculate the value of the marginal likelihood over theta given a cim + + :param cim: A conditional_intensity_matrix object with the sufficient statistics + :type cim: class:'ConditionalIntensityMatrix' + :param alpha_xu: hyperparameter over the CTBN’s q parameters, default to 0.1 + :type alpha_xu: float + :param alpha_xxu: distribuited hyperparameter over the CTBN’s theta parameters + :type alpha_xxu: float + + :return: the value of the marginal likelihood over theta + :rtype: float + """ + + 'get cim length' + values = len(cim._state_residence_times) + + 'compute the marginal likelihood for the current cim' + return np.sum([ + self.single_cim_xu_marginal_likelihood_theta( + index, + cim, + alpha_xu, + alpha_xxu) + for index in range(values)]) + + def single_cim_xu_marginal_likelihood_theta(self, + index: int, + cim: ConditionalIntensityMatrix, + alpha_xu: float, + alpha_xxu: float): + """ + Calculate the marginal likelihood on q of the node when assumes a specif value + and a specif parents's assignment + + :param cim: A conditional_intensity_matrix object with the sufficient statistics + :type cim: class:'ConditionalIntensityMatrix' + :param alpha_xu: hyperparameter over the CTBN’s q parameters + :type alpha_xu: float + :param alpha_xxu: distribuited hyperparameter over the CTBN’s theta parameters + :type alpha_xxu: float + + :return: the value of the marginal likelihood over theta when the node assumes a specif value + :rtype: float + """ + + values = list(range(len(cim._state_residence_times))) + + 'remove the index because of the x != x^ condition in the summation ' + values.remove(index) + + 'uncomment for alpha xx not uniform' + #alpha_xxu = alpha_xu * cim.state_transition_matrix[index,index_x_first] / cim.state_transition_matrix[index, index]) + + return (loggamma(alpha_xu) - loggamma(alpha_xu + cim.state_transition_matrix[index, index])) \ + + \ + np.sum([self.single_internal_cim_xxu_marginal_likelihood_theta( + cim.state_transition_matrix[index,index_x_first], + alpha_xxu) + for index_x_first in values]) + + + def single_internal_cim_xxu_marginal_likelihood_theta(self, + M_xxu_suff_stats: float, + alpha_xxu: float=1): + """Calculate the second part of the marginal likelihood over theta formula + + :param M_xxu_suff_stats: value of the suffucient statistic M[xx'|u] + :type M_xxu_suff_stats: float + :param alpha_xxu: distribuited hyperparameter over the CTBN’s theta parameters + :type alpha_xxu: float + + :return: the value of the 
marginal likelihood over theta when the node assumes a specif value + :rtype: float + """ + return loggamma(alpha_xxu+M_xxu_suff_stats) - loggamma(alpha_xxu) + + # endregion + + # region q + + def marginal_likelihood_q(self, + cims: np.array, + tau_xu: float=0.1, + alpha_xu: float=1): + """ + Calculate the value of the marginal likelihood over q of the node identified by the label node_id + + :param cims: np.array with all the node's cims + :type cims: np.array + :param tau_xu: hyperparameter over the CTBN’s q parameters + :type tau_xu: float + :param alpha_xu: hyperparameter over the CTBN’s q parameters + :type alpha_xu: float + + + :return: the value of the marginal likelihood over q + :rtype: float + """ + + return np.sum([self.variable_cim_xu_marginal_likelihood_q(cim, tau_xu, alpha_xu) for cim in cims]) + + def variable_cim_xu_marginal_likelihood_q(self, + cim: ConditionalIntensityMatrix, + tau_xu: float=0.1, + alpha_xu: float=1): + """ + Calculate the value of the marginal likelihood over q given a cim + + :param cim: A conditional_intensity_matrix object with the sufficient statistics + :type cim: class:'ConditionalIntensityMatrix' + :param tau_xu: hyperparameter over the CTBN’s q parameters + :type tau_xu: float + :param alpha_xu: hyperparameter over the CTBN’s q parameters + :type alpha_xu: float + + + :return: the value of the marginal likelihood over q + :rtype: float + """ + + 'get cim length' + values=len(cim._state_residence_times) + + 'compute the marginal likelihood for the current cim' + return np.sum([ + self.single_cim_xu_marginal_likelihood_q( + cim.state_transition_matrix[index, index], + cim._state_residence_times[index], + tau_xu, + alpha_xu) + for index in range(values)]) + + + def single_cim_xu_marginal_likelihood_q(self, + M_xu_suff_stats: float, + T_xu_suff_stats: float, + tau_xu: float=0.1, + alpha_xu: float=1): + """ + Calculate the marginal likelihood on q of the node when assumes a specif value + and a specif parents's assignment + + :param M_xu_suff_stats: value of the suffucient statistic M[x|u] + :type M_xxu_suff_stats: float + :param T_xu_suff_stats: value of the suffucient statistic T[x|u] + :type T_xu_suff_stats: float + :param cim: A conditional_intensity_matrix object with the sufficient statistics + :type cim: class:'ConditionalIntensityMatrix' + :param tau_xu: hyperparameter over the CTBN’s q parameters + :type tau_xu: float + :param alpha_xu: hyperparameter over the CTBN’s q parameters + :type alpha_xu: float + + + :return: the value of the marginal likelihood of the node when assumes a specif value + :rtype: float + """ + return ( + loggamma(alpha_xu + M_xu_suff_stats + 1) + + (log(tau_xu) + * + (alpha_xu+1)) + ) \ + - \ + (loggamma(alpha_xu + 1)+( + log(tau_xu + T_xu_suff_stats) + * + (alpha_xu + M_xu_suff_stats + 1)) + ) + + # end region + + def get_fam_score(self, + cims: np.array, + tau_xu: float=0.1, + alpha_xu: float=1): + """ + Calculate the FamScore value of the node + + + :param cims: np.array with all the node's cims + :type cims: np.array + :param tau_xu: hyperparameter over the CTBN’s q parameters, default to 0.1 + :type tau_xu: float, optional + :param alpha_xu: hyperparameter over the CTBN’s q parameters, default to 1 + :type alpha_xu: float, optional + + + :return: the FamScore value of the node + :rtype: float + """ + #print("------") + #print(self.marginal_likelihood_q(cims, + # tau_xu, + # alpha_xu)) + + #print(self.marginal_likelihood_theta(cims, + # alpha_xu, + # alpha_xxu)) + 'calculate alpha_xxu as a uniform distribution' + 
alpha_xxu = alpha_xu /(len(cims[0]._state_residence_times) - 1) + + return self.marginal_likelihood_q(cims, + tau_xu, + alpha_xu) \ + + \ + self.marginal_likelihood_theta(cims, + alpha_xu, + alpha_xxu) diff --git a/PyCTBN/build/lib/PyCTBN/estimators/parameters_estimator.py b/PyCTBN/build/lib/PyCTBN/estimators/parameters_estimator.py new file mode 100644 index 0000000..4754d58 --- /dev/null +++ b/PyCTBN/build/lib/PyCTBN/estimators/parameters_estimator.py @@ -0,0 +1,143 @@ +import sys +sys.path.append('../') +import numpy as np + +from ..structure_graph.network_graph import NetworkGraph +from ..structure_graph.set_of_cims import SetOfCims +from ..structure_graph.trajectory import Trajectory + + +class ParametersEstimator(object): + """Has the task of computing the cims of particular node given the trajectories and the net structure + in the graph ``_net_graph``. + + :param trajectories: the trajectories + :type trajectories: Trajectory + :param net_graph: the net structure + :type net_graph: NetworkGraph + :_single_set_of_cims: the set of cims object that will hold the cims of the node + """ + + def __init__(self, trajectories: Trajectory, net_graph: NetworkGraph): + """Constructor Method + """ + self._trajectories = trajectories + self._net_graph = net_graph + self._single_set_of_cims = None + + def fast_init(self, node_id: str) -> None: + """Initializes all the necessary structures for the parameters estimation for the node ``node_id``. + + :param node_id: the node label + :type node_id: string + """ + p_vals = self._net_graph._aggregated_info_about_nodes_parents[2] + node_states_number = self._net_graph.get_states_number(node_id) + self._single_set_of_cims = SetOfCims(node_id, p_vals, node_states_number, self._net_graph.p_combs) + + def compute_parameters_for_node(self, node_id: str) -> SetOfCims: + """Compute the CIMS of the node identified by the label ``node_id``. 
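+ A minimal usage sketch (assuming ``s1`` is a built ``SamplePath`` and ``g`` a ``NetworkGraph``, as in ``basic_main.py``, and following the constructor signature above): + + .. code-block:: python + + p1 = ParametersEstimator(s1.trajectories, g) + p1.fast_init(node) + sofc1 = p1.compute_parameters_for_node(node) + print(sofc1.actual_cims)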
+ + :param node_id: the node label + :type node_id: string + :return: A SetOfCims object filled with the computed CIMS + :rtype: SetOfCims + """ + node_indx = self._net_graph.get_node_indx(node_id) + state_res_times = self._single_set_of_cims._state_residence_times + transition_matrices = self._single_set_of_cims._transition_matrices + ParametersEstimator.compute_state_res_time_for_node(self._trajectories.times, + self._trajectories.trajectory, + self._net_graph.time_filtering, + self._net_graph.time_scalar_indexing_strucure, + state_res_times) + ParametersEstimator.compute_state_transitions_for_a_node(node_indx, self._trajectories.complete_trajectory, + self._net_graph.transition_filtering, + self._net_graph.transition_scalar_indexing_structure, + transition_matrices) + self._single_set_of_cims.build_cims(state_res_times, transition_matrices) + return self._single_set_of_cims + + @staticmethod + def compute_state_res_time_for_node(times: np.ndarray, trajectory: np.ndarray, + cols_filter: np.ndarray, scalar_indexes_struct: np.ndarray, + T: np.ndarray) -> None: + """Compute the state residence times for a node and fill the matrix ``T`` with the results + + :param node_indx: the index of the node + :type node_indx: int + :param times: the times deltas vector + :type times: numpy.array + :param trajectory: the trajectory + :type trajectory: numpy.ndArray + :param cols_filter: the columns filtering structure + :type cols_filter: numpy.array + :param scalar_indexes_struct: the indexing structure + :type scalar_indexes_struct: numpy.array + :param T: the state residence times vectors + :type T: numpy.ndArray + """ + T[:] = np.bincount(np.sum(trajectory[:, cols_filter] * scalar_indexes_struct / scalar_indexes_struct[0], axis=1) + .astype(np.int), \ + times, + minlength=scalar_indexes_struct[-1]).reshape(-1, T.shape[1]) + + @staticmethod + def compute_state_transitions_for_a_node(node_indx: int, trajectory: np.ndarray, cols_filter: np.ndarray, + scalar_indexing: np.ndarray, M: np.ndarray) -> None: + """Compute the state residence times for a node and fill the matrices ``M`` with the results. 
+ + :param node_indx: the index of the node + :type node_indx: int + :param trajectory: the trajectory + :type trajectory: numpy.ndArray + :param cols_filter: the columns filtering structure + :type cols_filter: numpy.array + :param scalar_indexing: the indexing structure + :type scalar_indexing: numpy.array + :param M: the state transitions matrices + :type M: numpy.ndArray + """ + diag_indices = np.array([x * M.shape[1] + x % M.shape[1] for x in range(M.shape[0] * M.shape[1])], + dtype=np.int64) + trj_tmp = trajectory[trajectory[:, int(trajectory.shape[1] / 2) + node_indx].astype(np.int) >= 0] + M[:] = np.bincount(np.sum(trj_tmp[:, cols_filter] * scalar_indexing / scalar_indexing[0], axis=1).astype(np.int) + , minlength=scalar_indexing[-1]).reshape(-1, M.shape[1], M.shape[2]) + M_raveled = M.ravel() + M_raveled[diag_indices] = 0 + M_raveled[diag_indices] = np.sum(M, axis=2).ravel() + + def init_sets_cims_container(self): + self.sets_of_cims_struct = acims.SetsOfCimsContainer(self.net_graph.nodes, + self.net_graph.nodes_values, + self.net_graph.get_ordered_by_indx_parents_values_for_all_nodes(), + self.net_graph.p_combs) + + def compute_parameters(self): + #print(self.net_graph.get_nodes()) + #print(self.amalgamated_cims_struct.sets_of_cims) + #enumerate(zip(self.net_graph.get_nodes(), self.amalgamated_cims_struct.sets_of_cims)) + for indx, aggr in enumerate(zip(self.net_graph.nodes, self.sets_of_cims_struct.sets_of_cims)): + #print(self.net_graph.time_filtering[indx]) + #print(self.net_graph.time_scalar_indexing_strucure[indx]) + self.compute_state_res_time_for_node(self.net_graph.get_node_indx(aggr[0]), self.sample_path.trajectories.times, + self.sample_path.trajectories.trajectory, + self.net_graph.time_filtering[indx], + self.net_graph.time_scalar_indexing_strucure[indx], + aggr[1]._state_residence_times) + #print(self.net_graph.transition_filtering[indx]) + #print(self.net_graph.transition_scalar_indexing_structure[indx]) + self.compute_state_transitions_for_a_node(self.net_graph.get_node_indx(aggr[0]), + self.sample_path.trajectories.complete_trajectory, + self.net_graph.transition_filtering[indx], + self.net_graph.transition_scalar_indexing_structure[indx], + aggr[1]._transition_matrices) + aggr[1].build_cims(aggr[1]._state_residence_times, aggr[1]._transition_matrices) + + + + + + + + diff --git a/PyCTBN/build/lib/PyCTBN/estimators/structure_constraint_based_estimator.py b/PyCTBN/build/lib/PyCTBN/estimators/structure_constraint_based_estimator.py new file mode 100644 index 0000000..7d5721e --- /dev/null +++ b/PyCTBN/build/lib/PyCTBN/estimators/structure_constraint_based_estimator.py @@ -0,0 +1,238 @@ + +import itertools +import json +import typing + +import networkx as nx +import numpy as np +from networkx.readwrite import json_graph +import os +from scipy.stats import chi2 as chi2_dist +from scipy.stats import f as f_dist +from tqdm import tqdm + +from ..utility.cache import Cache +from ..structure_graph.conditional_intensity_matrix import ConditionalIntensityMatrix +from ..structure_graph.network_graph import NetworkGraph +from .parameters_estimator import ParametersEstimator +from .structure_estimator import StructureEstimator +from ..structure_graph.sample_path import SamplePath +from ..structure_graph.structure import Structure +from ..optimizers.constraint_based_optimizer import ConstraintBasedOptimizer + +import concurrent.futures + + + +import multiprocessing +from multiprocessing import Pool + + +class StructureConstraintBasedEstimator(StructureEstimator): + """ + Has the 
task of estimating the network structure given the trajectories in samplepath by using a constraint-based approach. + + :param sample_path: the _sample_path object containing the trajectories and the real structure + :type sample_path: SamplePath + :param exp_test_alfa: the significance level for the exponential Hp test + :type exp_test_alfa: float + :param chi_test_alfa: the significance level for the chi Hp test + :type chi_test_alfa: float + :_nodes: the nodes labels + :_nodes_vals: the nodes cardinalities + :_nodes_indxs: the nodes indexes + :_complete_graph: the complete directed graph built using the nodes labels in ``_nodes`` + :_cache: the Cache object + """ + + def __init__(self, sample_path: SamplePath, exp_test_alfa: float, chi_test_alfa: float,known_edges: typing.List= [],thumb_threshold:int = 25): + super().__init__(sample_path,known_edges) + self._exp_test_sign = exp_test_alfa + self._chi_test_alfa = chi_test_alfa + self._thumb_threshold = thumb_threshold + self._cache = Cache() + + def complete_test(self, test_parent: str, test_child: str, parent_set: typing.List, child_states_numb: int, + tot_vars_count: int, parent_indx, child_indx) -> bool: + """Performs a complete independence test on the directed graphs G1 = {test_child U parent_set} + G2 = {G1 U test_parent} (added as an additional parent of the test_child). + Generates all the necessary structures and datas to perform the tests. + + :param test_parent: the node label of the test parent + :type test_parent: string + :param test_child: the node label of the child + :type test_child: string + :param parent_set: the common parent set + :type parent_set: List + :param child_states_numb: the cardinality of the ``test_child`` + :type child_states_numb: int + :param tot_vars_count: the total number of variables in the net + :type tot_vars_count: int + :return: True iff test_child and test_parent are independent given the sep_set parent_set. 
False otherwise + :rtype: bool + """ + p_set = parent_set[:] + complete_info = parent_set[:] + complete_info.append(test_child) + + parents = np.array(parent_set) + parents = np.append(parents, test_parent) + sorted_parents = self._nodes[np.isin(self._nodes, parents)] + cims_filter = sorted_parents != test_parent + + p_set.insert(0, test_parent) + sofc2 = self._cache.find(set(p_set)) + + if not sofc2: + complete_info.append(test_parent) + bool_mask2 = np.isin(self._nodes, complete_info) + l2 = list(self._nodes[bool_mask2]) + indxs2 = self._nodes_indxs[bool_mask2] + vals2 = self._nodes_vals[bool_mask2] + eds2 = list(itertools.product(p_set, test_child)) + s2 = Structure(l2, indxs2, vals2, eds2, tot_vars_count) + g2 = NetworkGraph(s2) + g2.fast_init(test_child) + p2 = ParametersEstimator(self._sample_path.trajectories, g2) + p2.fast_init(test_child) + sofc2 = p2.compute_parameters_for_node(test_child) + self._cache.put(set(p_set), sofc2) + + del p_set[0] + sofc1 = self._cache.find(set(p_set)) + if not sofc1: + g2.remove_node(test_parent) + g2.fast_init(test_child) + p2 = ParametersEstimator(self._sample_path.trajectories, g2) + p2.fast_init(test_child) + sofc1 = p2.compute_parameters_for_node(test_child) + self._cache.put(set(p_set), sofc1) + thumb_value = 0.0 + if child_states_numb > 2: + parent_val = self._sample_path.structure.get_states_number(test_parent) + bool_mask_vals = np.isin(self._nodes, parent_set) + parents_vals = self._nodes_vals[bool_mask_vals] + thumb_value = self.compute_thumb_value(parent_val, child_states_numb, parents_vals) + for cim1, p_comb in zip(sofc1.actual_cims, sofc1.p_combs): + cond_cims = sofc2.filter_cims_with_mask(cims_filter, p_comb) + for cim2 in cond_cims: + if not self.independence_test(child_states_numb, cim1, cim2, thumb_value, parent_indx, child_indx): + return False + return True + + def independence_test(self, child_states_numb: int, cim1: ConditionalIntensityMatrix, + cim2: ConditionalIntensityMatrix, thumb_value: float, parent_indx, child_indx) -> bool: + """Compute the actual independence test using two cims. + It is performed first the exponential test and if the null hypothesis is not rejected, + it is performed also the chi_test. + + :param child_states_numb: the cardinality of the test child + :type child_states_numb: int + :param cim1: a cim belonging to the graph without test parent + :type cim1: ConditionalIntensityMatrix + :param cim2: a cim belonging to the graph with test parent + :type cim2: ConditionalIntensityMatrix + :return: True iff both tests do NOT reject the null hypothesis of independence. False otherwise. 
+ :rtype: bool + """ + M1 = cim1.state_transition_matrix + M2 = cim2.state_transition_matrix + r1s = M1.diagonal() + r2s = M2.diagonal() + C1 = cim1.cim + C2 = cim2.cim + if child_states_numb > 2: + if (np.sum(np.diagonal(M1)) / thumb_value) < self._thumb_threshold: + self._removable_edges_matrix[parent_indx][child_indx] = False + return False + F_stats = C2.diagonal() / C1.diagonal() + exp_alfa = self._exp_test_sign + for val in range(0, child_states_numb): + if F_stats[val] < f_dist.ppf(exp_alfa / 2, r1s[val], r2s[val]) or \ + F_stats[val] > f_dist.ppf(1 - exp_alfa / 2, r1s[val], r2s[val]): + return False + M1_no_diag = M1[~np.eye(M1.shape[0], dtype=bool)].reshape(M1.shape[0], -1) + M2_no_diag = M2[~np.eye(M2.shape[0], dtype=bool)].reshape( + M2.shape[0], -1) + chi_2_quantile = chi2_dist.ppf(1 - self._chi_test_alfa, child_states_numb - 1) + Ks = np.sqrt(r1s / r2s) + Ls = np.sqrt(r2s / r1s) + for val in range(0, child_states_numb): + Chi = np.sum(np.power(Ks[val] * M2_no_diag[val] - Ls[val] *M1_no_diag[val], 2) / + (M1_no_diag[val] + M2_no_diag[val])) + if Chi > chi_2_quantile: + return False + return True + + def compute_thumb_value(self, parent_val, child_val, parent_set_vals): + """Compute the value to test against the thumb_threshold. + + :param parent_val: test parent's variable cardinality + :type parent_val: int + :param child_val: test child's variable cardinality + :type child_val: int + :param parent_set_vals: the cardinalities of the nodes in the current sep-set + :type parent_set_vals: List + :return: the thumb value for the current independence test + :rtype: int + """ + df = (child_val - 1) ** 2 + df = df * parent_val + for v in parent_set_vals: + df = df * v + return df + + def one_iteration_of_CTPC_algorithm(self, var_id: str, tot_vars_count: int)-> typing.List: + """Performs an iteration of the CTPC algorithm using the node ``var_id`` as ``test_child``. + + :param var_id: the node label of the test child + :type var_id: string + """ + optimizer_obj = ConstraintBasedOptimizer( + node_id = var_id, + structure_estimator = self, + tot_vars_count = tot_vars_count) + return optimizer_obj.optimize_structure() + + + def ctpc_algorithm(self,disable_multiprocessing:bool= False ): + """Compute the CTPC algorithm over the entire net. 
+ """ + ctpc_algo = self.one_iteration_of_CTPC_algorithm + total_vars_numb = self._sample_path.total_variables_count + + n_nodes= len(self._nodes) + + total_vars_numb_array = [total_vars_numb] * n_nodes + + 'get the number of CPU' + cpu_count = multiprocessing.cpu_count() + + + + 'Remove all the edges from the structure' + self._sample_path.structure.clean_structure_edges() + + 'Estimate the best parents for each node' + #with multiprocessing.Pool(processes=cpu_count) as pool: + #with get_context("spawn").Pool(processes=cpu_count) as pool: + if disable_multiprocessing: + print("DISABILITATO") + cpu_count = 1 + list_edges_partial = [ctpc_algo(n,total_vars_numb) for n in self._nodes] + else: + with concurrent.futures.ProcessPoolExecutor(max_workers=cpu_count) as executor: + list_edges_partial = executor.map(ctpc_algo, + self._nodes, + total_vars_numb_array) + #list_edges_partial = [ctpc_algo(n,total_vars_numb) for n in self._nodes] + + return set(itertools.chain.from_iterable(list_edges_partial)) + + + def estimate_structure(self,disable_multiprocessing:bool=False): + return self.ctpc_algorithm(disable_multiprocessing=disable_multiprocessing) + + + + diff --git a/PyCTBN/build/lib/PyCTBN/estimators/structure_estimator.py b/PyCTBN/build/lib/PyCTBN/estimators/structure_estimator.py new file mode 100644 index 0000000..fbf8ea9 --- /dev/null +++ b/PyCTBN/build/lib/PyCTBN/estimators/structure_estimator.py @@ -0,0 +1,187 @@ + +import itertools +import json +import typing + +import matplotlib.pyplot as plt +import networkx as nx +import numpy as np +from networkx.readwrite import json_graph + +from abc import ABC + +import abc + +from ..utility.cache import Cache +from ..structure_graph.conditional_intensity_matrix import ConditionalIntensityMatrix +from ..structure_graph.network_graph import NetworkGraph +from .parameters_estimator import ParametersEstimator +from ..structure_graph.sample_path import SamplePath +from ..structure_graph.structure import Structure + + +class StructureEstimator(object): + """Has the task of estimating the network structure given the trajectories in ``samplepath``. 
+ + :param sample_path: the _sample_path object containing the trajectories and the real structure + :type sample_path: SamplePath + :_nodes: the nodes labels + :_nodes_vals: the nodes cardinalities + :_nodes_indxs: the nodes indexes + :_complete_graph: the complete directed graph built using the nodes labels in ``_nodes`` + """ + + def __init__(self, sample_path: SamplePath, known_edges: typing.List = None): + self._sample_path = sample_path + self._nodes = np.array(self._sample_path.structure.nodes_labels) + self._nodes_vals = self._sample_path.structure.nodes_values + self._nodes_indxs = self._sample_path.structure.nodes_indexes + self._removable_edges_matrix = self.build_removable_edges_matrix(known_edges) + self._complete_graph = StructureEstimator.build_complete_graph(self._sample_path.structure.nodes_labels) + + + def build_removable_edges_matrix(self, known_edges: typing.List): + """Builds a boolean matrix who shows if a edge could be removed or not, based on prior knowledge given: + + :param known_edges: the list of nodes labels + :type known_edges: List + :return: a boolean matrix + :rtype: np.ndarray + """ + tot_vars_count = self._sample_path.total_variables_count + complete_adj_matrix = np.full((tot_vars_count, tot_vars_count), True) + if known_edges: + for edge in known_edges: + i = self._sample_path.structure.get_node_indx(edge[0]) + j = self._sample_path.structure.get_node_indx(edge[1]) + complete_adj_matrix[i][j] = False + return complete_adj_matrix + + @staticmethod + def build_complete_graph(node_ids: typing.List) -> nx.DiGraph: + """Builds a complete directed graph (no self loops) given the nodes labels in the list ``node_ids``: + + :param node_ids: the list of nodes labels + :type node_ids: List + :return: a complete Digraph Object + :rtype: networkx.DiGraph + """ + complete_graph = nx.DiGraph() + complete_graph.add_nodes_from(node_ids) + complete_graph.add_edges_from(itertools.permutations(node_ids, 2)) + return complete_graph + + + @staticmethod + def generate_possible_sub_sets_of_size( u: typing.List, size: int, parent_label: str): + """Creates a list containing all possible subsets of the list ``u`` of size ``size``, + that do not contains a the node identified by ``parent_label``. + + :param u: the list of nodes + :type u: List + :param size: the size of the subsets + :type size: int + :param parent_label: the node to exclude in the subsets generation + :type parent_label: string + :return: an Iterator Object containing a list of lists + :rtype: Iterator + """ + list_without_test_parent = u[:] + list_without_test_parent.remove(parent_label) + return map(list, itertools.combinations(list_without_test_parent, size)) + + def save_results(self) -> None: + """Save the estimated Structure to a .json file in the path where the data are loaded from. + The file is named as the input dataset but the `results_` word is appended to the results file. 
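+        For example, a dataset loaded from a (hypothetical) ``/path/to/samples.json`` with dataset id ``2`` would be
+        saved as ``results_samples_2.json``.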
+ """ + res = json_graph.node_link_data(self._complete_graph) + name = self._sample_path._importer.file_path.rsplit('/', 1)[-1] + name = name.split('.', 1)[0] + name += '_' + str(self._sample_path._importer.dataset_id()) + name += '.json' + file_name = 'results_' + name + with open(file_name, 'w') as f: + json.dump(res, f) + + + def remove_diagonal_elements(self, matrix): + m = matrix.shape[0] + strided = np.lib.stride_tricks.as_strided + s0, s1 = matrix.strides + return strided(matrix.ravel()[1:], shape=(m - 1, m), strides=(s0 + s1, s1)).reshape(m, -1) + + + @abc.abstractmethod + def estimate_structure(self) -> typing.List: + """Abstract method to estimate the structure + + :return: List of estimated edges + :rtype: Typing.List + """ + pass + + + def adjacency_matrix(self) -> np.ndarray: + """Converts the estimated structure ``_complete_graph`` to a boolean adjacency matrix representation. + + :return: The adjacency matrix of the graph ``_complete_graph`` + :rtype: numpy.ndArray + """ + return nx.adj_matrix(self._complete_graph).toarray().astype(bool) + + def spurious_edges(self) -> typing.List: + """Return the spurious edges present in the estimated structure, if a prior net structure is present in + ``_sample_path.structure``. + + :return: A list containing the spurious edges + :rtype: List + """ + if not self._sample_path.has_prior_net_structure: + raise RuntimeError("Can not compute spurious edges with no prior net structure!") + real_graph = nx.DiGraph() + real_graph.add_nodes_from(self._sample_path.structure.nodes_labels) + real_graph.add_edges_from(self._sample_path.structure.edges) + return nx.difference(real_graph, self._complete_graph).edges + + def save_plot_estimated_structure_graph(self) -> None: + """Plot the estimated structure in a graphical model style. + Spurious edges are colored in red. 
+ """ + graph_to_draw = nx.DiGraph() + spurious_edges = self.spurious_edges() + non_spurious_edges = list(set(self._complete_graph.edges) - set(spurious_edges)) + print(non_spurious_edges) + edges_colors = ['red' if edge in spurious_edges else 'black' for edge in self._complete_graph.edges] + graph_to_draw.add_edges_from(spurious_edges) + graph_to_draw.add_edges_from(non_spurious_edges) + pos = nx.spring_layout(graph_to_draw, k=0.5*1/np.sqrt(len(graph_to_draw.nodes())), iterations=50,scale=10) + options = { + "node_size": 2000, + "node_color": "white", + "edgecolors": "black", + 'linewidths':2, + "with_labels":True, + "font_size":13, + 'connectionstyle': 'arc3, rad = 0.1', + "arrowsize": 15, + "arrowstyle": '<|-', + "width": 1, + "edge_color":edges_colors, + } + + nx.draw(graph_to_draw, pos, **options) + ax = plt.gca() + ax.margins(0.20) + plt.axis("off") + name = self._sample_path._importer.file_path.rsplit('/', 1)[-1] + name = name.split('.', 1)[0] + name += '_' + str(self._sample_path._importer.dataset_id()) + name += '.png' + plt.savefig(name) + plt.clf() + print("Estimated Structure Plot Saved At: ", os.path.abspath(name)) + + + + + diff --git a/PyCTBN/build/lib/PyCTBN/estimators/structure_score_based_estimator.py b/PyCTBN/build/lib/PyCTBN/estimators/structure_score_based_estimator.py new file mode 100644 index 0000000..2903db3 --- /dev/null +++ b/PyCTBN/build/lib/PyCTBN/estimators/structure_score_based_estimator.py @@ -0,0 +1,244 @@ + +import itertools +import json +import typing + +import networkx as nx +import numpy as np +from networkx.readwrite import json_graph + +from random import choice + +import concurrent.futures + +import copy + +from ..structure_graph.conditional_intensity_matrix import ConditionalIntensityMatrix +from ..structure_graph.network_graph import NetworkGraph +from .parameters_estimator import ParametersEstimator +from .structure_estimator import StructureEstimator +from ..structure_graph.sample_path import SamplePath +from ..structure_graph.structure import Structure +from .fam_score_calculator import FamScoreCalculator +from ..optimizers.hill_climbing_search import HillClimbing +from ..optimizers.tabu_search import TabuSearch + + +import multiprocessing +from multiprocessing import Pool + + + + +class StructureScoreBasedEstimator(StructureEstimator): + """ + Has the task of estimating the network structure given the trajectories in samplepath by + using a score based approach. + + :param sample_path: the _sample_path object containing the trajectories and the real structure + :type sample_path: SamplePath + :param tau_xu: hyperparameter over the CTBN’s q parameters, default to 0.1 + :type tau_xu: float, optional + :param alpha_xu: hyperparameter over the CTBN’s q parameters, default to 1 + :type alpha_xu: float, optional + :param known_edges: List of known edges, default to [] + :type known_edges: List, optional + + """ + + def __init__(self, sample_path: SamplePath, tau_xu:int=0.1, alpha_xu:int = 1,known_edges: typing.List= []): + super().__init__(sample_path,known_edges) + self.tau_xu=tau_xu + self.alpha_xu=alpha_xu + + + def estimate_structure(self, max_parents:int = None, iterations_number:int= 40, + patience:int = None, tabu_length:int = None, tabu_rules_duration:int = None, + optimizer: str = 'tabu',disable_multiprocessing:bool= False ): + """ + Compute the score-based algorithm to find the optimal structure + + :param max_parents: maximum number of parents for each variable. 
If None, disabled, default to None + :type max_parents: int, optional + :param iterations_number: maximum number of optimization algorithm's iteration, default to 40 + :type iterations_number: int, optional + :param patience: number of iteration without any improvement before to stop the search.If None, disabled, default to None + :type patience: int, optional + :param tabu_length: maximum lenght of the data structures used in the optimization process, default to None + :type tabu_length: int, optional + :param tabu_rules_duration: number of iterations in which each rule keeps its value, default to None + :type tabu_rules_duration: int, optional + :param optimizer: name of the optimizer algorithm. Possible values: 'hill' (Hill climbing),'tabu' (tabu search), defualt to 'tabu' + :type optimizer: string, optional + :param disable_multiprocessing: true if you desire to disable the multiprocessing operations, default to False + :type disable_multiprocessing: Boolean, optional + """ + 'Save the true edges structure in tuples' + true_edges = copy.deepcopy(self._sample_path.structure.edges) + true_edges = set(map(tuple, true_edges)) + + 'Remove all the edges from the structure' + self._sample_path.structure.clean_structure_edges() + + estimate_parents = self.estimate_parents + + n_nodes= len(self._nodes) + + l_max_parents= [max_parents] * n_nodes + l_iterations_number = [iterations_number] * n_nodes + l_patience = [patience] * n_nodes + l_tabu_length = [tabu_length] * n_nodes + l_tabu_rules_duration = [tabu_rules_duration] * n_nodes + l_optimizer = [optimizer] * n_nodes + + + 'get the number of CPU' + cpu_count = multiprocessing.cpu_count() + print(f"CPU COUNT: {cpu_count}") + + if disable_multiprocessing: + cpu_count = 1 + + + + + + #with get_context("spawn").Pool(processes=cpu_count) as pool: + #with multiprocessing.Pool(processes=cpu_count) as pool: + + 'Estimate the best parents for each node' + if disable_multiprocessing: + list_edges_partial = [estimate_parents(n,max_parents,iterations_number,patience,tabu_length,tabu_rules_duration,optimizer) for n in self._nodes] + else: + with concurrent.futures.ProcessPoolExecutor(max_workers=cpu_count) as executor: + list_edges_partial = executor.map(estimate_parents, + self._nodes, + l_max_parents, + l_iterations_number, + l_patience, + l_tabu_length, + l_tabu_rules_duration, + l_optimizer) + + + + #list_edges_partial = p.map(estimate_parents, self._nodes) + #list_edges_partial= estimate_parents('Q',max_parents,iterations_number,patience,tabu_length,tabu_rules_duration,optimizer) + + 'Concatenate all the edges list' + set_list_edges = set(itertools.chain.from_iterable(list_edges_partial)) + + #print('-------------------------') + + + 'calculate precision and recall' + n_missing_edges = 0 + n_added_fake_edges = 0 + + try: + n_added_fake_edges = len(set_list_edges.difference(true_edges)) + + n_missing_edges = len(true_edges.difference(set_list_edges)) + + n_true_positive = len(true_edges) - n_missing_edges + + precision = n_true_positive / (n_true_positive + n_added_fake_edges) + + recall = n_true_positive / (n_true_positive + n_missing_edges) + + + # print(f"n archi reali non trovati: {n_missing_edges}") + # print(f"n archi non reali aggiunti: {n_added_fake_edges}") + print(true_edges) + print(set_list_edges) + print(f"precision: {precision} ") + print(f"recall: {recall} ") + except Exception as e: + print(f"errore: {e}") + + return set_list_edges + + + def estimate_parents(self,node_id:str, max_parents:int = None, iterations_number:int= 40, + 
patience:int = 10, tabu_length:int = None, tabu_rules_duration:int=5, + optimizer:str = 'hill' ): + """ + Use the FamScore of a node in order to find the best parent nodes + + :param node_id: current node's id + :type node_id: string + :param max_parents: maximum number of parents for each variable. If None, disabled, default to None + :type max_parents: int, optional + :param iterations_number: maximum number of optimization algorithm's iteration, default to 40 + :type iterations_number: int, optional + :param patience: number of iteration without any improvement before to stop the search.If None, disabled, default to None + :type patience: int, optional + :param tabu_length: maximum lenght of the data structures used in the optimization process, default to None + :type tabu_length: int, optional + :param tabu_rules_duration: number of iterations in which each rule keeps its value, default to None + :type tabu_rules_duration: int, optional + :param optimizer: name of the optimizer algorithm. Possible values: 'hill' (Hill climbing),'tabu' (tabu search), defualt to 'tabu' + :type optimizer: string, optional + + :return: A list of the best edges for the currente node + :rtype: List + """ + + "choose the optimizer algotithm" + if optimizer == 'tabu': + optimizer = TabuSearch( + node_id = node_id, + structure_estimator = self, + max_parents = max_parents, + iterations_number = iterations_number, + patience = patience, + tabu_length = tabu_length, + tabu_rules_duration = tabu_rules_duration) + else: #if optimizer == 'hill': + optimizer = HillClimbing( + node_id = node_id, + structure_estimator = self, + max_parents = max_parents, + iterations_number = iterations_number, + patience = patience) + + "call the optmizer's function that calculates the current node's parents" + return optimizer.optimize_structure() + + + def get_score_from_graph(self, + graph: NetworkGraph, + node_id:str): + """ + Get the FamScore of a node + + :param node_id: current node's id + :type node_id: string + :param graph: current graph to be computed + :type graph: class:'NetworkGraph' + + + :return: The FamSCore for this graph structure + :rtype: float + """ + + 'inizialize the graph for a single node' + graph.fast_init(node_id) + + params_estimation = ParametersEstimator(self._sample_path.trajectories, graph) + + 'Inizialize and compute parameters for node' + params_estimation.fast_init(node_id) + SoCims = params_estimation.compute_parameters_for_node(node_id) + + 'calculate the FamScore for the node' + fam_score_obj = FamScoreCalculator() + + score = fam_score_obj.get_fam_score(SoCims.actual_cims,tau_xu = self.tau_xu,alpha_xu=self.alpha_xu) + + #print(f" lo score per {node_id} risulta: {score} ") + return score + + + + diff --git a/PyCTBN/build/lib/PyCTBN/optimizers/__init__.py b/PyCTBN/build/lib/PyCTBN/optimizers/__init__.py new file mode 100644 index 0000000..4162bf1 --- /dev/null +++ b/PyCTBN/build/lib/PyCTBN/optimizers/__init__.py @@ -0,0 +1,4 @@ +from .optimizer import Optimizer +from .tabu_search import TabuSearch +from .hill_climbing_search import HillClimbing +from .constraint_based_optimizer import ConstraintBasedOptimizer \ No newline at end of file diff --git a/PyCTBN/build/lib/PyCTBN/optimizers/constraint_based_optimizer.py b/PyCTBN/build/lib/PyCTBN/optimizers/constraint_based_optimizer.py new file mode 100644 index 0000000..65bc19c --- /dev/null +++ b/PyCTBN/build/lib/PyCTBN/optimizers/constraint_based_optimizer.py @@ -0,0 +1,87 @@ + +import itertools +import json +import typing + +import networkx as nx 
+import numpy as np + +from random import choice + +from abc import ABC + +import copy + + +from .optimizer import Optimizer +from ..estimators.structure_estimator import StructureEstimator +from ..structure_graph.network_graph import NetworkGraph + + +class ConstraintBasedOptimizer(Optimizer): + """ + Optimizer class that implement a CTPC Algorithm + + :param node_id: current node's id + :type node_id: string + :param structure_estimator: a structure estimator object with the information about the net + :type structure_estimator: class:'StructureEstimator' + :param tot_vars_count: number of variables in the dataset + :type tot_vars_count: int + """ + def __init__(self, + node_id:str, + structure_estimator: StructureEstimator, + tot_vars_count:int + ): + """ + Constructor + """ + super().__init__(node_id, structure_estimator) + self.tot_vars_count = tot_vars_count + + + + def optimize_structure(self): + """ + Compute Optimization process for a structure_estimator by using a CTPC Algorithm + + :return: the estimated structure for the node + :rtype: List + """ + print("##################TESTING VAR################", self.node_id) + + graph = NetworkGraph(self.structure_estimator._sample_path.structure) + + other_nodes = [node for node in self.structure_estimator._sample_path.structure.nodes_labels if node != self.node_id] + + for possible_parent in other_nodes: + graph.add_edges([(possible_parent,self.node_id)]) + + + u = other_nodes + #tests_parents_numb = len(u) + #complete_frame = self.complete_graph_frame + #test_frame = complete_frame.loc[complete_frame['To'].isin([self.node_id])] + child_states_numb = self.structure_estimator._sample_path.structure.get_states_number(self.node_id) + b = 0 + while b < len(u): + parent_indx = 0 + while parent_indx < len(u): + removed = False + test_parent = u[parent_indx] + i = self.structure_estimator._sample_path.structure.get_node_indx(test_parent) + j = self.structure_estimator._sample_path.structure.get_node_indx(self.node_id) + if self.structure_estimator._removable_edges_matrix[i][j]: + S = StructureEstimator.generate_possible_sub_sets_of_size(u, b, test_parent) + for parents_set in S: + if self.structure_estimator.complete_test(test_parent, self.node_id, parents_set, child_states_numb, self.tot_vars_count,i,j): + graph.remove_edges([(test_parent, self.node_id)]) + u.remove(test_parent) + removed = True + break + if not removed: + parent_indx += 1 + b += 1 + self.structure_estimator._cache.clear() + return graph.edges \ No newline at end of file diff --git a/PyCTBN/build/lib/PyCTBN/optimizers/hill_climbing_search.py b/PyCTBN/build/lib/PyCTBN/optimizers/hill_climbing_search.py new file mode 100644 index 0000000..6783be0 --- /dev/null +++ b/PyCTBN/build/lib/PyCTBN/optimizers/hill_climbing_search.py @@ -0,0 +1,135 @@ + +import itertools +import json +import typing + +import networkx as nx +import numpy as np + +from random import choice + +from abc import ABC + + +from .optimizer import Optimizer +from ..estimators.structure_estimator import StructureEstimator +from ..structure_graph.network_graph import NetworkGraph + + +class HillClimbing(Optimizer): + """ + Optimizer class that implement Hill Climbing Search + + + :param node_id: current node's id + :type node_id: string + :param structure_estimator: a structure estimator object with the information about the net + :type structure_estimator: class:'StructureEstimator' + :param max_parents: maximum number of parents for each variable. 
If None, disabled, default to None + :type max_parents: int, optional + :param iterations_number: maximum number of optimization algorithm's iteration, default to 40 + :type iterations_number: int, optional + :param patience: number of iteration without any improvement before to stop the search.If None, disabled, default to None + :type patience: int, optional + + + + """ + def __init__(self, + node_id:str, + structure_estimator: StructureEstimator, + max_parents:int = None, + iterations_number:int= 40, + patience:int = None + ): + """ + Constructor + """ + super().__init__(node_id, structure_estimator) + self.max_parents = max_parents + self.iterations_number = iterations_number + self.patience = patience + + + + def optimize_structure(self) -> typing.List: + """ + Compute Optimization process for a structure_estimator by using a Hill Climbing Algorithm + + :return: the estimated structure for the node + :rtype: List + """ + + #'Create the graph for the single node' + graph = NetworkGraph(self.structure_estimator._sample_path.structure) + + 'get the index for the current node' + node_index = self.structure_estimator._sample_path._structure.get_node_indx(self.node_id) + + 'list of prior edges' + prior_parents = set() + + 'Add the edges from prior knowledge' + for i in range(len(self.structure_estimator._removable_edges_matrix)): + if not self.structure_estimator._removable_edges_matrix[i][node_index]: + parent_id= self.structure_estimator._sample_path._structure.get_node_id(i) + prior_parents.add(parent_id) + + 'Add the node to the starting structure' + graph.add_edges([(parent_id, self.node_id)]) + + + + 'get all the possible parents' + other_nodes = [node for node in + self.structure_estimator._sample_path.structure.nodes_labels if + node != self.node_id and + not prior_parents.__contains__(node)] + + actual_best_score = self.structure_estimator.get_score_from_graph(graph,self.node_id) + + patince_count = 0 + for i in range(self.iterations_number): + 'choose a new random edge' + current_new_parent = choice(other_nodes) + current_edge = (current_new_parent,self.node_id) + added = False + parent_removed = None + + + if graph.has_edge(current_edge): + graph.remove_edges([current_edge]) + else: + 'check the max_parents constraint' + if self.max_parents is not None: + parents_list = graph.get_parents_by_id(self.node_id) + if len(parents_list) >= self.max_parents : + parent_removed = (choice(parents_list), self.node_id) + graph.remove_edges([parent_removed]) + graph.add_edges([current_edge]) + added = True + #print('**************************') + current_score = self.structure_estimator.get_score_from_graph(graph,self.node_id) + + + if current_score > actual_best_score: + 'update current best score' + actual_best_score = current_score + patince_count = 0 + else: + 'undo the last update' + if added: + graph.remove_edges([current_edge]) + 'If a parent was removed, add it again to the graph' + if parent_removed is not None: + graph.add_edges([parent_removed]) + else: + graph.add_edges([current_edge]) + 'update patience count' + patince_count += 1 + + if self.patience is not None and patince_count > self.patience: + break + + print(f"finito variabile: {self.node_id}") + return graph.edges \ No newline at end of file diff --git a/PyCTBN/build/lib/PyCTBN/optimizers/optimizer.py b/PyCTBN/build/lib/PyCTBN/optimizers/optimizer.py new file mode 100644 index 0000000..36445c0 --- /dev/null +++ b/PyCTBN/build/lib/PyCTBN/optimizers/optimizer.py @@ -0,0 +1,39 @@ + +import itertools +import json +import 
typing + +import networkx as nx +import numpy as np + +import abc + +from ..estimators.structure_estimator import StructureEstimator + + + +class Optimizer(abc.ABC): + """ + Interface class for all the optimizer's child PyCTBN + + :param node_id: the node label + :type node_id: string + :param structure_estimator: A structureEstimator Object to predict the structure + :type structure_estimator: class:'StructureEstimator' + + """ + + def __init__(self, node_id:str, structure_estimator: StructureEstimator): + self.node_id = node_id + self.structure_estimator = structure_estimator + + + @abc.abstractmethod + def optimize_structure(self) -> typing.List: + """ + Compute Optimization process for a structure_estimator + + :return: the estimated structure for the node + :rtype: List + """ + pass diff --git a/PyCTBN/build/lib/PyCTBN/optimizers/tabu_search.py b/PyCTBN/build/lib/PyCTBN/optimizers/tabu_search.py new file mode 100644 index 0000000..e15dd40 --- /dev/null +++ b/PyCTBN/build/lib/PyCTBN/optimizers/tabu_search.py @@ -0,0 +1,199 @@ + +import itertools +import json +import typing + +import networkx as nx +import numpy as np + +from random import choice,sample + +from abc import ABC + + +from .optimizer import Optimizer +from ..estimators.structure_estimator import StructureEstimator +from ..structure_graph.network_graph import NetworkGraph + +import queue + + +class TabuSearch(Optimizer): + """ + Optimizer class that implement Tabu Search + + + :param node_id: current node's id + :type node_id: string + :param structure_estimator: a structure estimator object with the information about the net + :type structure_estimator: class:'StructureEstimator' + :param max_parents: maximum number of parents for each variable. If None, disabled, default to None + :type max_parents: int, optional + :param iterations_number: maximum number of optimization algorithm's iteration, default to 40 + :type iterations_number: int, optional + :param patience: number of iteration without any improvement before to stop the search.If None, disabled, default to None + :type patience: int, optional + :param tabu_length: maximum lenght of the data structures used in the optimization process, default to None + :type tabu_length: int, optional + :param tabu_rules_duration: number of iterations in which each rule keeps its value, default to None + :type tabu_rules_duration: int, optional + + + """ + def __init__(self, + node_id:str, + structure_estimator: StructureEstimator, + max_parents:int = None, + iterations_number:int= 40, + patience:int = None, + tabu_length:int = None, + tabu_rules_duration = None + ): + """ + Constructor + """ + super().__init__(node_id, structure_estimator) + self.max_parents = max_parents + self.iterations_number = iterations_number + self.patience = patience + self.tabu_length = tabu_length + self.tabu_rules_duration = tabu_rules_duration + + + def optimize_structure(self) -> typing.List: + """ + Compute Optimization process for a structure_estimator by using a Hill Climbing Algorithm + + :return: the estimated structure for the node + :rtype: List + """ + print(f"tabu search is processing the structure of {self.node_id}") + + 'Create the graph for the single node' + graph = NetworkGraph(self.structure_estimator._sample_path.structure) + + 'get the index for the current node' + node_index = self.structure_estimator._sample_path._structure.get_node_indx(self.node_id) + + 'list of prior edges' + prior_parents = set() + + 'Add the edges from prior knowledge' + for i in 
range(len(self.structure_estimator._removable_edges_matrix)): + if not self.structure_estimator._removable_edges_matrix[i][node_index]: + parent_id= self.structure_estimator._sample_path._structure.get_node_id(i) + prior_parents.add(parent_id) + + 'Add the node to the starting structure' + graph.add_edges([(parent_id, self.node_id)]) + + + + 'get all the possible parents' + other_nodes = set([node for node in + self.structure_estimator._sample_path.structure.nodes_labels if + node != self.node_id and + not prior_parents.__contains__(node)]) + + 'calculate the score for the node without parents' + actual_best_score = self.structure_estimator.get_score_from_graph(graph,self.node_id) + + + 'initialize tabu_length and tabu_rules_duration if None' + if self.tabu_length is None: + self.tabu_length = len(other_nodes) + + if self.tabu_rules_duration is None: + self.tabu_tabu_rules_durationength = len(other_nodes) + + 'inizialize the data structures' + tabu_set = set() + tabu_queue = queue.Queue() + + patince_count = 0 + tabu_count = 0 + for i in range(self.iterations_number): + + current_possible_nodes = other_nodes.difference(tabu_set) + + 'choose a new random edge according to tabu restiction' + if(len(current_possible_nodes) > 0): + current_new_parent = sample(current_possible_nodes,k=1)[0] + else: + current_new_parent = tabu_queue.get() + tabu_set.remove(current_new_parent) + + + + current_edge = (current_new_parent,self.node_id) + added = False + parent_removed = None + + if graph.has_edge(current_edge): + graph.remove_edges([current_edge]) + else: + 'check the max_parents constraint' + if self.max_parents is not None: + parents_list = graph.get_parents_by_id(self.node_id) + if len(parents_list) >= self.max_parents : + parent_removed = (choice(parents_list), self.node_id) + graph.remove_edges([parent_removed]) + graph.add_edges([current_edge]) + added = True + #print('**************************') + current_score = self.structure_estimator.get_score_from_graph(graph,self.node_id) + + + # print("-------------------------------------------") + # print(f"Current new parent: {current_new_parent}") + # print(f"Current score: {current_score}") + # print(f"Current best score: {actual_best_score}") + # print(f"tabu list : {str(tabu_set)} length: {len(tabu_set)}") + # print(f"tabu queue : {str(tabu_queue)} length: {tabu_queue.qsize()}") + # print(f"graph edges: {graph.edges}") + + # print("-------------------------------------------") + # input() + if current_score > actual_best_score: + 'update current best score' + actual_best_score = current_score + patince_count = 0 + 'update tabu list' + + + else: + 'undo the last update' + if added: + graph.remove_edges([current_edge]) + 'If a parent was removed, add it again to the graph' + if parent_removed is not None: + graph.add_edges([parent_removed]) + else: + graph.add_edges([current_edge]) + 'update patience count' + patince_count += 1 + + + if tabu_queue.qsize() >= self.tabu_length: + current_removed = tabu_queue.get() + tabu_set.remove(current_removed) + 'Add the node on the tabu list' + tabu_queue.put(current_new_parent) + tabu_set.add(current_new_parent) + + tabu_count += 1 + + 'Every tabu_rules_duration step remove an item from the tabu list ' + if tabu_count % self.tabu_rules_duration == 0: + if tabu_queue.qsize() > 0: + current_removed = tabu_queue.get() + tabu_set.remove(current_removed) + tabu_count = 0 + else: + tabu_count = 0 + + if self.patience is not None and patince_count > self.patience: + break + + print(f"finito variabile: 
{self.node_id}") + return graph.edges \ No newline at end of file diff --git a/PyCTBN/build/lib/PyCTBN/structure_graph/__init__.py b/PyCTBN/build/lib/PyCTBN/structure_graph/__init__.py new file mode 100644 index 0000000..85f18a2 --- /dev/null +++ b/PyCTBN/build/lib/PyCTBN/structure_graph/__init__.py @@ -0,0 +1,6 @@ +from .conditional_intensity_matrix import ConditionalIntensityMatrix +from .network_graph import NetworkGraph +from .sample_path import SamplePath +from .set_of_cims import SetOfCims +from .structure import Structure +from .trajectory import Trajectory \ No newline at end of file diff --git a/PyCTBN/build/lib/PyCTBN/structure_graph/conditional_intensity_matrix.py b/PyCTBN/build/lib/PyCTBN/structure_graph/conditional_intensity_matrix.py new file mode 100644 index 0000000..4abfdd0 --- /dev/null +++ b/PyCTBN/build/lib/PyCTBN/structure_graph/conditional_intensity_matrix.py @@ -0,0 +1,42 @@ +import numpy as np + + +class ConditionalIntensityMatrix(object): + """Abstracts the Conditional Intesity matrix of a node as aggregation of the state residence times vector + and state transition matrix and the actual CIM matrix. + + :param state_residence_times: state residence times vector + :type state_residence_times: numpy.array + :param state_transition_matrix: the transitions count matrix + :type state_transition_matrix: numpy.ndArray + :_cim: the actual cim of the node + """ + def __init__(self, state_residence_times: np.array, state_transition_matrix: np.array): + """Constructor Method + """ + self._state_residence_times = state_residence_times + self._state_transition_matrix = state_transition_matrix + self._cim = self.state_transition_matrix.astype(np.float64) + + def compute_cim_coefficients(self) -> None: + """Compute the coefficients of the matrix _cim by using the following equality q_xx' = M[x, x'] / T[x]. + The class member ``_cim`` will contain the computed cim + """ + np.fill_diagonal(self._cim, self._cim.diagonal() * -1) + self._cim = ((self._cim.T + 1) / (self._state_residence_times + 1)).T + + @property + def state_residence_times(self) -> np.ndarray: + return self._state_residence_times + + @property + def state_transition_matrix(self) -> np.ndarray: + return self._state_transition_matrix + + @property + def cim(self) -> np.ndarray: + return self._cim + + def __repr__(self): + return 'CIM:\n' + str(self.cim) + diff --git a/PyCTBN/build/lib/PyCTBN/structure_graph/network_graph.py b/PyCTBN/build/lib/PyCTBN/structure_graph/network_graph.py new file mode 100644 index 0000000..623981d --- /dev/null +++ b/PyCTBN/build/lib/PyCTBN/structure_graph/network_graph.py @@ -0,0 +1,293 @@ + +import typing + +import networkx as nx +import numpy as np + +from .structure import Structure + + +class NetworkGraph(object): + """Abstracts the infos contained in the Structure class in the form of a directed graph. + Has the task of creating all the necessary filtering and indexing structures for parameters estimation + + :param graph_struct: the ``Structure`` object from which infos about the net will be extracted + :type graph_struct: Structure + :_graph: directed graph + :_aggregated_info_about_nodes_parents: a structure that contains all the necessary infos + about every parents of the node of which all the indexing and filtering structures will be constructed. 
+ :_time_scalar_indexing_structure: the indexing structure for state res time estimation + :_transition_scalar_indexing_structure: the indexing structure for transition computation + :_time_filtering: the columns filtering structure used in the computation of the state res times + :_transition_filtering: the columns filtering structure used in the computation of the transition + from one state to another + :_p_combs_structure: all the possible parents states combination for the node of interest + """ + + def __init__(self, graph_struct: Structure): + """Constructor Method + """ + self._graph_struct = graph_struct + self._graph = nx.DiGraph() + self._aggregated_info_about_nodes_parents = None + self._time_scalar_indexing_structure = None + self._transition_scalar_indexing_structure = None + self._time_filtering = None + self._transition_filtering = None + self._p_combs_structure = None + + def init_graph(self): + self.add_nodes(self._nodes_labels) + self.add_edges(self.graph_struct.edges) + self.aggregated_info_about_nodes_parents = self.get_ord_set_of_par_of_all_nodes() + self._fancy_indexing = self.build_fancy_indexing_structure(0) + self.build_scalar_indexing_structures() + self.build_time_columns_filtering_structure() + self.build_transition_columns_filtering_structure() + self._p_combs_structure = self.build_p_combs_structure() + + def fast_init(self, node_id: str) -> None: + """Initializes all the necessary structures for parameters estimation of the node identified by the label + node_id + + :param node_id: the label of the node + :type node_id: string + """ + self.add_nodes(self._graph_struct.nodes_labels) + self.add_edges(self._graph_struct.edges) + self._aggregated_info_about_nodes_parents = self.get_ordered_by_indx_set_of_parents(node_id) + p_indxs = self._aggregated_info_about_nodes_parents[1] + p_vals = self._aggregated_info_about_nodes_parents[2] + node_states = self.get_states_number(node_id) + node_indx = self.get_node_indx(node_id) + cols_number = self._graph_struct.total_variables_number + self._time_scalar_indexing_structure = NetworkGraph.\ + build_time_scalar_indexing_structure_for_a_node(node_states, p_vals) + self._transition_scalar_indexing_structure = NetworkGraph.\ + build_transition_scalar_indexing_structure_for_a_node(node_states, p_vals) + self._time_filtering = NetworkGraph.build_time_columns_filtering_for_a_node(node_indx, p_indxs) + self._transition_filtering = NetworkGraph.build_transition_filtering_for_a_node(node_indx, p_indxs, cols_number) + self._p_combs_structure = NetworkGraph.build_p_comb_structure_for_a_node(p_vals) + + def add_nodes(self, list_of_nodes: typing.List) -> None: + """Adds the nodes to the ``_graph`` contained in the list of nodes ``list_of_nodes``. 
+ Sets all the properties that identify a nodes (index, positional index, cardinality) + + :param list_of_nodes: the nodes to add to ``_graph`` + :type list_of_nodes: List + """ + nodes_indxs = self._graph_struct.nodes_indexes + nodes_vals = self._graph_struct.nodes_values + pos = 0 + for id, node_indx, node_val in zip(list_of_nodes, nodes_indxs, nodes_vals): + self._graph.add_node(id, indx=node_indx, val=node_val, pos_indx=pos) + pos += 1 + + def has_edge(self,edge:tuple)-> bool: + """ + Check if the graph contains a specific edge + + Parameters: + edge: a tuple that rappresents the edge + Returns: + bool + """ + return self._graph.has_edge(edge[0],edge[1]) + + def add_edges(self, list_of_edges: typing.List) -> None: + """Add the edges to the ``_graph`` contained in the list ``list_of_edges``. + + :param list_of_edges: the list containing of tuples containing the edges + :type list_of_edges: List + """ + self._graph.add_edges_from(list_of_edges) + + def remove_node(self, node_id: str) -> None: + """Remove the node ``node_id`` from all the class members. + Initialize all the filtering/indexing structures. + """ + self._graph.remove_node(node_id) + self._graph_struct.remove_node(node_id) + self.clear_indexing_filtering_structures() + + def clear_indexing_filtering_structures(self) -> None: + """Initialize all the filtering/indexing structures. + """ + self._aggregated_info_about_nodes_parents = None + self._time_scalar_indexing_structure = None + self._transition_scalar_indexing_structure = None + self._time_filtering = None + self._transition_filtering = None + self._p_combs_structure = None + + def get_ordered_by_indx_set_of_parents(self, node: str) -> typing.Tuple: + """Builds the aggregated structure that holds all the infos relative to the parent set of the node, namely + (parents_labels, parents_indexes, parents_cardinalities). + + :param node: the label of the node + :type node: string + :return: a tuple containing all the parent set infos + :rtype: Tuple + """ + parents = self.get_parents_by_id(node) + nodes = self._graph_struct.nodes_labels + d = {v: i for i, v in enumerate(nodes)} + sorted_parents = sorted(parents, key=lambda v: d[v]) + get_node_indx = self.get_node_indx + p_indxes = [get_node_indx(node) for node in sorted_parents] + p_values = [self.get_states_number(node) for node in sorted_parents] + return sorted_parents, p_indxes, p_values + + def remove_edges(self, list_of_edges: typing.List) -> None: + """Remove the edges to the graph contained in the list list_of_edges. + + :param list_of_edges: The edges to remove from the graph + :type list_of_edges: List + """ + self._graph.remove_edges_from(list_of_edges) + + @staticmethod + def build_time_scalar_indexing_structure_for_a_node(node_states: int, + parents_vals: typing.List) -> np.ndarray: + """Builds an indexing structure for the computation of state residence times values. + + :param node_states: the node cardinality + :type node_states: int + :param parents_vals: the caridinalites of the node's parents + :type parents_vals: List + :return: The time indexing structure + :rtype: numpy.ndArray + """ + T_vector = np.array([node_states]) + T_vector = np.append(T_vector, parents_vals) + T_vector = T_vector.cumprod().astype(np.int) + return T_vector + + @staticmethod + def build_transition_scalar_indexing_structure_for_a_node(node_states_number: int, parents_vals: typing.List) \ + -> np.ndarray: + """Builds an indexing structure for the computation of state transitions values. 
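+        For example, with ``node_states_number = 3`` and ``parents_vals = [3, 2]`` the returned vector is the cumulative
+        product ``[3, 9, 27, 54]``, i.e. ``[s, s*s, s*s*p1, s*s*p1*p2]`` with ``s`` the node cardinality.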
+
+        :param node_states_number: the node cardinality
+        :type node_states_number: int
+        :param parents_vals: the cardinalities of the node's parents
+        :type parents_vals: List
+        :return: The transition indexing structure
+        :rtype: numpy.ndArray
+        """
+        M_vector = np.array([node_states_number,
+                             node_states_number])
+        M_vector = np.append(M_vector, parents_vals)
+        M_vector = M_vector.cumprod().astype(np.int)
+        return M_vector
+
+    @staticmethod
+    def build_time_columns_filtering_for_a_node(node_indx: int, p_indxs: typing.List) -> np.ndarray:
+        """Builds the necessary structure to filter the desired columns indicated by ``node_indx`` and ``p_indxs``
+        in the dataset.
+        This structure will be used in the computation of the state res times.
+
+        :param node_indx: the index of the node
+        :type node_indx: int
+        :param p_indxs: the indexes of the node's parents
+        :type p_indxs: List
+        :return: The filtering structure for times estimation
+        :rtype: numpy.ndArray
+        """
+        return np.append(np.array([node_indx], dtype=np.int), p_indxs).astype(np.int)
+
+    @staticmethod
+    def build_transition_filtering_for_a_node(node_indx: int, p_indxs: typing.List, nodes_number: int) \
+            -> np.ndarray:
+        """Builds the necessary structure to filter the desired columns indicated by ``node_indx`` and ``p_indxs``
+        in the dataset.
+        This structure will be used in the computation of the state transitions values.
+
+        :param node_indx: the index of the node
+        :type node_indx: int
+        :param p_indxs: the indexes of the node's parents
+        :type p_indxs: List
+        :param nodes_number: the total number of nodes in the dataset
+        :type nodes_number: int
+        :return: The filtering structure for transitions estimation
+        :rtype: numpy.ndArray
+        """
+        return np.array([node_indx + nodes_number, node_indx, *p_indxs], dtype=np.int)
+
+    @staticmethod
+    def build_p_comb_structure_for_a_node(parents_values: typing.List) -> np.ndarray:
+        """Builds the combinatorial structure that contains the combinations of all the values contained in
+        ``parents_values``.
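+        For example, with ``parents_values = [2, 2]`` the returned grid contains one row per joint parent state:
+        ``[[0, 0], [1, 0], [0, 1], [1, 1]]``.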
+ + :param parents_values: the cardinalities of the nodes + :type parents_values: List + :return: A numpy matrix containing a grid of the combinations + :rtype: numpy.ndArray + """ + tmp = [] + for val in parents_values: + tmp.append([x for x in range(val)]) + if len(parents_values) > 0: + parents_comb = np.array(np.meshgrid(*tmp)).T.reshape(-1, len(parents_values)) + if len(parents_values) > 1: + tmp_comb = parents_comb[:, 1].copy() + parents_comb[:, 1] = parents_comb[:, 0].copy() + parents_comb[:, 0] = tmp_comb + else: + parents_comb = np.array([[]], dtype=np.int) + return parents_comb + + def get_parents_by_id(self, node_id) -> typing.List: + """Returns a list of labels of the parents of the node ``node_id`` + + :param node_id: the node label + :type node_id: string + :return: a List of labels of the parents + :rtype: List + """ + return list(self._graph.predecessors(node_id)) + + def get_states_number(self, node_id) -> int: + return self._graph.nodes[node_id]['val'] + + def get_node_indx(self, node_id) -> int: + return nx.get_node_attributes(self._graph, 'indx')[node_id] + + def get_positional_node_indx(self, node_id) -> int: + return self._graph.nodes[node_id]['pos_indx'] + + @property + def nodes(self) -> typing.List: + return self._graph_struct.nodes_labels + + @property + def edges(self) -> typing.List: + return list(self._graph.edges) + + @property + def nodes_indexes(self) -> np.ndarray: + return self._graph_struct.nodes_indexes + + @property + def nodes_values(self) -> np.ndarray: + return self._graph_struct.nodes_values + + @property + def time_scalar_indexing_strucure(self) -> np.ndarray: + return self._time_scalar_indexing_structure + + @property + def time_filtering(self) -> np.ndarray: + return self._time_filtering + + @property + def transition_scalar_indexing_structure(self) -> np.ndarray: + return self._transition_scalar_indexing_structure + + @property + def transition_filtering(self) -> np.ndarray: + return self._transition_filtering + + @property + def p_combs(self) -> np.ndarray: + return self._p_combs_structure diff --git a/PyCTBN/build/lib/PyCTBN/structure_graph/sample_path.py b/PyCTBN/build/lib/PyCTBN/structure_graph/sample_path.py new file mode 100644 index 0000000..80b51d9 --- /dev/null +++ b/PyCTBN/build/lib/PyCTBN/structure_graph/sample_path.py @@ -0,0 +1,91 @@ + + +import numpy as np +import pandas as pd + +from .structure import Structure +from .trajectory import Trajectory +from ..utility.abstract_importer import AbstractImporter + + + +class SamplePath(object): + """Aggregates all the informations about the trajectories, the real structure of the sampled net and variables + cardinalites. Has the task of creating the objects ``Trajectory`` and ``Structure`` that will + contain the mentioned data. 
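+    A minimal construction sketch (``importer`` stands for any already initialized ``AbstractImporter`` subclass
+    instance holding the processed data):
+
+    .. code-block:: python
+
+        sample_path = SamplePath(importer)
+        sample_path.build_trajectories()
+        sample_path.build_structure()
+        trajectories = sample_path.trajectories
+        structure = sample_path.structure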
+ + :param importer: the Importer object which contains the imported and processed data + :type importer: AbstractImporter + :_trajectories: the ``Trajectory`` object that will contain all the concatenated trajectories + :_structure: the ``Structure`` Object that will contain all the structural infos about the net + :_total_variables_count: the number of variables in the net + """ + def __init__(self, importer: AbstractImporter): + """Constructor Method + """ + self._importer = importer + if self._importer._df_variables is None or self._importer._concatenated_samples is None: + raise RuntimeError('The importer object has to contain the all processed data!') + if self._importer._df_variables.empty: + raise RuntimeError('The importer object has to contain the all processed data!') + if isinstance(self._importer._concatenated_samples, pd.DataFrame): + if self._importer._concatenated_samples.empty: + raise RuntimeError('The importer object has to contain the all processed data!') + if isinstance(self._importer._concatenated_samples, np.ndarray): + if self._importer._concatenated_samples.size == 0: + raise RuntimeError('The importer object has to contain the all processed data!') + self._trajectories = None + self._structure = None + self._total_variables_count = None + + def build_trajectories(self) -> None: + """Builds the Trajectory object that will contain all the trajectories. + Clears all the unused dataframes in ``_importer`` Object + """ + self._trajectories = \ + Trajectory(self._importer.build_list_of_samples_array(self._importer.concatenated_samples), + len(self._importer.sorter) + 1) + self._importer.clear_concatenated_frame() + + def build_structure(self) -> None: + """ + Builds the ``Structure`` object that aggregates all the infos about the net. + """ + if self._importer.sorter != self._importer.variables.iloc[:, 0].to_list(): + raise RuntimeError("The Dataset columns order have to match the order of labels in the variables Frame!") + + self._total_variables_count = len(self._importer.sorter) + labels = self._importer.variables.iloc[:, 0].to_list() + indxs = self._importer.variables.index.to_numpy() + vals = self._importer.variables.iloc[:, 1].to_numpy() + if self._importer.structure is None or self._importer.structure.empty: + edges = [] + else: + edges = list(self._importer.structure.to_records(index=False)) + self._structure = Structure(labels, indxs, vals, edges, + self._total_variables_count) + + def clear_memory(self): + self._importer._raw_data = [] + + @property + def trajectories(self) -> Trajectory: + return self._trajectories + + @property + def structure(self) -> Structure: + return self._structure + + @property + def total_variables_count(self) -> int: + return self._total_variables_count + + @property + def has_prior_net_structure(self) -> bool: + return bool(self._structure.edges) + + + + + + diff --git a/PyCTBN/build/lib/PyCTBN/structure_graph/set_of_cims.py b/PyCTBN/build/lib/PyCTBN/structure_graph/set_of_cims.py new file mode 100644 index 0000000..81caff5 --- /dev/null +++ b/PyCTBN/build/lib/PyCTBN/structure_graph/set_of_cims.py @@ -0,0 +1,97 @@ + + +import typing + +import numpy as np + +from .conditional_intensity_matrix import ConditionalIntensityMatrix + + +class SetOfCims(object): + """Aggregates all the CIMS of the node identified by the label _node_id. 
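+    One ``ConditionalIntensityMatrix`` is stored for every combination of the parents' states (a single cim when the
+    node has no parents), in the same order as the rows of the ``p_combs`` structure.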
+
+    :param node_id: the node label
+    :type node_id: string
+    :param parents_states_number: the cardinalities of the parents
+    :type parents_states_number: List
+    :param node_states_number: the cardinality of the node
+    :type node_states_number: int
+    :param p_combs: the p_comb structure bound to this node
+    :type p_combs: numpy.ndArray
+    :_state_residence_times: matrix containing all the state residence time vectors for the node
+    :_transition_matrices: matrix containing all the transition matrices for the node
+    :_actual_cims: the cims of the node
+    """
+
+    def __init__(self, node_id: str, parents_states_number: typing.List, node_states_number: int, p_combs: np.ndarray):
+        """Constructor Method
+        """
+        self._node_id = node_id
+        self._parents_states_number = parents_states_number
+        self._node_states_number = node_states_number
+        self._actual_cims = []
+        self._state_residence_times = None
+        self._transition_matrices = None
+        self._p_combs = p_combs
+        self.build_times_and_transitions_structures()
+
+    def build_times_and_transitions_structures(self) -> None:
+        """Initializes at the correct dimensions the state residence times matrix and the state transition matrices.
+        """
+        if not self._parents_states_number:
+            self._state_residence_times = np.zeros((1, self._node_states_number), dtype=np.float)
+            self._transition_matrices = np.zeros((1, self._node_states_number, self._node_states_number), dtype=np.int)
+        else:
+            self._state_residence_times = \
+                np.zeros((np.prod(self._parents_states_number), self._node_states_number), dtype=np.float)
+            self._transition_matrices = np.zeros([np.prod(self._parents_states_number), self._node_states_number,
+                                                  self._node_states_number], dtype=np.int)
+
+    def build_cims(self, state_res_times: np.ndarray, transition_matrices: np.ndarray) -> None:
+        """Build the ``ConditionalIntensityMatrix`` objects given the state residence times and transitions matrices.
+        Compute the cim coefficients. The class member ``_actual_cims`` will contain the computed cims.
+
+        :param state_res_times: the state residence times matrix
+        :type state_res_times: numpy.ndArray
+        :param transition_matrices: the transition matrices
+        :type transition_matrices: numpy.ndArray
+        """
+        for state_res_time_vector, transition_matrix in zip(state_res_times, transition_matrices):
+            cim_to_add = ConditionalIntensityMatrix(state_res_time_vector, transition_matrix)
+            cim_to_add.compute_cim_coefficients()
+            self._actual_cims.append(cim_to_add)
+        self._actual_cims = np.array(self._actual_cims)
+        self._transition_matrices = None
+        self._state_residence_times = None
+
+    def filter_cims_with_mask(self, mask_arr: np.ndarray, comb: typing.List) -> np.ndarray:
+        """Filter the cims contained in the array ``_actual_cims`` given the boolean mask ``mask_arr`` and the index
+        ``comb``.
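+        When the node has at most one parent (``mask_arr.size <= 1``) all the cims are returned; otherwise only the cims
+        whose parents' states agree with ``comb`` on the positions where ``mask_arr`` is ``True`` are kept.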
+ + :param mask_arr: the boolean mask that indicates which parent to consider + :type mask_arr: numpy.array + :param comb: the state/s of the filtered parents + :type comb: numpy.array + :return: Array of ``ConditionalIntensityMatrix`` objects + :rtype: numpy.array + """ + if mask_arr.size <= 1: + return self._actual_cims + else: + flat_indxs = np.argwhere(np.all(self._p_combs[:, mask_arr] == comb, axis=1)).ravel() + return self._actual_cims[flat_indxs] + + @property + def actual_cims(self) -> np.ndarray: + return self._actual_cims + + @property + def p_combs(self) -> np.ndarray: + return self._p_combs + + def get_cims_number(self): + return len(self._actual_cims) + + + + diff --git a/PyCTBN/build/lib/PyCTBN/structure_graph/structure.py b/PyCTBN/build/lib/PyCTBN/structure_graph/structure.py new file mode 100644 index 0000000..a9d60cc --- /dev/null +++ b/PyCTBN/build/lib/PyCTBN/structure_graph/structure.py @@ -0,0 +1,124 @@ + +import typing as ty + +import numpy as np + + +class Structure(object): + """Contains all the infos about the network structure(nodes labels, nodes caridinalites, edges, indexes) + + :param nodes_labels_list: the symbolic names of the variables + :type nodes_labels_list: List + :param nodes_indexes_arr: the indexes of the nodes + :type nodes_indexes_arr: numpy.ndArray + :param nodes_vals_arr: the cardinalites of the nodes + :type nodes_vals_arr: numpy.ndArray + :param edges_list: the edges of the network + :type edges_list: List + :param total_variables_number: the total number of variables in the dataset + :type total_variables_number: int + """ + + def __init__(self, nodes_labels_list: ty.List, nodes_indexes_arr: np.ndarray, nodes_vals_arr: np.ndarray, + edges_list: ty.List, total_variables_number: int): + """Constructor Method + """ + self._nodes_labels_list = nodes_labels_list + self._nodes_indexes_arr = nodes_indexes_arr + self._nodes_vals_arr = nodes_vals_arr + self._edges_list = edges_list + self._total_variables_number = total_variables_number + + def remove_node(self, node_id: str) -> None: + """Remove the node ``node_id`` from all the class members. + The class member ``_total_variables_number`` since it refers to the total number of variables in the dataset. + """ + node_positional_indx = self._nodes_labels_list.index(node_id) + del self._nodes_labels_list[node_positional_indx] + self._nodes_indexes_arr = np.delete(self._nodes_indexes_arr, node_positional_indx) + self._nodes_vals_arr = np.delete(self._nodes_vals_arr, node_positional_indx) + self._edges_list = [(from_node, to_node) for (from_node, to_node) in self._edges_list if (from_node != node_id + and to_node != node_id)] + + @property + def edges(self) -> ty.List: + return self._edges_list + + @property + def nodes_labels(self) -> ty.List: + return self._nodes_labels_list + + @property + def nodes_indexes(self) -> np.ndarray: + return self._nodes_indexes_arr + + @property + def nodes_values(self) -> np.ndarray: + return self._nodes_vals_arr + + @property + def total_variables_number(self) -> int: + return self._total_variables_number + + def get_node_id(self, node_indx: int) -> str: + """Given the ``node_index`` returns the node label. 
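To make the mask-based filtering of ``filter_cims_with_mask`` above concrete, a small self-contained sketch of the same indexing logic with plain NumPy (the cardinalities and the stand-in CIM values are made up):

import numpy as np
from itertools import product

# Node X has parents A (3 states) and B (2 states): one CIM per parent combination.
p_combs = np.array(list(product(range(3), range(2))))   # shape (6, 2)
cims = np.arange(6)                                      # stand-ins for ConditionalIntensityMatrix objects

# Keep only the CIMs whose parent B (mask position 1) is in state 1,
# mirroring filter_cims_with_mask(mask_arr, comb).
mask_arr = np.array([False, True])
comb = np.array([1])
flat_indxs = np.argwhere(np.all(p_combs[:, mask_arr] == comb, axis=1)).ravel()
print(cims[flat_indxs])   # -> [1 3 5]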
+
+        :param node_indx: the node index
+        :type node_indx: int
+        :return: the node label
+        :rtype: string
+        """
+        return self._nodes_labels_list[node_indx]
+
+    def clean_structure_edges(self):
+        self._edges_list = list()
+
+    def add_edge(self, edge: tuple):
+        self._edges_list.append(edge)
+
+    def remove_edge(self, edge: tuple):
+        self._edges_list.remove(edge)
+
+    def contains_edge(self, edge: tuple) -> bool:
+        return edge in self._edges_list
+
+    def get_node_indx(self, node_id: str) -> int:
+        """Given the node label ``node_id`` returns the node index.
+
+        :param node_id: the node label
+        :type node_id: string
+        :return: the node index
+        :rtype: int
+        """
+        pos_indx = self._nodes_labels_list.index(node_id)
+        return self._nodes_indexes_arr[pos_indx]
+
+    def get_positional_node_indx(self, node_id: str) -> int:
+        return self._nodes_labels_list.index(node_id)
+
+    def get_states_number(self, node: str) -> int:
+        """Given the node label ``node`` returns the cardinality of the node.
+
+        :param node: the node label
+        :type node: string
+        :return: the node cardinality
+        :rtype: int
+        """
+        pos_indx = self._nodes_labels_list.index(node)
+        return self._nodes_vals_arr[pos_indx]
+
+    def __repr__(self):
+        return "Variables:\n" + str(self._nodes_labels_list) + "\nValues:\n" + str(self._nodes_vals_arr) + \
+               "\nEdges: \n" + str(self._edges_list)
+
+    def __eq__(self, other):
+        """Overrides the default implementation"""
+        if isinstance(other, Structure):
+            return set(self._nodes_labels_list) == set(other._nodes_labels_list) and \
+                   np.array_equal(self._nodes_vals_arr, other._nodes_vals_arr) and \
+                   np.array_equal(self._nodes_indexes_arr, other._nodes_indexes_arr) and \
+                   self._edges_list == other._edges_list
+
+        return False
diff --git a/PyCTBN/build/lib/PyCTBN/structure_graph/trajectory.py b/PyCTBN/build/lib/PyCTBN/structure_graph/trajectory.py
new file mode 100644
index 0000000..36899b3
--- /dev/null
+++ b/PyCTBN/build/lib/PyCTBN/structure_graph/trajectory.py
@@ -0,0 +1,45 @@
+
+import typing
+
+import numpy as np
+
+
+class Trajectory(object):
+    """Abstracts the info about a complete set of trajectories, represented as a numpy array of doubles
+    (the time deltas) and a numpy matrix of ints (the changes of states).
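A short sketch of how the ``Structure`` container above is built and queried (node names, indexes and cardinalities are arbitrary toy values; the import path assumes the package layout added by this patch):

import numpy as np
from PyCTBN.structure_graph.structure import Structure

labels = ['X', 'Y', 'Z']
indexes = np.array([0, 1, 2])
cardinalities = np.array([3, 3, 3])
edges = [('X', 'Y'), ('Z', 'Y')]
s = Structure(labels, indexes, cardinalities, edges, total_variables_number=3)

print(s.get_states_number('Y'))      # 3
print(s.get_node_indx('Z'))          # 2
print(s.contains_edge(('X', 'Y')))   # True
s.remove_node('Z')
print(s.nodes_labels, s.edges)       # ['X', 'Y'] [('X', 'Y')]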
+ + :param list_of_columns: the list containing the times array and values matrix + :type list_of_columns: List + :param original_cols_number: total number of cols in the data + :type original_cols_number: int + :_actual_trajectory: the trajectory containing also the duplicated/shifted values + :_times: the array containing the time deltas + """ + + def __init__(self, list_of_columns: typing.List, original_cols_number: int): + """Constructor Method + """ + self._times = list_of_columns[0] + self._actual_trajectory = list_of_columns[1] + self._original_cols_number = original_cols_number + + @property + def trajectory(self) -> np.ndarray: + return self._actual_trajectory[:, :self._original_cols_number - 1] + + @property + def complete_trajectory(self) -> np.ndarray: + return self._actual_trajectory + + @property + def times(self): + return self._times + + def size(self): + return self._actual_trajectory.shape[0] + + def __repr__(self): + return "Complete Trajectory Rows: " + str(self.size()) + "\n" + self.complete_trajectory.__repr__() + \ + "\nTimes Rows:" + str(self.times.size) + "\n" + self.times.__repr__() + + diff --git a/PyCTBN/build/lib/PyCTBN/utility/__init__.py b/PyCTBN/build/lib/PyCTBN/utility/__init__.py new file mode 100644 index 0000000..f79749c --- /dev/null +++ b/PyCTBN/build/lib/PyCTBN/utility/__init__.py @@ -0,0 +1,4 @@ +from .abstract_importer import AbstractImporter +from .cache import Cache +from .json_importer import JsonImporter +from .sample_importer import SampleImporter \ No newline at end of file diff --git a/PyCTBN/build/lib/PyCTBN/utility/abstract_importer.py b/PyCTBN/build/lib/PyCTBN/utility/abstract_importer.py new file mode 100644 index 0000000..1cad352 --- /dev/null +++ b/PyCTBN/build/lib/PyCTBN/utility/abstract_importer.py @@ -0,0 +1,164 @@ + +import typing +from abc import ABC, abstractmethod + +import numpy as np +import pandas as pd + +import copy + +#from sklearn.utils import resample + + +class AbstractImporter(ABC): + """Abstract class that exposes all the necessary methods to process the trajectories and the net structure. + + :param file_path: the file path, or dataset name if you import already processed data + :type file_path: str + :param trajectory_list: Dataframe or numpy array containing the concatenation of all the processed trajectories + :type trajectory_list: typing.Union[pandas.DataFrame, numpy.ndarray] + :param variables: Dataframe containing the nodes labels and cardinalities + :type variables: pandas.DataFrame + :prior_net_structure: Dataframe containing the structure of the network (edges) + :type prior_net_structure: pandas.DataFrame + :_sorter: A list containing the variables labels in the SAME order as the columns in ``concatenated_samples`` + + .. warning:: + The parameters ``variables`` and ``prior_net_structure`` HAVE to be properly constructed + as Pandas Dataframes with the following structure: + Header of _df_structure = [From_Node | To_Node] + Header of _df_variables = [Variable_Label | Variable_Cardinality] + See the tutorial on how to construct a correct ``concatenated_samples`` Dataframe/ndarray. + + .. 
note:: + See :class:``JsonImporter`` for an example implementation + + """ + + def __init__(self, file_path: str = None, trajectory_list: typing.Union[pd.DataFrame, np.ndarray] = None, + variables: pd.DataFrame = None, prior_net_structure: pd.DataFrame = None): + """Constructor + """ + self._file_path = file_path + self._df_samples_list = trajectory_list + self._concatenated_samples = [] + self._df_variables = variables + self._df_structure = prior_net_structure + self._sorter = None + super().__init__() + + @abstractmethod + def build_sorter(self, trajecory_header: object) -> typing.List: + """Initializes the ``_sorter`` class member from a trajectory dataframe, exctracting the header of the frame + and keeping ONLY the variables symbolic labels, cutting out the time label in the header. + + :param trajecory_header: an object that will be used to define the header + :type trajecory_header: object + :return: A list containing the processed header. + :rtype: List + """ + pass + + def compute_row_delta_sigle_samples_frame(self, sample_frame: pd.DataFrame, + columns_header: typing.List, shifted_cols_header: typing.List) \ + -> pd.DataFrame: + """Computes the difference between each value present in th time column. + Copies and shift by one position up all the values present in the remaining columns. + + :param sample_frame: the traj to be processed + :type sample_frame: pandas.Dataframe + :param columns_header: the original header of sample_frame + :type columns_header: List + :param shifted_cols_header: a copy of columns_header with changed names of the contents + :type shifted_cols_header: List + :return: The processed dataframe + :rtype: pandas.Dataframe + + .. warning:: + the Dataframe ``sample_frame`` has to follow the column structure of this header: + Header of sample_frame = [Time | Variable values] + """ + sample_frame = copy.deepcopy(sample_frame) + sample_frame.iloc[:, 0] = sample_frame.iloc[:, 0].diff().shift(-1) + shifted_cols = sample_frame[columns_header].shift(-1).fillna(0).astype('int32') + shifted_cols.columns = shifted_cols_header + sample_frame = sample_frame.assign(**shifted_cols) + sample_frame.drop(sample_frame.tail(1).index, inplace=True) + return sample_frame + + def compute_row_delta_in_all_samples_frames(self, df_samples_list: typing.List) -> None: + """Calls the method ``compute_row_delta_sigle_samples_frame`` on every dataframe present in the list + ``df_samples_list``. + Concatenates the result in the dataframe ``concatanated_samples`` + + :param df_samples_list: the datframe's list to be processed and concatenated + :type df_samples_list: List + + .. warning:: + The Dataframe sample_frame has to follow the column structure of this header: + Header of sample_frame = [Time | Variable values] + The class member self._sorter HAS to be properly INITIALIZED (See class members definition doc) + .. 
note:: + After the call of this method the class member ``concatanated_samples`` will contain all processed + and merged trajectories + """ + if not self._sorter: + raise RuntimeError("The class member self._sorter has to be INITIALIZED!") + shifted_cols_header = [s + "S" for s in self._sorter] + compute_row_delta = self.compute_row_delta_sigle_samples_frame + proc_samples_list = [compute_row_delta(sample, self._sorter, shifted_cols_header) + for sample in df_samples_list] + self._concatenated_samples = pd.concat(proc_samples_list) + + complete_header = self._sorter[:] + complete_header.insert(0,'Time') + complete_header.extend(shifted_cols_header) + self._concatenated_samples = self._concatenated_samples[complete_header] + + def build_list_of_samples_array(self, concatenated_sample: pd.DataFrame) -> typing.List: + """Builds a List containing the the delta times numpy array, and the complete transitions matrix + + :param concatenated_sample: the dataframe/array from which the time, and transitions matrix have to be extracted + and converted + :type concatenated_sample: pandas.Dataframe + :return: the resulting list of numpy arrays + :rtype: List + """ + + concatenated_array = concatenated_sample.to_numpy() + columns_list = [concatenated_array[:, 0], concatenated_array[:, 1:].astype(int)] + + return columns_list + + def clear_concatenated_frame(self) -> None: + """Removes all values in the dataframe concatenated_samples. + """ + if isinstance(self._concatenated_samples, pd.DataFrame): + self._concatenated_samples = self._concatenated_samples.iloc[0:0] + + @abstractmethod + def dataset_id(self) -> object: + """If the original dataset contains multiple dataset, this method returns a unique id to identify the current + dataset + """ + pass + + @property + def concatenated_samples(self) -> pd.DataFrame: + return self._concatenated_samples + + @property + def variables(self) -> pd.DataFrame: + return self._df_variables + + @property + def structure(self) -> pd.DataFrame: + return self._df_structure + + @property + def sorter(self) -> typing.List: + return self._sorter + + @property + def file_path(self) -> str: + return self._file_path diff --git a/PyCTBN/build/lib/PyCTBN/utility/cache.py b/PyCTBN/build/lib/PyCTBN/utility/cache.py new file mode 100644 index 0000000..8e0369b --- /dev/null +++ b/PyCTBN/build/lib/PyCTBN/utility/cache.py @@ -0,0 +1,58 @@ + +import typing + +from ..structure_graph.set_of_cims import SetOfCims + + +class Cache: + """This class acts as a cache of ``SetOfCims`` objects for a node. + + :__list_of_sets_of_parents: a list of ``Sets`` objects of the parents to which the cim in cache at SAME + index is related + :__actual_cache: a list of setOfCims objects + """ + + def __init__(self): + """Constructor Method + """ + self._list_of_sets_of_parents = [] + self._actual_cache = [] + + def find(self, parents_comb: typing.Set): #typing.Union[typing.Set, str] + """ + Tries to find in cache given the symbolic parents combination ``parents_comb`` the ``SetOfCims`` + related to that ``parents_comb``. + + :param parents_comb: the parents related to that ``SetOfCims`` + :type parents_comb: Set + :return: A ``SetOfCims`` object if the ``parents_comb`` index is found in ``__list_of_sets_of_parents``. + None otherwise. 
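The row-delta preprocessing implemented by ``compute_row_delta_sigle_samples_frame`` above is easier to see on a toy trajectory; this standalone pandas sketch reproduces the same transformation (column names and values are hypothetical):

import pandas as pd

sample = pd.DataFrame({'Time': [0.0, 0.4, 1.0, 1.7],
                       'X':    [0,   0,   1,   1],
                       'Y':    [1,   0,   0,   1]})

out = sample.copy()
out['Time'] = out['Time'].diff().shift(-1)                   # delta to the next timestamp
shifted = out[['X', 'Y']].shift(-1).fillna(0).astype('int32')
shifted.columns = ['XS', 'YS']                               # "next state" columns
out = out.assign(**shifted)
out.drop(out.tail(1).index, inplace=True)                    # last row has no successor
print(out)
#    Time  X  Y  XS  YS
# 0   0.4  0  1   0   0
# 1   0.6  0  0   1   0
# 2   0.7  1  0   1   1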
+ :rtype: SetOfCims + """ + try: + #print("Cache State:", self.list_of_sets_of_indxs) + #print("Look For:", parents_comb) + result = self._actual_cache[self._list_of_sets_of_parents.index(parents_comb)] + #print("CACHE HIT!!!!", parents_comb) + return result + except ValueError: + return None + + def put(self, parents_comb: typing.Set, socim: SetOfCims): + """Place in cache the ``SetOfCims`` object, and the related symbolic index ``parents_comb`` in + ``__list_of_sets_of_parents``. + + :param parents_comb: the symbolic set index + :type parents_comb: Set + :param socim: the related SetOfCims object + :type socim: SetOfCims + """ + #print("Putting in cache:", parents_comb) + self._list_of_sets_of_parents.append(parents_comb) + self._actual_cache.append(socim) + + def clear(self): + """Clear the contents both of ``__actual_cache`` and ``__list_of_sets_of_parents``. + """ + del self._list_of_sets_of_parents[:] + del self._actual_cache[:] \ No newline at end of file diff --git a/PyCTBN/build/lib/PyCTBN/utility/json_importer.py b/PyCTBN/build/lib/PyCTBN/utility/json_importer.py new file mode 100644 index 0000000..edff212 --- /dev/null +++ b/PyCTBN/build/lib/PyCTBN/utility/json_importer.py @@ -0,0 +1,176 @@ +import json +import typing + +import pandas as pd + + +from .abstract_importer import AbstractImporter + + +class JsonImporter(AbstractImporter): + """Implements the abstracts methods of AbstractImporter and adds all the necessary methods to process and prepare + the data in json extension. + + :param file_path: the path of the file that contains tha data to be imported + :type file_path: string + :param samples_label: the reference key for the samples in the trajectories + :type samples_label: string + :param structure_label: the reference key for the structure of the network data + :type structure_label: string + :param variables_label: the reference key for the cardinalites of the nodes data + :type variables_label: string + :param time_key: the key used to identify the timestamps in each trajectory + :type time_key: string + :param variables_key: the key used to identify the names of the variables in the net + :type variables_key: string + :_array_indx: the index of the outer JsonArray to extract the data from + :type _array_indx: int + :_df_samples_list: a Dataframe list in which every dataframe contains a trajectory + :_raw_data: The raw contents of the json file to import + :type _raw_data: List + """ + + def __init__(self, file_path: str, samples_label: str, structure_label: str, variables_label: str, time_key: str, + variables_key: str): + """Constructor method + + .. note:: + This constructor calls also the method ``read_json_file()``, so after the construction of the object + the class member ``_raw_data`` will contain the raw imported json data. + + """ + self._samples_label = samples_label + self._structure_label = structure_label + self._variables_label = variables_label + self._time_key = time_key + self._variables_key = variables_key + self._df_samples_list = None + self._array_indx = None + super(JsonImporter, self).__init__(file_path) + self._raw_data = self.read_json_file() + + def import_data(self, indx: int) -> None: + """Implements the abstract method of :class:`AbstractImporter`. 
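A quick sketch of the ``Cache`` above; the cached payload here is a plain string rather than a real ``SetOfCims``, which is enough to show the set-keyed lookup (import path assumes the layout added by this patch):

from PyCTBN.utility.cache import Cache

cache = Cache()
cache.put({'X', 'Y'}, 'socim_for_XY')   # any payload works for the sketch; normally a SetOfCims
print(cache.find({'Y', 'X'}))           # -> 'socim_for_XY' (set comparison ignores order)
print(cache.find({'X'}))                # -> None (cache miss)
cache.clear()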
+ + :param indx: the index of the outer JsonArray to extract the data from + :type indx: int + """ + self._array_indx = indx + self._df_samples_list = self.import_trajectories(self._raw_data) + self._sorter = self.build_sorter(self._df_samples_list[0]) + self.compute_row_delta_in_all_samples_frames(self._df_samples_list) + self.clear_data_frame_list() + self._df_structure = self.import_structure(self._raw_data) + self._df_variables = self.import_variables(self._raw_data) + + def import_trajectories(self, raw_data: typing.List) -> typing.List: + """Imports the trajectories from the list of dicts ``raw_data``. + + :param raw_data: List of Dicts + :type raw_data: List + :return: List of dataframes containing all the trajectories + :rtype: List + """ + return self.normalize_trajectories(raw_data, self._array_indx, self._samples_label) + + def import_structure(self, raw_data: typing.List) -> pd.DataFrame: + """Imports in a dataframe the data in the list raw_data at the key ``_structure_label`` + + :param raw_data: List of Dicts + :type raw_data: List + :return: Dataframe containg the starting node a ending node of every arc of the network + :rtype: pandas.Dataframe + """ + return self.one_level_normalizing(raw_data, self._array_indx, self._structure_label) + + def import_variables(self, raw_data: typing.List) -> pd.DataFrame: + """Imports the data in ``raw_data`` at the key ``_variables_label``. + + :param raw_data: List of Dicts + :type raw_data: List + :return: Datframe containg the variables simbolic labels and their cardinalities + :rtype: pandas.Dataframe + """ + return self.one_level_normalizing(raw_data, self._array_indx, self._variables_label) + + def read_json_file(self) -> typing.List: + """Reads the JSON file in the path self.filePath. + + :return: The contents of the json file + :rtype: List + """ + with open(self._file_path) as f: + data = json.load(f) + return data + + def one_level_normalizing(self, raw_data: typing.List, indx: int, key: str) -> pd.DataFrame: + """Extracts the one-level nested data in the list ``raw_data`` at the index ``indx`` at the key ``key``. + + :param raw_data: List of Dicts + :type raw_data: List + :param indx: The index of the array from which the data have to be extracted + :type indx: int + :param key: the key for the Dicts from which exctract data + :type key: string + :return: A normalized dataframe + :rtype: pandas.Datframe + """ + return pd.DataFrame(raw_data[indx][key]) + + def normalize_trajectories(self, raw_data: typing.List, indx: int, trajectories_key: str) -> typing.List: + """ + Extracts the trajectories in ``raw_data`` at the index ``index`` at the key ``trajectories key``. + + :param raw_data: List of Dicts + :type raw_data: List + :param indx: The index of the array from which the data have to be extracted + :type indx: int + :param trajectories_key: the key of the trajectories objects + :type trajectories_key: string + :return: A list of daframes containg the trajectories + :rtype: List + """ + dataframe = pd.DataFrame + smps = raw_data[indx][trajectories_key] + df_samples_list = [dataframe(sample) for sample in smps] + return df_samples_list + + def build_sorter(self, sample_frame: pd.DataFrame) -> typing.List: + """Implements the abstract method build_sorter of the :class:`AbstractImporter` for this dataset. 
+ """ + columns_header = list(sample_frame.columns.values) + columns_header.remove(self._time_key) + return columns_header + + def clear_data_frame_list(self) -> None: + """Removes all values present in the dataframes in the list ``_df_samples_list``. + """ + for indx in range(len(self._df_samples_list)): + self._df_samples_list[indx] = self._df_samples_list[indx].iloc[0:0] + + def dataset_id(self) -> object: + return self._array_indx + + def import_sampled_cims(self, raw_data: typing.List, indx: int, cims_key: str) -> typing.Dict: + """Imports the synthetic CIMS in the dataset in a dictionary, using variables labels + as keys for the set of CIMS of a particular node. + + :param raw_data: List of Dicts + :type raw_data: List + :param indx: The index of the array from which the data have to be extracted + :type indx: int + :param cims_key: the key where the json object cims are placed + :type cims_key: string + :return: a dictionary containing the sampled CIMS for all the variables in the net + :rtype: Dictionary + """ + cims_for_all_vars = {} + for var in raw_data[indx][cims_key]: + sampled_cims_list = [] + cims_for_all_vars[var] = sampled_cims_list + for p_comb in raw_data[indx][cims_key][var]: + cims_for_all_vars[var].append(pd.DataFrame(raw_data[indx][cims_key][var][p_comb]).to_numpy()) + return cims_for_all_vars + + + diff --git a/PyCTBN/build/lib/PyCTBN/utility/sample_importer.py b/PyCTBN/build/lib/PyCTBN/utility/sample_importer.py new file mode 100644 index 0000000..05073c8 --- /dev/null +++ b/PyCTBN/build/lib/PyCTBN/utility/sample_importer.py @@ -0,0 +1,65 @@ +import json +import typing + +import pandas as pd +import numpy as np + +from .abstract_importer import AbstractImporter + + + +class SampleImporter(AbstractImporter): + """Implements the abstracts methods of AbstractImporter and adds all the necessary methods to process and prepare + the data loaded directly by using DataFrame + + :param trajectory_list: the data that describes the trajectories + :type trajectory_list: typing.Union[pd.DataFrame, np.ndarray, typing.List] + :param variables: the data that describes the variables with name and cardinality + :type variables: typing.Union[pd.DataFrame, np.ndarray, typing.List] + :param prior_net_structure: the data of the real structure, if it exists + :type prior_net_structure: typing.Union[pd.DataFrame, np.ndarray, typing.List] + + :_df_samples_list: a Dataframe list in which every dataframe contains a trajectory + :_raw_data: The raw contents of the json file to import + :type _raw_data: List + """ + + def __init__(self, + trajectory_list: typing.Union[pd.DataFrame, np.ndarray, typing.List] = None, + variables: typing.Union[pd.DataFrame, np.ndarray, typing.List] = None, + prior_net_structure: typing.Union[pd.DataFrame, np.ndarray,typing.List] = None): + + 'If the data are not DataFrame, it will be converted' + if isinstance(variables,list) or isinstance(variables,np.ndarray): + variables = pd.DataFrame(variables) + if isinstance(variables,list) or isinstance(variables,np.ndarray): + prior_net_structure=pd.DataFrame(prior_net_structure) + + super(SampleImporter, self).__init__(trajectory_list =trajectory_list, + variables= variables, + prior_net_structure=prior_net_structure) + + def import_data(self, header_column = None): + + if header_column is not None: + self._sorter = header_column + else: + self._sorter = self.build_sorter(self._df_samples_list[0]) + + samples_list= self._df_samples_list + + if isinstance(samples_list, np.ndarray): + samples_list = samples_list.tolist() + 
+ self.compute_row_delta_in_all_samples_frames(samples_list) + + def build_sorter(self, sample_frame: pd.DataFrame) -> typing.List: + """Implements the abstract method build_sorter of the :class:`AbstractImporter` in order to get the ordered variables list. + """ + columns_header = list(sample_frame.columns.values) + del columns_header[0] + return columns_header + + + def dataset_id(self) -> object: + pass \ No newline at end of file diff --git a/PyCTBN/build/lib/classes/__init__.py b/PyCTBN/build/lib/classes/__init__.py new file mode 100644 index 0000000..faff79c --- /dev/null +++ b/PyCTBN/build/lib/classes/__init__.py @@ -0,0 +1,8 @@ +import PyCTBN.PyCTBN.estimators +from PyCTBN.PyCTBN.estimators import * +import PyCTBN.PyCTBN.optimizers +from PyCTBN.PyCTBN.optimizers import * +import PyCTBN.PyCTBN.structure_graph +from PyCTBN.PyCTBN.structure_graph import * +import PyCTBN.PyCTBN.utility +from PyCTBN.PyCTBN.utility import * \ No newline at end of file diff --git a/PyCTBN/build/lib/classes/estimators/__init__.py b/PyCTBN/build/lib/classes/estimators/__init__.py new file mode 100644 index 0000000..112086f --- /dev/null +++ b/PyCTBN/build/lib/classes/estimators/__init__.py @@ -0,0 +1,5 @@ +from .fam_score_calculator import FamScoreCalculator +from .parameters_estimator import ParametersEstimator +from .structure_estimator import StructureEstimator +from .structure_constraint_based_estimator import StructureConstraintBasedEstimator +from .structure_score_based_estimator import StructureScoreBasedEstimator diff --git a/PyCTBN/build/lib/classes/estimators/fam_score_calculator.py b/PyCTBN/build/lib/classes/estimators/fam_score_calculator.py new file mode 100644 index 0000000..5b0b591 --- /dev/null +++ b/PyCTBN/build/lib/classes/estimators/fam_score_calculator.py @@ -0,0 +1,272 @@ + +import itertools +import json +import typing + +import networkx as nx +import numpy as np +from networkx.readwrite import json_graph + +from math import log + +from scipy.special import loggamma +from random import choice + +from ..structure_graph.set_of_cims import SetOfCims +from ..structure_graph.network_graph import NetworkGraph +from ..structure_graph.conditional_intensity_matrix import ConditionalIntensityMatrix + + +''' + +''' + + +class FamScoreCalculator: + """ + Has the task of calculating the FamScore of a node by using a Bayesian score function + """ + + def __init__(self): + #np.seterr('raise') + pass + + # region theta + + def marginal_likelihood_theta(self, + cims: ConditionalIntensityMatrix, + alpha_xu: float, + alpha_xxu: float): + """ + Calculate the FamScore value of the node identified by the label node_id + + :param cims: np.array with all the node's cims + :type cims: np.array + :param alpha_xu: hyperparameter over the CTBN’s q parameters, default to 0.1 + :type alpha_xu: float + :param alpha_xxu: distribuited hyperparameter over the CTBN’s theta parameters + :type alpha_xxu: float + + :return: the value of the marginal likelihood over theta + :rtype: float + """ + return np.sum( + [self.variable_cim_xu_marginal_likelihood_theta(cim, + alpha_xu, + alpha_xxu) + for cim in cims]) + + def variable_cim_xu_marginal_likelihood_theta(self, + cim: ConditionalIntensityMatrix, + alpha_xu: float, + alpha_xxu: float): + """ + Calculate the value of the marginal likelihood over theta given a cim + + :param cim: A conditional_intensity_matrix object with the sufficient statistics + :type cim: class:'ConditionalIntensityMatrix' + :param alpha_xu: hyperparameter over the CTBN’s q parameters, default to 0.1 + 
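For completeness, a hedged usage sketch of the ``SampleImporter`` defined above (the toy frames, labels and cardinalities are invented; the import path assumes the package layout added by this patch):

import pandas as pd
from PyCTBN.utility.sample_importer import SampleImporter

trajectories = [pd.DataFrame({'Time': [0.0, 0.5, 1.1],
                              'X':    [0, 1, 1],
                              'Y':    [1, 1, 0]})]
variables = pd.DataFrame({'Name': ['X', 'Y'], 'Value': [2, 2]})
prior_structure = pd.DataFrame({'From': ['X'], 'To': ['Y']})

importer = SampleImporter(trajectory_list=trajectories,
                          variables=variables,
                          prior_net_structure=prior_structure)
importer.import_data(['X', 'Y'])        # header_column doubles as the variables order (the sorter)
print(importer.concatenated_samples)    # time deltas plus current and shifted state columns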
:type alpha_xu: float + :param alpha_xxu: distribuited hyperparameter over the CTBN’s theta parameters + :type alpha_xxu: float + + :return: the value of the marginal likelihood over theta + :rtype: float + """ + + 'get cim length' + values = len(cim._state_residence_times) + + 'compute the marginal likelihood for the current cim' + return np.sum([ + self.single_cim_xu_marginal_likelihood_theta( + index, + cim, + alpha_xu, + alpha_xxu) + for index in range(values)]) + + def single_cim_xu_marginal_likelihood_theta(self, + index: int, + cim: ConditionalIntensityMatrix, + alpha_xu: float, + alpha_xxu: float): + """ + Calculate the marginal likelihood on q of the node when assumes a specif value + and a specif parents's assignment + + :param cim: A conditional_intensity_matrix object with the sufficient statistics + :type cim: class:'ConditionalIntensityMatrix' + :param alpha_xu: hyperparameter over the CTBN’s q parameters + :type alpha_xu: float + :param alpha_xxu: distribuited hyperparameter over the CTBN’s theta parameters + :type alpha_xxu: float + + :return: the value of the marginal likelihood over theta when the node assumes a specif value + :rtype: float + """ + + values = list(range(len(cim._state_residence_times))) + + 'remove the index because of the x != x^ condition in the summation ' + values.remove(index) + + 'uncomment for alpha xx not uniform' + #alpha_xxu = alpha_xu * cim.state_transition_matrix[index,index_x_first] / cim.state_transition_matrix[index, index]) + + return (loggamma(alpha_xu) - loggamma(alpha_xu + cim.state_transition_matrix[index, index])) \ + + \ + np.sum([self.single_internal_cim_xxu_marginal_likelihood_theta( + cim.state_transition_matrix[index,index_x_first], + alpha_xxu) + for index_x_first in values]) + + + def single_internal_cim_xxu_marginal_likelihood_theta(self, + M_xxu_suff_stats: float, + alpha_xxu: float=1): + """Calculate the second part of the marginal likelihood over theta formula + + :param M_xxu_suff_stats: value of the suffucient statistic M[xx'|u] + :type M_xxu_suff_stats: float + :param alpha_xxu: distribuited hyperparameter over the CTBN’s theta parameters + :type alpha_xxu: float + + :return: the value of the marginal likelihood over theta when the node assumes a specif value + :rtype: float + """ + return loggamma(alpha_xxu+M_xxu_suff_stats) - loggamma(alpha_xxu) + + # endregion + + # region q + + def marginal_likelihood_q(self, + cims: np.array, + tau_xu: float=0.1, + alpha_xu: float=1): + """ + Calculate the value of the marginal likelihood over q of the node identified by the label node_id + + :param cims: np.array with all the node's cims + :type cims: np.array + :param tau_xu: hyperparameter over the CTBN’s q parameters + :type tau_xu: float + :param alpha_xu: hyperparameter over the CTBN’s q parameters + :type alpha_xu: float + + + :return: the value of the marginal likelihood over q + :rtype: float + """ + + return np.sum([self.variable_cim_xu_marginal_likelihood_q(cim, tau_xu, alpha_xu) for cim in cims]) + + def variable_cim_xu_marginal_likelihood_q(self, + cim: ConditionalIntensityMatrix, + tau_xu: float=0.1, + alpha_xu: float=1): + """ + Calculate the value of the marginal likelihood over q given a cim + + :param cim: A conditional_intensity_matrix object with the sufficient statistics + :type cim: class:'ConditionalIntensityMatrix' + :param tau_xu: hyperparameter over the CTBN’s q parameters + :type tau_xu: float + :param alpha_xu: hyperparameter over the CTBN’s q parameters + :type alpha_xu: float + + + :return: the value of 
the marginal likelihood over q + :rtype: float + """ + + 'get cim length' + values=len(cim._state_residence_times) + + 'compute the marginal likelihood for the current cim' + return np.sum([ + self.single_cim_xu_marginal_likelihood_q( + cim.state_transition_matrix[index, index], + cim._state_residence_times[index], + tau_xu, + alpha_xu) + for index in range(values)]) + + + def single_cim_xu_marginal_likelihood_q(self, + M_xu_suff_stats: float, + T_xu_suff_stats: float, + tau_xu: float=0.1, + alpha_xu: float=1): + """ + Calculate the marginal likelihood on q of the node when assumes a specif value + and a specif parents's assignment + + :param M_xu_suff_stats: value of the suffucient statistic M[x|u] + :type M_xxu_suff_stats: float + :param T_xu_suff_stats: value of the suffucient statistic T[x|u] + :type T_xu_suff_stats: float + :param cim: A conditional_intensity_matrix object with the sufficient statistics + :type cim: class:'ConditionalIntensityMatrix' + :param tau_xu: hyperparameter over the CTBN’s q parameters + :type tau_xu: float + :param alpha_xu: hyperparameter over the CTBN’s q parameters + :type alpha_xu: float + + + :return: the value of the marginal likelihood of the node when assumes a specif value + :rtype: float + """ + return ( + loggamma(alpha_xu + M_xu_suff_stats + 1) + + (log(tau_xu) + * + (alpha_xu+1)) + ) \ + - \ + (loggamma(alpha_xu + 1)+( + log(tau_xu + T_xu_suff_stats) + * + (alpha_xu + M_xu_suff_stats + 1)) + ) + + # end region + + def get_fam_score(self, + cims: np.array, + tau_xu: float=0.1, + alpha_xu: float=1): + """ + Calculate the FamScore value of the node + + + :param cims: np.array with all the node's cims + :type cims: np.array + :param tau_xu: hyperparameter over the CTBN’s q parameters, default to 0.1 + :type tau_xu: float, optional + :param alpha_xu: hyperparameter over the CTBN’s q parameters, default to 1 + :type alpha_xu: float, optional + + + :return: the FamScore value of the node + :rtype: float + """ + #print("------") + #print(self.marginal_likelihood_q(cims, + # tau_xu, + # alpha_xu)) + + #print(self.marginal_likelihood_theta(cims, + # alpha_xu, + # alpha_xxu)) + 'calculate alpha_xxu as a uniform distribution' + alpha_xxu = alpha_xu /(len(cims[0]._state_residence_times) - 1) + + return self.marginal_likelihood_q(cims, + tau_xu, + alpha_xu) \ + + \ + self.marginal_likelihood_theta(cims, + alpha_xu, + alpha_xxu) diff --git a/PyCTBN/build/lib/classes/estimators/parameters_estimator.py b/PyCTBN/build/lib/classes/estimators/parameters_estimator.py new file mode 100644 index 0000000..4754d58 --- /dev/null +++ b/PyCTBN/build/lib/classes/estimators/parameters_estimator.py @@ -0,0 +1,143 @@ +import sys +sys.path.append('../') +import numpy as np + +from ..structure_graph.network_graph import NetworkGraph +from ..structure_graph.set_of_cims import SetOfCims +from ..structure_graph.trajectory import Trajectory + + +class ParametersEstimator(object): + """Has the task of computing the cims of particular node given the trajectories and the net structure + in the graph ``_net_graph``. 
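The q-term computed by ``single_cim_xu_marginal_likelihood_q`` above is a ratio of Gamma functions; a standalone numeric sketch of the same expression (the sufficient statistics below are made up):

from math import log
from scipy.special import loggamma

def log_marginal_q(M_xu, T_xu, tau_xu=0.1, alpha_xu=1.0):
    # same expression as single_cim_xu_marginal_likelihood_q
    return (loggamma(alpha_xu + M_xu + 1) + (alpha_xu + 1) * log(tau_xu)) \
         - (loggamma(alpha_xu + 1) + (alpha_xu + M_xu + 1) * log(tau_xu + T_xu))

# e.g. 12 transitions out of a state observed for a total of 3.5 time units
print(log_marginal_q(M_xu=12, T_xu=3.5))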
+ + :param trajectories: the trajectories + :type trajectories: Trajectory + :param net_graph: the net structure + :type net_graph: NetworkGraph + :_single_set_of_cims: the set of cims object that will hold the cims of the node + """ + + def __init__(self, trajectories: Trajectory, net_graph: NetworkGraph): + """Constructor Method + """ + self._trajectories = trajectories + self._net_graph = net_graph + self._single_set_of_cims = None + + def fast_init(self, node_id: str) -> None: + """Initializes all the necessary structures for the parameters estimation for the node ``node_id``. + + :param node_id: the node label + :type node_id: string + """ + p_vals = self._net_graph._aggregated_info_about_nodes_parents[2] + node_states_number = self._net_graph.get_states_number(node_id) + self._single_set_of_cims = SetOfCims(node_id, p_vals, node_states_number, self._net_graph.p_combs) + + def compute_parameters_for_node(self, node_id: str) -> SetOfCims: + """Compute the CIMS of the node identified by the label ``node_id``. + + :param node_id: the node label + :type node_id: string + :return: A SetOfCims object filled with the computed CIMS + :rtype: SetOfCims + """ + node_indx = self._net_graph.get_node_indx(node_id) + state_res_times = self._single_set_of_cims._state_residence_times + transition_matrices = self._single_set_of_cims._transition_matrices + ParametersEstimator.compute_state_res_time_for_node(self._trajectories.times, + self._trajectories.trajectory, + self._net_graph.time_filtering, + self._net_graph.time_scalar_indexing_strucure, + state_res_times) + ParametersEstimator.compute_state_transitions_for_a_node(node_indx, self._trajectories.complete_trajectory, + self._net_graph.transition_filtering, + self._net_graph.transition_scalar_indexing_structure, + transition_matrices) + self._single_set_of_cims.build_cims(state_res_times, transition_matrices) + return self._single_set_of_cims + + @staticmethod + def compute_state_res_time_for_node(times: np.ndarray, trajectory: np.ndarray, + cols_filter: np.ndarray, scalar_indexes_struct: np.ndarray, + T: np.ndarray) -> None: + """Compute the state residence times for a node and fill the matrix ``T`` with the results + + :param node_indx: the index of the node + :type node_indx: int + :param times: the times deltas vector + :type times: numpy.array + :param trajectory: the trajectory + :type trajectory: numpy.ndArray + :param cols_filter: the columns filtering structure + :type cols_filter: numpy.array + :param scalar_indexes_struct: the indexing structure + :type scalar_indexes_struct: numpy.array + :param T: the state residence times vectors + :type T: numpy.ndArray + """ + T[:] = np.bincount(np.sum(trajectory[:, cols_filter] * scalar_indexes_struct / scalar_indexes_struct[0], axis=1) + .astype(np.int), \ + times, + minlength=scalar_indexes_struct[-1]).reshape(-1, T.shape[1]) + + @staticmethod + def compute_state_transitions_for_a_node(node_indx: int, trajectory: np.ndarray, cols_filter: np.ndarray, + scalar_indexing: np.ndarray, M: np.ndarray) -> None: + """Compute the state residence times for a node and fill the matrices ``M`` with the results. 
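The bincount accumulation used by ``compute_state_res_time_for_node`` above is easier to read with an explicit flat index; this standalone sketch applies the same idea, but the indexing is hand-built here rather than taken from the graph's precomputed structures (all numbers are made up):

import numpy as np

x_states, p_states = 2, 3                            # node X (2 states), one parent P (3 states)
times  = np.array([0.5, 0.2, 0.9, 0.4, 1.0])         # time spent in each row's configuration
x_vals = np.array([0,   1,   1,   0,   1])
p_vals = np.array([0,   0,   2,   1,   2])

# Flatten (parent state, node state) to one index, then let bincount sum the time deltas.
flat_idx = p_vals * x_states + x_vals
T = np.bincount(flat_idx, weights=times, minlength=p_states * x_states).reshape(p_states, x_states)
print(T)
# [[0.5 0.2]
#  [0.4 0. ]
#  [0.  1.9]]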
+ + :param node_indx: the index of the node + :type node_indx: int + :param trajectory: the trajectory + :type trajectory: numpy.ndArray + :param cols_filter: the columns filtering structure + :type cols_filter: numpy.array + :param scalar_indexing: the indexing structure + :type scalar_indexing: numpy.array + :param M: the state transitions matrices + :type M: numpy.ndArray + """ + diag_indices = np.array([x * M.shape[1] + x % M.shape[1] for x in range(M.shape[0] * M.shape[1])], + dtype=np.int64) + trj_tmp = trajectory[trajectory[:, int(trajectory.shape[1] / 2) + node_indx].astype(np.int) >= 0] + M[:] = np.bincount(np.sum(trj_tmp[:, cols_filter] * scalar_indexing / scalar_indexing[0], axis=1).astype(np.int) + , minlength=scalar_indexing[-1]).reshape(-1, M.shape[1], M.shape[2]) + M_raveled = M.ravel() + M_raveled[diag_indices] = 0 + M_raveled[diag_indices] = np.sum(M, axis=2).ravel() + + def init_sets_cims_container(self): + self.sets_of_cims_struct = acims.SetsOfCimsContainer(self.net_graph.nodes, + self.net_graph.nodes_values, + self.net_graph.get_ordered_by_indx_parents_values_for_all_nodes(), + self.net_graph.p_combs) + + def compute_parameters(self): + #print(self.net_graph.get_nodes()) + #print(self.amalgamated_cims_struct.sets_of_cims) + #enumerate(zip(self.net_graph.get_nodes(), self.amalgamated_cims_struct.sets_of_cims)) + for indx, aggr in enumerate(zip(self.net_graph.nodes, self.sets_of_cims_struct.sets_of_cims)): + #print(self.net_graph.time_filtering[indx]) + #print(self.net_graph.time_scalar_indexing_strucure[indx]) + self.compute_state_res_time_for_node(self.net_graph.get_node_indx(aggr[0]), self.sample_path.trajectories.times, + self.sample_path.trajectories.trajectory, + self.net_graph.time_filtering[indx], + self.net_graph.time_scalar_indexing_strucure[indx], + aggr[1]._state_residence_times) + #print(self.net_graph.transition_filtering[indx]) + #print(self.net_graph.transition_scalar_indexing_structure[indx]) + self.compute_state_transitions_for_a_node(self.net_graph.get_node_indx(aggr[0]), + self.sample_path.trajectories.complete_trajectory, + self.net_graph.transition_filtering[indx], + self.net_graph.transition_scalar_indexing_structure[indx], + aggr[1]._transition_matrices) + aggr[1].build_cims(aggr[1]._state_residence_times, aggr[1]._transition_matrices) + + + + + + + + diff --git a/PyCTBN/build/lib/classes/estimators/structure_constraint_based_estimator.py b/PyCTBN/build/lib/classes/estimators/structure_constraint_based_estimator.py new file mode 100644 index 0000000..7d5721e --- /dev/null +++ b/PyCTBN/build/lib/classes/estimators/structure_constraint_based_estimator.py @@ -0,0 +1,238 @@ + +import itertools +import json +import typing + +import networkx as nx +import numpy as np +from networkx.readwrite import json_graph +import os +from scipy.stats import chi2 as chi2_dist +from scipy.stats import f as f_dist +from tqdm import tqdm + +from ..utility.cache import Cache +from ..structure_graph.conditional_intensity_matrix import ConditionalIntensityMatrix +from ..structure_graph.network_graph import NetworkGraph +from .parameters_estimator import ParametersEstimator +from .structure_estimator import StructureEstimator +from ..structure_graph.sample_path import SamplePath +from ..structure_graph.structure import Structure +from ..optimizers.constraint_based_optimizer import ConstraintBasedOptimizer + +import concurrent.futures + + + +import multiprocessing +from multiprocessing import Pool + + +class StructureConstraintBasedEstimator(StructureEstimator): + """ + Has 
the task of estimating the network structure given the trajectories in samplepath by using a constraint-based approach. + + :param sample_path: the _sample_path object containing the trajectories and the real structure + :type sample_path: SamplePath + :param exp_test_alfa: the significance level for the exponential Hp test + :type exp_test_alfa: float + :param chi_test_alfa: the significance level for the chi Hp test + :type chi_test_alfa: float + :_nodes: the nodes labels + :_nodes_vals: the nodes cardinalities + :_nodes_indxs: the nodes indexes + :_complete_graph: the complete directed graph built using the nodes labels in ``_nodes`` + :_cache: the Cache object + """ + + def __init__(self, sample_path: SamplePath, exp_test_alfa: float, chi_test_alfa: float,known_edges: typing.List= [],thumb_threshold:int = 25): + super().__init__(sample_path,known_edges) + self._exp_test_sign = exp_test_alfa + self._chi_test_alfa = chi_test_alfa + self._thumb_threshold = thumb_threshold + self._cache = Cache() + + def complete_test(self, test_parent: str, test_child: str, parent_set: typing.List, child_states_numb: int, + tot_vars_count: int, parent_indx, child_indx) -> bool: + """Performs a complete independence test on the directed graphs G1 = {test_child U parent_set} + G2 = {G1 U test_parent} (added as an additional parent of the test_child). + Generates all the necessary structures and datas to perform the tests. + + :param test_parent: the node label of the test parent + :type test_parent: string + :param test_child: the node label of the child + :type test_child: string + :param parent_set: the common parent set + :type parent_set: List + :param child_states_numb: the cardinality of the ``test_child`` + :type child_states_numb: int + :param tot_vars_count: the total number of variables in the net + :type tot_vars_count: int + :return: True iff test_child and test_parent are independent given the sep_set parent_set. 
False otherwise + :rtype: bool + """ + p_set = parent_set[:] + complete_info = parent_set[:] + complete_info.append(test_child) + + parents = np.array(parent_set) + parents = np.append(parents, test_parent) + sorted_parents = self._nodes[np.isin(self._nodes, parents)] + cims_filter = sorted_parents != test_parent + + p_set.insert(0, test_parent) + sofc2 = self._cache.find(set(p_set)) + + if not sofc2: + complete_info.append(test_parent) + bool_mask2 = np.isin(self._nodes, complete_info) + l2 = list(self._nodes[bool_mask2]) + indxs2 = self._nodes_indxs[bool_mask2] + vals2 = self._nodes_vals[bool_mask2] + eds2 = list(itertools.product(p_set, test_child)) + s2 = Structure(l2, indxs2, vals2, eds2, tot_vars_count) + g2 = NetworkGraph(s2) + g2.fast_init(test_child) + p2 = ParametersEstimator(self._sample_path.trajectories, g2) + p2.fast_init(test_child) + sofc2 = p2.compute_parameters_for_node(test_child) + self._cache.put(set(p_set), sofc2) + + del p_set[0] + sofc1 = self._cache.find(set(p_set)) + if not sofc1: + g2.remove_node(test_parent) + g2.fast_init(test_child) + p2 = ParametersEstimator(self._sample_path.trajectories, g2) + p2.fast_init(test_child) + sofc1 = p2.compute_parameters_for_node(test_child) + self._cache.put(set(p_set), sofc1) + thumb_value = 0.0 + if child_states_numb > 2: + parent_val = self._sample_path.structure.get_states_number(test_parent) + bool_mask_vals = np.isin(self._nodes, parent_set) + parents_vals = self._nodes_vals[bool_mask_vals] + thumb_value = self.compute_thumb_value(parent_val, child_states_numb, parents_vals) + for cim1, p_comb in zip(sofc1.actual_cims, sofc1.p_combs): + cond_cims = sofc2.filter_cims_with_mask(cims_filter, p_comb) + for cim2 in cond_cims: + if not self.independence_test(child_states_numb, cim1, cim2, thumb_value, parent_indx, child_indx): + return False + return True + + def independence_test(self, child_states_numb: int, cim1: ConditionalIntensityMatrix, + cim2: ConditionalIntensityMatrix, thumb_value: float, parent_indx, child_indx) -> bool: + """Compute the actual independence test using two cims. + It is performed first the exponential test and if the null hypothesis is not rejected, + it is performed also the chi_test. + + :param child_states_numb: the cardinality of the test child + :type child_states_numb: int + :param cim1: a cim belonging to the graph without test parent + :type cim1: ConditionalIntensityMatrix + :param cim2: a cim belonging to the graph with test parent + :type cim2: ConditionalIntensityMatrix + :return: True iff both tests do NOT reject the null hypothesis of independence. False otherwise. 
+ :rtype: bool + """ + M1 = cim1.state_transition_matrix + M2 = cim2.state_transition_matrix + r1s = M1.diagonal() + r2s = M2.diagonal() + C1 = cim1.cim + C2 = cim2.cim + if child_states_numb > 2: + if (np.sum(np.diagonal(M1)) / thumb_value) < self._thumb_threshold: + self._removable_edges_matrix[parent_indx][child_indx] = False + return False + F_stats = C2.diagonal() / C1.diagonal() + exp_alfa = self._exp_test_sign + for val in range(0, child_states_numb): + if F_stats[val] < f_dist.ppf(exp_alfa / 2, r1s[val], r2s[val]) or \ + F_stats[val] > f_dist.ppf(1 - exp_alfa / 2, r1s[val], r2s[val]): + return False + M1_no_diag = M1[~np.eye(M1.shape[0], dtype=bool)].reshape(M1.shape[0], -1) + M2_no_diag = M2[~np.eye(M2.shape[0], dtype=bool)].reshape( + M2.shape[0], -1) + chi_2_quantile = chi2_dist.ppf(1 - self._chi_test_alfa, child_states_numb - 1) + Ks = np.sqrt(r1s / r2s) + Ls = np.sqrt(r2s / r1s) + for val in range(0, child_states_numb): + Chi = np.sum(np.power(Ks[val] * M2_no_diag[val] - Ls[val] *M1_no_diag[val], 2) / + (M1_no_diag[val] + M2_no_diag[val])) + if Chi > chi_2_quantile: + return False + return True + + def compute_thumb_value(self, parent_val, child_val, parent_set_vals): + """Compute the value to test against the thumb_threshold. + + :param parent_val: test parent's variable cardinality + :type parent_val: int + :param child_val: test child's variable cardinality + :type child_val: int + :param parent_set_vals: the cardinalities of the nodes in the current sep-set + :type parent_set_vals: List + :return: the thumb value for the current independence test + :rtype: int + """ + df = (child_val - 1) ** 2 + df = df * parent_val + for v in parent_set_vals: + df = df * v + return df + + def one_iteration_of_CTPC_algorithm(self, var_id: str, tot_vars_count: int)-> typing.List: + """Performs an iteration of the CTPC algorithm using the node ``var_id`` as ``test_child``. + + :param var_id: the node label of the test child + :type var_id: string + """ + optimizer_obj = ConstraintBasedOptimizer( + node_id = var_id, + structure_estimator = self, + tot_vars_count = tot_vars_count) + return optimizer_obj.optimize_structure() + + + def ctpc_algorithm(self,disable_multiprocessing:bool= False ): + """Compute the CTPC algorithm over the entire net. 
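The first stage of ``independence_test`` above compares the child's exit rates with and without the candidate parent through an F distribution; a minimal standalone illustration of that decision (all statistics are invented):

from scipy.stats import f as f_dist

q1, q2 = 1.8, 2.1      # exit rates of one child state without / with the candidate parent
r1, r2 = 40, 35        # corresponding numbers of observed transitions
alpha = 0.1            # significance level of the exponential test

F = q2 / q1
lower = f_dist.ppf(alpha / 2, r1, r2)
upper = f_dist.ppf(1 - alpha / 2, r1, r2)
print(lower <= F <= upper)   # outside the band -> reject independence for this state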
+ """ + ctpc_algo = self.one_iteration_of_CTPC_algorithm + total_vars_numb = self._sample_path.total_variables_count + + n_nodes= len(self._nodes) + + total_vars_numb_array = [total_vars_numb] * n_nodes + + 'get the number of CPU' + cpu_count = multiprocessing.cpu_count() + + + + 'Remove all the edges from the structure' + self._sample_path.structure.clean_structure_edges() + + 'Estimate the best parents for each node' + #with multiprocessing.Pool(processes=cpu_count) as pool: + #with get_context("spawn").Pool(processes=cpu_count) as pool: + if disable_multiprocessing: + print("DISABILITATO") + cpu_count = 1 + list_edges_partial = [ctpc_algo(n,total_vars_numb) for n in self._nodes] + else: + with concurrent.futures.ProcessPoolExecutor(max_workers=cpu_count) as executor: + list_edges_partial = executor.map(ctpc_algo, + self._nodes, + total_vars_numb_array) + #list_edges_partial = [ctpc_algo(n,total_vars_numb) for n in self._nodes] + + return set(itertools.chain.from_iterable(list_edges_partial)) + + + def estimate_structure(self,disable_multiprocessing:bool=False): + return self.ctpc_algorithm(disable_multiprocessing=disable_multiprocessing) + + + + diff --git a/PyCTBN/build/lib/classes/estimators/structure_estimator.py b/PyCTBN/build/lib/classes/estimators/structure_estimator.py new file mode 100644 index 0000000..fbf8ea9 --- /dev/null +++ b/PyCTBN/build/lib/classes/estimators/structure_estimator.py @@ -0,0 +1,187 @@ + +import itertools +import json +import typing + +import matplotlib.pyplot as plt +import networkx as nx +import numpy as np +from networkx.readwrite import json_graph + +from abc import ABC + +import abc + +from ..utility.cache import Cache +from ..structure_graph.conditional_intensity_matrix import ConditionalIntensityMatrix +from ..structure_graph.network_graph import NetworkGraph +from .parameters_estimator import ParametersEstimator +from ..structure_graph.sample_path import SamplePath +from ..structure_graph.structure import Structure + + +class StructureEstimator(object): + """Has the task of estimating the network structure given the trajectories in ``samplepath``. 
+ + :param sample_path: the _sample_path object containing the trajectories and the real structure + :type sample_path: SamplePath + :_nodes: the nodes labels + :_nodes_vals: the nodes cardinalities + :_nodes_indxs: the nodes indexes + :_complete_graph: the complete directed graph built using the nodes labels in ``_nodes`` + """ + + def __init__(self, sample_path: SamplePath, known_edges: typing.List = None): + self._sample_path = sample_path + self._nodes = np.array(self._sample_path.structure.nodes_labels) + self._nodes_vals = self._sample_path.structure.nodes_values + self._nodes_indxs = self._sample_path.structure.nodes_indexes + self._removable_edges_matrix = self.build_removable_edges_matrix(known_edges) + self._complete_graph = StructureEstimator.build_complete_graph(self._sample_path.structure.nodes_labels) + + + def build_removable_edges_matrix(self, known_edges: typing.List): + """Builds a boolean matrix who shows if a edge could be removed or not, based on prior knowledge given: + + :param known_edges: the list of nodes labels + :type known_edges: List + :return: a boolean matrix + :rtype: np.ndarray + """ + tot_vars_count = self._sample_path.total_variables_count + complete_adj_matrix = np.full((tot_vars_count, tot_vars_count), True) + if known_edges: + for edge in known_edges: + i = self._sample_path.structure.get_node_indx(edge[0]) + j = self._sample_path.structure.get_node_indx(edge[1]) + complete_adj_matrix[i][j] = False + return complete_adj_matrix + + @staticmethod + def build_complete_graph(node_ids: typing.List) -> nx.DiGraph: + """Builds a complete directed graph (no self loops) given the nodes labels in the list ``node_ids``: + + :param node_ids: the list of nodes labels + :type node_ids: List + :return: a complete Digraph Object + :rtype: networkx.DiGraph + """ + complete_graph = nx.DiGraph() + complete_graph.add_nodes_from(node_ids) + complete_graph.add_edges_from(itertools.permutations(node_ids, 2)) + return complete_graph + + + @staticmethod + def generate_possible_sub_sets_of_size( u: typing.List, size: int, parent_label: str): + """Creates a list containing all possible subsets of the list ``u`` of size ``size``, + that do not contains a the node identified by ``parent_label``. + + :param u: the list of nodes + :type u: List + :param size: the size of the subsets + :type size: int + :param parent_label: the node to exclude in the subsets generation + :type parent_label: string + :return: an Iterator Object containing a list of lists + :rtype: Iterator + """ + list_without_test_parent = u[:] + list_without_test_parent.remove(parent_label) + return map(list, itertools.combinations(list_without_test_parent, size)) + + def save_results(self) -> None: + """Save the estimated Structure to a .json file in the path where the data are loaded from. + The file is named as the input dataset but the `results_` word is appended to the results file. 
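``generate_possible_sub_sets_of_size`` above is a thin wrapper over itertools; a quick standalone check of what it yields (node names are arbitrary):

import itertools

def subsets_without(u, size, parent_label):
    # same idea as StructureEstimator.generate_possible_sub_sets_of_size
    rest = [n for n in u if n != parent_label]
    return map(list, itertools.combinations(rest, size))

print(list(subsets_without(['X', 'Y', 'Z', 'W'], 2, 'Y')))
# [['X', 'Z'], ['X', 'W'], ['Z', 'W']]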
+ """ + res = json_graph.node_link_data(self._complete_graph) + name = self._sample_path._importer.file_path.rsplit('/', 1)[-1] + name = name.split('.', 1)[0] + name += '_' + str(self._sample_path._importer.dataset_id()) + name += '.json' + file_name = 'results_' + name + with open(file_name, 'w') as f: + json.dump(res, f) + + + def remove_diagonal_elements(self, matrix): + m = matrix.shape[0] + strided = np.lib.stride_tricks.as_strided + s0, s1 = matrix.strides + return strided(matrix.ravel()[1:], shape=(m - 1, m), strides=(s0 + s1, s1)).reshape(m, -1) + + + @abc.abstractmethod + def estimate_structure(self) -> typing.List: + """Abstract method to estimate the structure + + :return: List of estimated edges + :rtype: Typing.List + """ + pass + + + def adjacency_matrix(self) -> np.ndarray: + """Converts the estimated structure ``_complete_graph`` to a boolean adjacency matrix representation. + + :return: The adjacency matrix of the graph ``_complete_graph`` + :rtype: numpy.ndArray + """ + return nx.adj_matrix(self._complete_graph).toarray().astype(bool) + + def spurious_edges(self) -> typing.List: + """Return the spurious edges present in the estimated structure, if a prior net structure is present in + ``_sample_path.structure``. + + :return: A list containing the spurious edges + :rtype: List + """ + if not self._sample_path.has_prior_net_structure: + raise RuntimeError("Can not compute spurious edges with no prior net structure!") + real_graph = nx.DiGraph() + real_graph.add_nodes_from(self._sample_path.structure.nodes_labels) + real_graph.add_edges_from(self._sample_path.structure.edges) + return nx.difference(real_graph, self._complete_graph).edges + + def save_plot_estimated_structure_graph(self) -> None: + """Plot the estimated structure in a graphical model style. + Spurious edges are colored in red. 
+ """ + graph_to_draw = nx.DiGraph() + spurious_edges = self.spurious_edges() + non_spurious_edges = list(set(self._complete_graph.edges) - set(spurious_edges)) + print(non_spurious_edges) + edges_colors = ['red' if edge in spurious_edges else 'black' for edge in self._complete_graph.edges] + graph_to_draw.add_edges_from(spurious_edges) + graph_to_draw.add_edges_from(non_spurious_edges) + pos = nx.spring_layout(graph_to_draw, k=0.5*1/np.sqrt(len(graph_to_draw.nodes())), iterations=50,scale=10) + options = { + "node_size": 2000, + "node_color": "white", + "edgecolors": "black", + 'linewidths':2, + "with_labels":True, + "font_size":13, + 'connectionstyle': 'arc3, rad = 0.1', + "arrowsize": 15, + "arrowstyle": '<|-', + "width": 1, + "edge_color":edges_colors, + } + + nx.draw(graph_to_draw, pos, **options) + ax = plt.gca() + ax.margins(0.20) + plt.axis("off") + name = self._sample_path._importer.file_path.rsplit('/', 1)[-1] + name = name.split('.', 1)[0] + name += '_' + str(self._sample_path._importer.dataset_id()) + name += '.png' + plt.savefig(name) + plt.clf() + print("Estimated Structure Plot Saved At: ", os.path.abspath(name)) + + + + + diff --git a/PyCTBN/build/lib/classes/estimators/structure_score_based_estimator.py b/PyCTBN/build/lib/classes/estimators/structure_score_based_estimator.py new file mode 100644 index 0000000..2903db3 --- /dev/null +++ b/PyCTBN/build/lib/classes/estimators/structure_score_based_estimator.py @@ -0,0 +1,244 @@ + +import itertools +import json +import typing + +import networkx as nx +import numpy as np +from networkx.readwrite import json_graph + +from random import choice + +import concurrent.futures + +import copy + +from ..structure_graph.conditional_intensity_matrix import ConditionalIntensityMatrix +from ..structure_graph.network_graph import NetworkGraph +from .parameters_estimator import ParametersEstimator +from .structure_estimator import StructureEstimator +from ..structure_graph.sample_path import SamplePath +from ..structure_graph.structure import Structure +from .fam_score_calculator import FamScoreCalculator +from ..optimizers.hill_climbing_search import HillClimbing +from ..optimizers.tabu_search import TabuSearch + + +import multiprocessing +from multiprocessing import Pool + + + + +class StructureScoreBasedEstimator(StructureEstimator): + """ + Has the task of estimating the network structure given the trajectories in samplepath by + using a score based approach. + + :param sample_path: the _sample_path object containing the trajectories and the real structure + :type sample_path: SamplePath + :param tau_xu: hyperparameter over the CTBN’s q parameters, default to 0.1 + :type tau_xu: float, optional + :param alpha_xu: hyperparameter over the CTBN’s q parameters, default to 1 + :type alpha_xu: float, optional + :param known_edges: List of known edges, default to [] + :type known_edges: List, optional + + """ + + def __init__(self, sample_path: SamplePath, tau_xu:int=0.1, alpha_xu:int = 1,known_edges: typing.List= []): + super().__init__(sample_path,known_edges) + self.tau_xu=tau_xu + self.alpha_xu=alpha_xu + + + def estimate_structure(self, max_parents:int = None, iterations_number:int= 40, + patience:int = None, tabu_length:int = None, tabu_rules_duration:int = None, + optimizer: str = 'tabu',disable_multiprocessing:bool= False ): + """ + Compute the score-based algorithm to find the optimal structure + + :param max_parents: maximum number of parents for each variable. 
If None, disabled, default to None + :type max_parents: int, optional + :param iterations_number: maximum number of optimization algorithm's iteration, default to 40 + :type iterations_number: int, optional + :param patience: number of iteration without any improvement before to stop the search.If None, disabled, default to None + :type patience: int, optional + :param tabu_length: maximum lenght of the data structures used in the optimization process, default to None + :type tabu_length: int, optional + :param tabu_rules_duration: number of iterations in which each rule keeps its value, default to None + :type tabu_rules_duration: int, optional + :param optimizer: name of the optimizer algorithm. Possible values: 'hill' (Hill climbing),'tabu' (tabu search), defualt to 'tabu' + :type optimizer: string, optional + :param disable_multiprocessing: true if you desire to disable the multiprocessing operations, default to False + :type disable_multiprocessing: Boolean, optional + """ + 'Save the true edges structure in tuples' + true_edges = copy.deepcopy(self._sample_path.structure.edges) + true_edges = set(map(tuple, true_edges)) + + 'Remove all the edges from the structure' + self._sample_path.structure.clean_structure_edges() + + estimate_parents = self.estimate_parents + + n_nodes= len(self._nodes) + + l_max_parents= [max_parents] * n_nodes + l_iterations_number = [iterations_number] * n_nodes + l_patience = [patience] * n_nodes + l_tabu_length = [tabu_length] * n_nodes + l_tabu_rules_duration = [tabu_rules_duration] * n_nodes + l_optimizer = [optimizer] * n_nodes + + + 'get the number of CPU' + cpu_count = multiprocessing.cpu_count() + print(f"CPU COUNT: {cpu_count}") + + if disable_multiprocessing: + cpu_count = 1 + + + + + + #with get_context("spawn").Pool(processes=cpu_count) as pool: + #with multiprocessing.Pool(processes=cpu_count) as pool: + + 'Estimate the best parents for each node' + if disable_multiprocessing: + list_edges_partial = [estimate_parents(n,max_parents,iterations_number,patience,tabu_length,tabu_rules_duration,optimizer) for n in self._nodes] + else: + with concurrent.futures.ProcessPoolExecutor(max_workers=cpu_count) as executor: + list_edges_partial = executor.map(estimate_parents, + self._nodes, + l_max_parents, + l_iterations_number, + l_patience, + l_tabu_length, + l_tabu_rules_duration, + l_optimizer) + + + + #list_edges_partial = p.map(estimate_parents, self._nodes) + #list_edges_partial= estimate_parents('Q',max_parents,iterations_number,patience,tabu_length,tabu_rules_duration,optimizer) + + 'Concatenate all the edges list' + set_list_edges = set(itertools.chain.from_iterable(list_edges_partial)) + + #print('-------------------------') + + + 'calculate precision and recall' + n_missing_edges = 0 + n_added_fake_edges = 0 + + try: + n_added_fake_edges = len(set_list_edges.difference(true_edges)) + + n_missing_edges = len(true_edges.difference(set_list_edges)) + + n_true_positive = len(true_edges) - n_missing_edges + + precision = n_true_positive / (n_true_positive + n_added_fake_edges) + + recall = n_true_positive / (n_true_positive + n_missing_edges) + + + # print(f"n archi reali non trovati: {n_missing_edges}") + # print(f"n archi non reali aggiunti: {n_added_fake_edges}") + print(true_edges) + print(set_list_edges) + print(f"precision: {precision} ") + print(f"recall: {recall} ") + except Exception as e: + print(f"errore: {e}") + + return set_list_edges + + + def estimate_parents(self,node_id:str, max_parents:int = None, iterations_number:int= 40, + 
patience:int = 10, tabu_length:int = None, tabu_rules_duration:int=5, + optimizer:str = 'hill' ): + """ + Use the FamScore of a node in order to find the best parent nodes + + :param node_id: current node's id + :type node_id: string + :param max_parents: maximum number of parents for each variable. If None, disabled, default to None + :type max_parents: int, optional + :param iterations_number: maximum number of optimization algorithm's iteration, default to 40 + :type iterations_number: int, optional + :param patience: number of iteration without any improvement before to stop the search.If None, disabled, default to None + :type patience: int, optional + :param tabu_length: maximum lenght of the data structures used in the optimization process, default to None + :type tabu_length: int, optional + :param tabu_rules_duration: number of iterations in which each rule keeps its value, default to None + :type tabu_rules_duration: int, optional + :param optimizer: name of the optimizer algorithm. Possible values: 'hill' (Hill climbing),'tabu' (tabu search), defualt to 'tabu' + :type optimizer: string, optional + + :return: A list of the best edges for the currente node + :rtype: List + """ + + "choose the optimizer algotithm" + if optimizer == 'tabu': + optimizer = TabuSearch( + node_id = node_id, + structure_estimator = self, + max_parents = max_parents, + iterations_number = iterations_number, + patience = patience, + tabu_length = tabu_length, + tabu_rules_duration = tabu_rules_duration) + else: #if optimizer == 'hill': + optimizer = HillClimbing( + node_id = node_id, + structure_estimator = self, + max_parents = max_parents, + iterations_number = iterations_number, + patience = patience) + + "call the optmizer's function that calculates the current node's parents" + return optimizer.optimize_structure() + + + def get_score_from_graph(self, + graph: NetworkGraph, + node_id:str): + """ + Get the FamScore of a node + + :param node_id: current node's id + :type node_id: string + :param graph: current graph to be computed + :type graph: class:'NetworkGraph' + + + :return: The FamSCore for this graph structure + :rtype: float + """ + + 'inizialize the graph for a single node' + graph.fast_init(node_id) + + params_estimation = ParametersEstimator(self._sample_path.trajectories, graph) + + 'Inizialize and compute parameters for node' + params_estimation.fast_init(node_id) + SoCims = params_estimation.compute_parameters_for_node(node_id) + + 'calculate the FamScore for the node' + fam_score_obj = FamScoreCalculator() + + score = fam_score_obj.get_fam_score(SoCims.actual_cims,tau_xu = self.tau_xu,alpha_xu=self.alpha_xu) + + #print(f" lo score per {node_id} risulta: {score} ") + return score + + + + diff --git a/PyCTBN/build/lib/classes/optimizers/__init__.py b/PyCTBN/build/lib/classes/optimizers/__init__.py new file mode 100644 index 0000000..4162bf1 --- /dev/null +++ b/PyCTBN/build/lib/classes/optimizers/__init__.py @@ -0,0 +1,4 @@ +from .optimizer import Optimizer +from .tabu_search import TabuSearch +from .hill_climbing_search import HillClimbing +from .constraint_based_optimizer import ConstraintBasedOptimizer \ No newline at end of file diff --git a/PyCTBN/build/lib/classes/optimizers/constraint_based_optimizer.py b/PyCTBN/build/lib/classes/optimizers/constraint_based_optimizer.py new file mode 100644 index 0000000..65bc19c --- /dev/null +++ b/PyCTBN/build/lib/classes/optimizers/constraint_based_optimizer.py @@ -0,0 +1,87 @@ + +import itertools +import json +import typing + +import networkx as 
nx +import numpy as np + +from random import choice + +from abc import ABC + +import copy + + +from .optimizer import Optimizer +from ..estimators.structure_estimator import StructureEstimator +from ..structure_graph.network_graph import NetworkGraph + + +class ConstraintBasedOptimizer(Optimizer): + """ + Optimizer class that implement a CTPC Algorithm + + :param node_id: current node's id + :type node_id: string + :param structure_estimator: a structure estimator object with the information about the net + :type structure_estimator: class:'StructureEstimator' + :param tot_vars_count: number of variables in the dataset + :type tot_vars_count: int + """ + def __init__(self, + node_id:str, + structure_estimator: StructureEstimator, + tot_vars_count:int + ): + """ + Constructor + """ + super().__init__(node_id, structure_estimator) + self.tot_vars_count = tot_vars_count + + + + def optimize_structure(self): + """ + Compute Optimization process for a structure_estimator by using a CTPC Algorithm + + :return: the estimated structure for the node + :rtype: List + """ + print("##################TESTING VAR################", self.node_id) + + graph = NetworkGraph(self.structure_estimator._sample_path.structure) + + other_nodes = [node for node in self.structure_estimator._sample_path.structure.nodes_labels if node != self.node_id] + + for possible_parent in other_nodes: + graph.add_edges([(possible_parent,self.node_id)]) + + + u = other_nodes + #tests_parents_numb = len(u) + #complete_frame = self.complete_graph_frame + #test_frame = complete_frame.loc[complete_frame['To'].isin([self.node_id])] + child_states_numb = self.structure_estimator._sample_path.structure.get_states_number(self.node_id) + b = 0 + while b < len(u): + parent_indx = 0 + while parent_indx < len(u): + removed = False + test_parent = u[parent_indx] + i = self.structure_estimator._sample_path.structure.get_node_indx(test_parent) + j = self.structure_estimator._sample_path.structure.get_node_indx(self.node_id) + if self.structure_estimator._removable_edges_matrix[i][j]: + S = StructureEstimator.generate_possible_sub_sets_of_size(u, b, test_parent) + for parents_set in S: + if self.structure_estimator.complete_test(test_parent, self.node_id, parents_set, child_states_numb, self.tot_vars_count,i,j): + graph.remove_edges([(test_parent, self.node_id)]) + u.remove(test_parent) + removed = True + break + if not removed: + parent_indx += 1 + b += 1 + self.structure_estimator._cache.clear() + return graph.edges \ No newline at end of file diff --git a/PyCTBN/build/lib/classes/optimizers/hill_climbing_search.py b/PyCTBN/build/lib/classes/optimizers/hill_climbing_search.py new file mode 100644 index 0000000..6783be0 --- /dev/null +++ b/PyCTBN/build/lib/classes/optimizers/hill_climbing_search.py @@ -0,0 +1,135 @@ + +import itertools +import json +import typing + +import networkx as nx +import numpy as np + +from random import choice + +from abc import ABC + + +from .optimizer import Optimizer +from ..estimators.structure_estimator import StructureEstimator +from ..structure_graph.network_graph import NetworkGraph + + +class HillClimbing(Optimizer): + """ + Optimizer class that implement Hill Climbing Search + + + :param node_id: current node's id + :type node_id: string + :param structure_estimator: a structure estimator object with the information about the net + :type structure_estimator: class:'StructureEstimator' + :param max_parents: maximum number of parents for each variable. 
If None, disabled, default to None + :type max_parents: int, optional + :param iterations_number: maximum number of optimization algorithm's iteration, default to 40 + :type iterations_number: int, optional + :param patience: number of iteration without any improvement before to stop the search.If None, disabled, default to None + :type patience: int, optional + + + + """ + def __init__(self, + node_id:str, + structure_estimator: StructureEstimator, + max_parents:int = None, + iterations_number:int= 40, + patience:int = None + ): + """ + Constructor + """ + super().__init__(node_id, structure_estimator) + self.max_parents = max_parents + self.iterations_number = iterations_number + self.patience = patience + + + + def optimize_structure(self) -> typing.List: + """ + Compute Optimization process for a structure_estimator by using a Hill Climbing Algorithm + + :return: the estimated structure for the node + :rtype: List + """ + + #'Create the graph for the single node' + graph = NetworkGraph(self.structure_estimator._sample_path.structure) + + 'get the index for the current node' + node_index = self.structure_estimator._sample_path._structure.get_node_indx(self.node_id) + + 'list of prior edges' + prior_parents = set() + + 'Add the edges from prior knowledge' + for i in range(len(self.structure_estimator._removable_edges_matrix)): + if not self.structure_estimator._removable_edges_matrix[i][node_index]: + parent_id= self.structure_estimator._sample_path._structure.get_node_id(i) + prior_parents.add(parent_id) + + 'Add the node to the starting structure' + graph.add_edges([(parent_id, self.node_id)]) + + + + 'get all the possible parents' + other_nodes = [node for node in + self.structure_estimator._sample_path.structure.nodes_labels if + node != self.node_id and + not prior_parents.__contains__(node)] + + actual_best_score = self.structure_estimator.get_score_from_graph(graph,self.node_id) + + patince_count = 0 + for i in range(self.iterations_number): + 'choose a new random edge' + current_new_parent = choice(other_nodes) + current_edge = (current_new_parent,self.node_id) + added = False + parent_removed = None + + + if graph.has_edge(current_edge): + graph.remove_edges([current_edge]) + else: + 'check the max_parents constraint' + if self.max_parents is not None: + parents_list = graph.get_parents_by_id(self.node_id) + if len(parents_list) >= self.max_parents : + parent_removed = (choice(parents_list), self.node_id) + graph.remove_edges([parent_removed]) + graph.add_edges([current_edge]) + added = True + #print('**************************') + current_score = self.structure_estimator.get_score_from_graph(graph,self.node_id) + + + if current_score > actual_best_score: + 'update current best score' + actual_best_score = current_score + patince_count = 0 + else: + 'undo the last update' + if added: + graph.remove_edges([current_edge]) + 'If a parent was removed, add it again to the graph' + if parent_removed is not None: + graph.add_edges([parent_removed]) + else: + graph.add_edges([current_edge]) + 'update patience count' + patince_count += 1 + + if self.patience is not None and patince_count > self.patience: + break + + print(f"finito variabile: {self.node_id}") + return graph.edges \ No newline at end of file diff --git a/PyCTBN/build/lib/classes/optimizers/optimizer.py b/PyCTBN/build/lib/classes/optimizers/optimizer.py new file mode 100644 index 0000000..36445c0 --- /dev/null +++ b/PyCTBN/build/lib/classes/optimizers/optimizer.py @@ -0,0 +1,39 @@ + +import itertools +import json +import 
typing + +import networkx as nx +import numpy as np + +import abc + +from ..estimators.structure_estimator import StructureEstimator + + + +class Optimizer(abc.ABC): + """ + Interface class for all the optimizer's child PyCTBN + + :param node_id: the node label + :type node_id: string + :param structure_estimator: A structureEstimator Object to predict the structure + :type structure_estimator: class:'StructureEstimator' + + """ + + def __init__(self, node_id:str, structure_estimator: StructureEstimator): + self.node_id = node_id + self.structure_estimator = structure_estimator + + + @abc.abstractmethod + def optimize_structure(self) -> typing.List: + """ + Compute Optimization process for a structure_estimator + + :return: the estimated structure for the node + :rtype: List + """ + pass diff --git a/PyCTBN/build/lib/classes/optimizers/tabu_search.py b/PyCTBN/build/lib/classes/optimizers/tabu_search.py new file mode 100644 index 0000000..e15dd40 --- /dev/null +++ b/PyCTBN/build/lib/classes/optimizers/tabu_search.py @@ -0,0 +1,199 @@ + +import itertools +import json +import typing + +import networkx as nx +import numpy as np + +from random import choice,sample + +from abc import ABC + + +from .optimizer import Optimizer +from ..estimators.structure_estimator import StructureEstimator +from ..structure_graph.network_graph import NetworkGraph + +import queue + + +class TabuSearch(Optimizer): + """ + Optimizer class that implement Tabu Search + + + :param node_id: current node's id + :type node_id: string + :param structure_estimator: a structure estimator object with the information about the net + :type structure_estimator: class:'StructureEstimator' + :param max_parents: maximum number of parents for each variable. If None, disabled, default to None + :type max_parents: int, optional + :param iterations_number: maximum number of optimization algorithm's iteration, default to 40 + :type iterations_number: int, optional + :param patience: number of iteration without any improvement before to stop the search.If None, disabled, default to None + :type patience: int, optional + :param tabu_length: maximum lenght of the data structures used in the optimization process, default to None + :type tabu_length: int, optional + :param tabu_rules_duration: number of iterations in which each rule keeps its value, default to None + :type tabu_rules_duration: int, optional + + + """ + def __init__(self, + node_id:str, + structure_estimator: StructureEstimator, + max_parents:int = None, + iterations_number:int= 40, + patience:int = None, + tabu_length:int = None, + tabu_rules_duration = None + ): + """ + Constructor + """ + super().__init__(node_id, structure_estimator) + self.max_parents = max_parents + self.iterations_number = iterations_number + self.patience = patience + self.tabu_length = tabu_length + self.tabu_rules_duration = tabu_rules_duration + + + def optimize_structure(self) -> typing.List: + """ + Compute Optimization process for a structure_estimator by using a Hill Climbing Algorithm + + :return: the estimated structure for the node + :rtype: List + """ + print(f"tabu search is processing the structure of {self.node_id}") + + 'Create the graph for the single node' + graph = NetworkGraph(self.structure_estimator._sample_path.structure) + + 'get the index for the current node' + node_index = self.structure_estimator._sample_path._structure.get_node_indx(self.node_id) + + 'list of prior edges' + prior_parents = set() + + 'Add the edges from prior knowledge' + for i in 
range(len(self.structure_estimator._removable_edges_matrix)):
+            if not self.structure_estimator._removable_edges_matrix[i][node_index]:
+                parent_id = self.structure_estimator._sample_path._structure.get_node_id(i)
+                prior_parents.add(parent_id)
+
+                'Add the edge to the starting structure'
+                graph.add_edges([(parent_id, self.node_id)])
+
+        'get all the possible parents'
+        other_nodes = set([node for node in
+                           self.structure_estimator._sample_path.structure.nodes_labels if
+                           node != self.node_id and
+                           node not in prior_parents])
+
+        'calculate the score for the node without parents'
+        actual_best_score = self.structure_estimator.get_score_from_graph(graph, self.node_id)
+
+        'initialize tabu_length and tabu_rules_duration if None'
+        if self.tabu_length is None:
+            self.tabu_length = len(other_nodes)
+
+        if self.tabu_rules_duration is None:
+            self.tabu_rules_duration = len(other_nodes)
+
+        'initialize the data structures'
+        tabu_set = set()
+        tabu_queue = queue.Queue()
+
+        patience_count = 0
+        tabu_count = 0
+        for i in range(self.iterations_number):
+
+            current_possible_nodes = other_nodes.difference(tabu_set)
+
+            'choose a new random edge according to the tabu restriction'
+            if len(current_possible_nodes) > 0:
+                'sample requires a sequence, so the set is converted to a list first'
+                current_new_parent = sample(list(current_possible_nodes), k=1)[0]
+            else:
+                current_new_parent = tabu_queue.get()
+                tabu_set.remove(current_new_parent)
+
+            current_edge = (current_new_parent, self.node_id)
+            added = False
+            parent_removed = None
+
+            if graph.has_edge(current_edge):
+                graph.remove_edges([current_edge])
+            else:
+                'check the max_parents constraint'
+                if self.max_parents is not None:
+                    parents_list = graph.get_parents_by_id(self.node_id)
+                    if len(parents_list) >= self.max_parents:
+                        parent_removed = (choice(parents_list), self.node_id)
+                        graph.remove_edges([parent_removed])
+                graph.add_edges([current_edge])
+                added = True
+            current_score = self.structure_estimator.get_score_from_graph(graph, self.node_id)
+
+            if current_score > actual_best_score:
+                'update current best score'
+                actual_best_score = current_score
+                patience_count = 0
+            else:
+                'undo the last update'
+                if added:
+                    graph.remove_edges([current_edge])
+                    'If a parent was removed, add it again to the graph'
+                    if parent_removed is not None:
+                        graph.add_edges([parent_removed])
+                else:
+                    graph.add_edges([current_edge])
+                'update patience count'
+                patience_count += 1
+
+            if tabu_queue.qsize() >= self.tabu_length:
+                current_removed = tabu_queue.get()
+                tabu_set.remove(current_removed)
+            'Add the node to the tabu list'
+            tabu_queue.put(current_new_parent)
+            tabu_set.add(current_new_parent)
+
+            tabu_count += 1
+
+            'Every tabu_rules_duration steps remove an item from the tabu list'
+            if tabu_count % self.tabu_rules_duration == 0:
+                if tabu_queue.qsize() > 0:
+                    current_removed = tabu_queue.get()
+                    tabu_set.remove(current_removed)
+                tabu_count = 0
+
+            if self.patience is not None and patience_count > self.patience:
+                break
+
+        print(f"finito variabile: 
{self.node_id}") + return graph.edges \ No newline at end of file diff --git a/PyCTBN/build/lib/classes/structure_graph/__init__.py b/PyCTBN/build/lib/classes/structure_graph/__init__.py new file mode 100644 index 0000000..85f18a2 --- /dev/null +++ b/PyCTBN/build/lib/classes/structure_graph/__init__.py @@ -0,0 +1,6 @@ +from .conditional_intensity_matrix import ConditionalIntensityMatrix +from .network_graph import NetworkGraph +from .sample_path import SamplePath +from .set_of_cims import SetOfCims +from .structure import Structure +from .trajectory import Trajectory \ No newline at end of file diff --git a/PyCTBN/build/lib/classes/structure_graph/conditional_intensity_matrix.py b/PyCTBN/build/lib/classes/structure_graph/conditional_intensity_matrix.py new file mode 100644 index 0000000..4abfdd0 --- /dev/null +++ b/PyCTBN/build/lib/classes/structure_graph/conditional_intensity_matrix.py @@ -0,0 +1,42 @@ +import numpy as np + + +class ConditionalIntensityMatrix(object): + """Abstracts the Conditional Intesity matrix of a node as aggregation of the state residence times vector + and state transition matrix and the actual CIM matrix. + + :param state_residence_times: state residence times vector + :type state_residence_times: numpy.array + :param state_transition_matrix: the transitions count matrix + :type state_transition_matrix: numpy.ndArray + :_cim: the actual cim of the node + """ + def __init__(self, state_residence_times: np.array, state_transition_matrix: np.array): + """Constructor Method + """ + self._state_residence_times = state_residence_times + self._state_transition_matrix = state_transition_matrix + self._cim = self.state_transition_matrix.astype(np.float64) + + def compute_cim_coefficients(self) -> None: + """Compute the coefficients of the matrix _cim by using the following equality q_xx' = M[x, x'] / T[x]. + The class member ``_cim`` will contain the computed cim + """ + np.fill_diagonal(self._cim, self._cim.diagonal() * -1) + self._cim = ((self._cim.T + 1) / (self._state_residence_times + 1)).T + + @property + def state_residence_times(self) -> np.ndarray: + return self._state_residence_times + + @property + def state_transition_matrix(self) -> np.ndarray: + return self._state_transition_matrix + + @property + def cim(self) -> np.ndarray: + return self._cim + + def __repr__(self): + return 'CIM:\n' + str(self.cim) + diff --git a/PyCTBN/build/lib/classes/structure_graph/network_graph.py b/PyCTBN/build/lib/classes/structure_graph/network_graph.py new file mode 100644 index 0000000..623981d --- /dev/null +++ b/PyCTBN/build/lib/classes/structure_graph/network_graph.py @@ -0,0 +1,293 @@ + +import typing + +import networkx as nx +import numpy as np + +from .structure import Structure + + +class NetworkGraph(object): + """Abstracts the infos contained in the Structure class in the form of a directed graph. + Has the task of creating all the necessary filtering and indexing structures for parameters estimation + + :param graph_struct: the ``Structure`` object from which infos about the net will be extracted + :type graph_struct: Structure + :_graph: directed graph + :_aggregated_info_about_nodes_parents: a structure that contains all the necessary infos + about every parents of the node of which all the indexing and filtering structures will be constructed. 
+ :_time_scalar_indexing_structure: the indexing structure for state res time estimation + :_transition_scalar_indexing_structure: the indexing structure for transition computation + :_time_filtering: the columns filtering structure used in the computation of the state res times + :_transition_filtering: the columns filtering structure used in the computation of the transition + from one state to another + :_p_combs_structure: all the possible parents states combination for the node of interest + """ + + def __init__(self, graph_struct: Structure): + """Constructor Method + """ + self._graph_struct = graph_struct + self._graph = nx.DiGraph() + self._aggregated_info_about_nodes_parents = None + self._time_scalar_indexing_structure = None + self._transition_scalar_indexing_structure = None + self._time_filtering = None + self._transition_filtering = None + self._p_combs_structure = None + + def init_graph(self): + self.add_nodes(self._nodes_labels) + self.add_edges(self.graph_struct.edges) + self.aggregated_info_about_nodes_parents = self.get_ord_set_of_par_of_all_nodes() + self._fancy_indexing = self.build_fancy_indexing_structure(0) + self.build_scalar_indexing_structures() + self.build_time_columns_filtering_structure() + self.build_transition_columns_filtering_structure() + self._p_combs_structure = self.build_p_combs_structure() + + def fast_init(self, node_id: str) -> None: + """Initializes all the necessary structures for parameters estimation of the node identified by the label + node_id + + :param node_id: the label of the node + :type node_id: string + """ + self.add_nodes(self._graph_struct.nodes_labels) + self.add_edges(self._graph_struct.edges) + self._aggregated_info_about_nodes_parents = self.get_ordered_by_indx_set_of_parents(node_id) + p_indxs = self._aggregated_info_about_nodes_parents[1] + p_vals = self._aggregated_info_about_nodes_parents[2] + node_states = self.get_states_number(node_id) + node_indx = self.get_node_indx(node_id) + cols_number = self._graph_struct.total_variables_number + self._time_scalar_indexing_structure = NetworkGraph.\ + build_time_scalar_indexing_structure_for_a_node(node_states, p_vals) + self._transition_scalar_indexing_structure = NetworkGraph.\ + build_transition_scalar_indexing_structure_for_a_node(node_states, p_vals) + self._time_filtering = NetworkGraph.build_time_columns_filtering_for_a_node(node_indx, p_indxs) + self._transition_filtering = NetworkGraph.build_transition_filtering_for_a_node(node_indx, p_indxs, cols_number) + self._p_combs_structure = NetworkGraph.build_p_comb_structure_for_a_node(p_vals) + + def add_nodes(self, list_of_nodes: typing.List) -> None: + """Adds the nodes to the ``_graph`` contained in the list of nodes ``list_of_nodes``. 
+ Sets all the properties that identify a nodes (index, positional index, cardinality) + + :param list_of_nodes: the nodes to add to ``_graph`` + :type list_of_nodes: List + """ + nodes_indxs = self._graph_struct.nodes_indexes + nodes_vals = self._graph_struct.nodes_values + pos = 0 + for id, node_indx, node_val in zip(list_of_nodes, nodes_indxs, nodes_vals): + self._graph.add_node(id, indx=node_indx, val=node_val, pos_indx=pos) + pos += 1 + + def has_edge(self,edge:tuple)-> bool: + """ + Check if the graph contains a specific edge + + Parameters: + edge: a tuple that rappresents the edge + Returns: + bool + """ + return self._graph.has_edge(edge[0],edge[1]) + + def add_edges(self, list_of_edges: typing.List) -> None: + """Add the edges to the ``_graph`` contained in the list ``list_of_edges``. + + :param list_of_edges: the list containing of tuples containing the edges + :type list_of_edges: List + """ + self._graph.add_edges_from(list_of_edges) + + def remove_node(self, node_id: str) -> None: + """Remove the node ``node_id`` from all the class members. + Initialize all the filtering/indexing structures. + """ + self._graph.remove_node(node_id) + self._graph_struct.remove_node(node_id) + self.clear_indexing_filtering_structures() + + def clear_indexing_filtering_structures(self) -> None: + """Initialize all the filtering/indexing structures. + """ + self._aggregated_info_about_nodes_parents = None + self._time_scalar_indexing_structure = None + self._transition_scalar_indexing_structure = None + self._time_filtering = None + self._transition_filtering = None + self._p_combs_structure = None + + def get_ordered_by_indx_set_of_parents(self, node: str) -> typing.Tuple: + """Builds the aggregated structure that holds all the infos relative to the parent set of the node, namely + (parents_labels, parents_indexes, parents_cardinalities). + + :param node: the label of the node + :type node: string + :return: a tuple containing all the parent set infos + :rtype: Tuple + """ + parents = self.get_parents_by_id(node) + nodes = self._graph_struct.nodes_labels + d = {v: i for i, v in enumerate(nodes)} + sorted_parents = sorted(parents, key=lambda v: d[v]) + get_node_indx = self.get_node_indx + p_indxes = [get_node_indx(node) for node in sorted_parents] + p_values = [self.get_states_number(node) for node in sorted_parents] + return sorted_parents, p_indxes, p_values + + def remove_edges(self, list_of_edges: typing.List) -> None: + """Remove the edges to the graph contained in the list list_of_edges. + + :param list_of_edges: The edges to remove from the graph + :type list_of_edges: List + """ + self._graph.remove_edges_from(list_of_edges) + + @staticmethod + def build_time_scalar_indexing_structure_for_a_node(node_states: int, + parents_vals: typing.List) -> np.ndarray: + """Builds an indexing structure for the computation of state residence times values. + + :param node_states: the node cardinality + :type node_states: int + :param parents_vals: the caridinalites of the node's parents + :type parents_vals: List + :return: The time indexing structure + :rtype: numpy.ndArray + """ + T_vector = np.array([node_states]) + T_vector = np.append(T_vector, parents_vals) + T_vector = T_vector.cumprod().astype(np.int) + return T_vector + + @staticmethod + def build_transition_scalar_indexing_structure_for_a_node(node_states_number: int, parents_vals: typing.List) \ + -> np.ndarray: + """Builds an indexing structure for the computation of state transitions values. 
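+        The returned vector is the cumulative product of ``[node_states_number, node_states_number, *parents_vals]``;
+        it is used to map a state transition together with a parents combination to a flat index.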
+ + :param node_states_number: the node cardinality + :type node_states_number: int + :param parents_vals: the caridinalites of the node's parents + :type parents_vals: List + :return: The transition indexing structure + :rtype: numpy.ndArray + """ + M_vector = np.array([node_states_number, + node_states_number]) + M_vector = np.append(M_vector, parents_vals) + M_vector = M_vector.cumprod().astype(np.int) + return M_vector + + @staticmethod + def build_time_columns_filtering_for_a_node(node_indx: int, p_indxs: typing.List) -> np.ndarray: + """ + Builds the necessary structure to filter the desired columns indicated by ``node_indx`` and ``p_indxs`` + in the dataset. + This structute will be used in the computation of the state res times. + :param node_indx: the index of the node + :type node_indx: int + :param p_indxs: the indexes of the node's parents + :type p_indxs: List + :return: The filtering structure for times estimation + :rtype: numpy.ndArray + """ + return np.append(np.array([node_indx], dtype=np.int), p_indxs).astype(np.int) + + @staticmethod + def build_transition_filtering_for_a_node(node_indx: int, p_indxs: typing.List, nodes_number: int) \ + -> np.ndarray: + """Builds the necessary structure to filter the desired columns indicated by ``node_indx`` and ``p_indxs`` + in the dataset. + This structure will be used in the computation of the state transitions values. + :param node_indx: the index of the node + :type node_indx: int + :param p_indxs: the indexes of the node's parents + :type p_indxs: List + :param nodes_number: the total number of nodes in the dataset + :type nodes_number: int + :return: The filtering structure for transitions estimation + :rtype: numpy.ndArray + """ + return np.array([node_indx + nodes_number, node_indx, *p_indxs], dtype=np.int) + + @staticmethod + def build_p_comb_structure_for_a_node(parents_values: typing.List) -> np.ndarray: + """ + Builds the combinatorial structure that contains the combinations of all the values contained in + ``parents_values``. 
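+        For example, ``parents_values = [2, 3]`` produces a 6 x 2 grid with one row for every combination of the
+        parents' states.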
+ + :param parents_values: the cardinalities of the nodes + :type parents_values: List + :return: A numpy matrix containing a grid of the combinations + :rtype: numpy.ndArray + """ + tmp = [] + for val in parents_values: + tmp.append([x for x in range(val)]) + if len(parents_values) > 0: + parents_comb = np.array(np.meshgrid(*tmp)).T.reshape(-1, len(parents_values)) + if len(parents_values) > 1: + tmp_comb = parents_comb[:, 1].copy() + parents_comb[:, 1] = parents_comb[:, 0].copy() + parents_comb[:, 0] = tmp_comb + else: + parents_comb = np.array([[]], dtype=np.int) + return parents_comb + + def get_parents_by_id(self, node_id) -> typing.List: + """Returns a list of labels of the parents of the node ``node_id`` + + :param node_id: the node label + :type node_id: string + :return: a List of labels of the parents + :rtype: List + """ + return list(self._graph.predecessors(node_id)) + + def get_states_number(self, node_id) -> int: + return self._graph.nodes[node_id]['val'] + + def get_node_indx(self, node_id) -> int: + return nx.get_node_attributes(self._graph, 'indx')[node_id] + + def get_positional_node_indx(self, node_id) -> int: + return self._graph.nodes[node_id]['pos_indx'] + + @property + def nodes(self) -> typing.List: + return self._graph_struct.nodes_labels + + @property + def edges(self) -> typing.List: + return list(self._graph.edges) + + @property + def nodes_indexes(self) -> np.ndarray: + return self._graph_struct.nodes_indexes + + @property + def nodes_values(self) -> np.ndarray: + return self._graph_struct.nodes_values + + @property + def time_scalar_indexing_strucure(self) -> np.ndarray: + return self._time_scalar_indexing_structure + + @property + def time_filtering(self) -> np.ndarray: + return self._time_filtering + + @property + def transition_scalar_indexing_structure(self) -> np.ndarray: + return self._transition_scalar_indexing_structure + + @property + def transition_filtering(self) -> np.ndarray: + return self._transition_filtering + + @property + def p_combs(self) -> np.ndarray: + return self._p_combs_structure diff --git a/PyCTBN/build/lib/classes/structure_graph/sample_path.py b/PyCTBN/build/lib/classes/structure_graph/sample_path.py new file mode 100644 index 0000000..80b51d9 --- /dev/null +++ b/PyCTBN/build/lib/classes/structure_graph/sample_path.py @@ -0,0 +1,91 @@ + + +import numpy as np +import pandas as pd + +from .structure import Structure +from .trajectory import Trajectory +from ..utility.abstract_importer import AbstractImporter + + + +class SamplePath(object): + """Aggregates all the informations about the trajectories, the real structure of the sampled net and variables + cardinalites. Has the task of creating the objects ``Trajectory`` and ``Structure`` that will + contain the mentioned data. 
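+    The constructor checks that the importer already contains the processed variables and concatenated samples,
+    and raises a ``RuntimeError`` otherwise.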
+ + :param importer: the Importer object which contains the imported and processed data + :type importer: AbstractImporter + :_trajectories: the ``Trajectory`` object that will contain all the concatenated trajectories + :_structure: the ``Structure`` Object that will contain all the structural infos about the net + :_total_variables_count: the number of variables in the net + """ + def __init__(self, importer: AbstractImporter): + """Constructor Method + """ + self._importer = importer + if self._importer._df_variables is None or self._importer._concatenated_samples is None: + raise RuntimeError('The importer object has to contain the all processed data!') + if self._importer._df_variables.empty: + raise RuntimeError('The importer object has to contain the all processed data!') + if isinstance(self._importer._concatenated_samples, pd.DataFrame): + if self._importer._concatenated_samples.empty: + raise RuntimeError('The importer object has to contain the all processed data!') + if isinstance(self._importer._concatenated_samples, np.ndarray): + if self._importer._concatenated_samples.size == 0: + raise RuntimeError('The importer object has to contain the all processed data!') + self._trajectories = None + self._structure = None + self._total_variables_count = None + + def build_trajectories(self) -> None: + """Builds the Trajectory object that will contain all the trajectories. + Clears all the unused dataframes in ``_importer`` Object + """ + self._trajectories = \ + Trajectory(self._importer.build_list_of_samples_array(self._importer.concatenated_samples), + len(self._importer.sorter) + 1) + self._importer.clear_concatenated_frame() + + def build_structure(self) -> None: + """ + Builds the ``Structure`` object that aggregates all the infos about the net. + """ + if self._importer.sorter != self._importer.variables.iloc[:, 0].to_list(): + raise RuntimeError("The Dataset columns order have to match the order of labels in the variables Frame!") + + self._total_variables_count = len(self._importer.sorter) + labels = self._importer.variables.iloc[:, 0].to_list() + indxs = self._importer.variables.index.to_numpy() + vals = self._importer.variables.iloc[:, 1].to_numpy() + if self._importer.structure is None or self._importer.structure.empty: + edges = [] + else: + edges = list(self._importer.structure.to_records(index=False)) + self._structure = Structure(labels, indxs, vals, edges, + self._total_variables_count) + + def clear_memory(self): + self._importer._raw_data = [] + + @property + def trajectories(self) -> Trajectory: + return self._trajectories + + @property + def structure(self) -> Structure: + return self._structure + + @property + def total_variables_count(self) -> int: + return self._total_variables_count + + @property + def has_prior_net_structure(self) -> bool: + return bool(self._structure.edges) + + + + + + diff --git a/PyCTBN/build/lib/classes/structure_graph/set_of_cims.py b/PyCTBN/build/lib/classes/structure_graph/set_of_cims.py new file mode 100644 index 0000000..81caff5 --- /dev/null +++ b/PyCTBN/build/lib/classes/structure_graph/set_of_cims.py @@ -0,0 +1,97 @@ + + +import typing + +import numpy as np + +from .conditional_intensity_matrix import ConditionalIntensityMatrix + + +class SetOfCims(object): + """Aggregates all the CIMS of the node identified by the label _node_id. 
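+    One ``ConditionalIntensityMatrix`` object is kept for every combination of the parents' states
+    (a single CIM when the node has no parents).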
+ + :param node_id: the node label + :type node_ind: string + :param parents_states_number: the cardinalities of the parents + :type parents_states_number: List + :param node_states_number: the caridinality of the node + :type node_states_number: int + :param p_combs: the p_comb structure bound to this node + :type p_combs: numpy.ndArray + :_state_residence_time: matrix containing all the state residence time vectors for the node + :_transition_matrices: matrix containing all the transition matrices for the node + :_actual_cims: the cims of the node + """ + + def __init__(self, node_id: str, parents_states_number: typing.List, node_states_number: int, p_combs: np.ndarray): + """Constructor Method + """ + self._node_id = node_id + self._parents_states_number = parents_states_number + self._node_states_number = node_states_number + self._actual_cims = [] + self._state_residence_times = None + self._transition_matrices = None + self._p_combs = p_combs + self.build_times_and_transitions_structures() + + def build_times_and_transitions_structures(self) -> None: + """Initializes at the correct dimensions the state residence times matrix and the state transition matrices. + """ + if not self._parents_states_number: + self._state_residence_times = np.zeros((1, self._node_states_number), dtype=np.float) + self._transition_matrices = np.zeros((1, self._node_states_number, self._node_states_number), dtype=np.int) + else: + self._state_residence_times = \ + np.zeros((np.prod(self._parents_states_number), self._node_states_number), dtype=np.float) + self._transition_matrices = np.zeros([np.prod(self._parents_states_number), self._node_states_number, + self._node_states_number], dtype=np.int) + + def build_cims(self, state_res_times: np.ndarray, transition_matrices: np.ndarray) -> None: + """Build the ``ConditionalIntensityMatrix`` objects given the state residence times and transitions matrices. + Compute the cim coefficients.The class member ``_actual_cims`` will contain the computed cims. + + :param state_res_times: the state residence times matrix + :type state_res_times: numpy.ndArray + :param transition_matrices: the transition matrices + :type transition_matrices: numpy.ndArray + """ + for state_res_time_vector, transition_matrix in zip(state_res_times, transition_matrices): + cim_to_add = ConditionalIntensityMatrix(state_res_time_vector, transition_matrix) + cim_to_add.compute_cim_coefficients() + self._actual_cims.append(cim_to_add) + self._actual_cims = np.array(self._actual_cims) + self._transition_matrices = None + self._state_residence_times = None + + def filter_cims_with_mask(self, mask_arr: np.ndarray, comb: typing.List) -> np.ndarray: + """Filter the cims contained in the array ``_actual_cims`` given the boolean mask ``mask_arr`` and the index + ``comb``. 
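+        For example, with two parents and ``mask_arr = [True, False]``, only the first column of ``_p_combs`` is
+        compared against ``comb`` when selecting the CIMs to return.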
+ + :param mask_arr: the boolean mask that indicates which parent to consider + :type mask_arr: numpy.array + :param comb: the state/s of the filtered parents + :type comb: numpy.array + :return: Array of ``ConditionalIntensityMatrix`` objects + :rtype: numpy.array + """ + if mask_arr.size <= 1: + return self._actual_cims + else: + flat_indxs = np.argwhere(np.all(self._p_combs[:, mask_arr] == comb, axis=1)).ravel() + return self._actual_cims[flat_indxs] + + @property + def actual_cims(self) -> np.ndarray: + return self._actual_cims + + @property + def p_combs(self) -> np.ndarray: + return self._p_combs + + def get_cims_number(self): + return len(self._actual_cims) + + + + diff --git a/PyCTBN/build/lib/classes/structure_graph/structure.py b/PyCTBN/build/lib/classes/structure_graph/structure.py new file mode 100644 index 0000000..a9d60cc --- /dev/null +++ b/PyCTBN/build/lib/classes/structure_graph/structure.py @@ -0,0 +1,124 @@ + +import typing as ty + +import numpy as np + + +class Structure(object): + """Contains all the infos about the network structure(nodes labels, nodes caridinalites, edges, indexes) + + :param nodes_labels_list: the symbolic names of the variables + :type nodes_labels_list: List + :param nodes_indexes_arr: the indexes of the nodes + :type nodes_indexes_arr: numpy.ndArray + :param nodes_vals_arr: the cardinalites of the nodes + :type nodes_vals_arr: numpy.ndArray + :param edges_list: the edges of the network + :type edges_list: List + :param total_variables_number: the total number of variables in the dataset + :type total_variables_number: int + """ + + def __init__(self, nodes_labels_list: ty.List, nodes_indexes_arr: np.ndarray, nodes_vals_arr: np.ndarray, + edges_list: ty.List, total_variables_number: int): + """Constructor Method + """ + self._nodes_labels_list = nodes_labels_list + self._nodes_indexes_arr = nodes_indexes_arr + self._nodes_vals_arr = nodes_vals_arr + self._edges_list = edges_list + self._total_variables_number = total_variables_number + + def remove_node(self, node_id: str) -> None: + """Remove the node ``node_id`` from all the class members. + The class member ``_total_variables_number`` since it refers to the total number of variables in the dataset. + """ + node_positional_indx = self._nodes_labels_list.index(node_id) + del self._nodes_labels_list[node_positional_indx] + self._nodes_indexes_arr = np.delete(self._nodes_indexes_arr, node_positional_indx) + self._nodes_vals_arr = np.delete(self._nodes_vals_arr, node_positional_indx) + self._edges_list = [(from_node, to_node) for (from_node, to_node) in self._edges_list if (from_node != node_id + and to_node != node_id)] + + @property + def edges(self) -> ty.List: + return self._edges_list + + @property + def nodes_labels(self) -> ty.List: + return self._nodes_labels_list + + @property + def nodes_indexes(self) -> np.ndarray: + return self._nodes_indexes_arr + + @property + def nodes_values(self) -> np.ndarray: + return self._nodes_vals_arr + + @property + def total_variables_number(self) -> int: + return self._total_variables_number + + def get_node_id(self, node_indx: int) -> str: + """Given the ``node_index`` returns the node label. 
+
+        :param node_indx: the node index
+        :type node_indx: int
+        :return: the node label
+        :rtype: string
+        """
+        return self._nodes_labels_list[node_indx]
+
+    def clean_structure_edges(self):
+        self._edges_list = list()
+
+    def add_edge(self, edge: tuple):
+        self._edges_list.append(edge)
+
+    def remove_edge(self, edge: tuple):
+        self._edges_list.remove(edge)
+
+    def contains_edge(self, edge: tuple) -> bool:
+        return edge in self._edges_list
+
+    def get_node_indx(self, node_id: str) -> int:
+        """Given the node label ``node_id`` returns the node index.
+
+        :param node_id: the node label
+        :type node_id: string
+        :return: the node index
+        :rtype: int
+        """
+        pos_indx = self._nodes_labels_list.index(node_id)
+        return self._nodes_indexes_arr[pos_indx]
+
+    def get_positional_node_indx(self, node_id: str) -> int:
+        return self._nodes_labels_list.index(node_id)
+
+    def get_states_number(self, node: str) -> int:
+        """Given the node label ``node`` returns the cardinality of the node.
+
+        :param node: the node label
+        :type node: string
+        :return: the node cardinality
+        :rtype: int
+        """
+        pos_indx = self._nodes_labels_list.index(node)
+        return self._nodes_vals_arr[pos_indx]
+
+    def __repr__(self):
+        return "Variables:\n" + str(self._nodes_labels_list) + "\nValues:\n" + str(self._nodes_vals_arr) + \
+               "\nEdges: \n" + str(self._edges_list)
+
+    def __eq__(self, other):
+        """Overrides the default implementation"""
+        if isinstance(other, Structure):
+            return set(self._nodes_labels_list) == set(other._nodes_labels_list) and \
+                   np.array_equal(self._nodes_vals_arr, other._nodes_vals_arr) and \
+                   np.array_equal(self._nodes_indexes_arr, other._nodes_indexes_arr) and \
+                   self._edges_list == other._edges_list
+
+        return False
+
diff --git a/PyCTBN/build/lib/classes/structure_graph/trajectory.py b/PyCTBN/build/lib/classes/structure_graph/trajectory.py
new file mode 100644
index 0000000..36899b3
--- /dev/null
+++ b/PyCTBN/build/lib/classes/structure_graph/trajectory.py
@@ -0,0 +1,45 @@
+
+import typing
+
+import numpy as np
+
+
+class Trajectory(object):
+    """ Abstracts the information about a complete set of trajectories, represented as a numpy array of doubles
+    (the time deltas) and a numpy matrix of ints (the changes of states).
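+    The values matrix stores, side by side, the current states and the shifted next states; the ``trajectory``
+    property therefore exposes only the first ``original_cols_number - 1`` columns.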
+ + :param list_of_columns: the list containing the times array and values matrix + :type list_of_columns: List + :param original_cols_number: total number of cols in the data + :type original_cols_number: int + :_actual_trajectory: the trajectory containing also the duplicated/shifted values + :_times: the array containing the time deltas + """ + + def __init__(self, list_of_columns: typing.List, original_cols_number: int): + """Constructor Method + """ + self._times = list_of_columns[0] + self._actual_trajectory = list_of_columns[1] + self._original_cols_number = original_cols_number + + @property + def trajectory(self) -> np.ndarray: + return self._actual_trajectory[:, :self._original_cols_number - 1] + + @property + def complete_trajectory(self) -> np.ndarray: + return self._actual_trajectory + + @property + def times(self): + return self._times + + def size(self): + return self._actual_trajectory.shape[0] + + def __repr__(self): + return "Complete Trajectory Rows: " + str(self.size()) + "\n" + self.complete_trajectory.__repr__() + \ + "\nTimes Rows:" + str(self.times.size) + "\n" + self.times.__repr__() + + diff --git a/PyCTBN/build/lib/classes/utility/__init__.py b/PyCTBN/build/lib/classes/utility/__init__.py new file mode 100644 index 0000000..f79749c --- /dev/null +++ b/PyCTBN/build/lib/classes/utility/__init__.py @@ -0,0 +1,4 @@ +from .abstract_importer import AbstractImporter +from .cache import Cache +from .json_importer import JsonImporter +from .sample_importer import SampleImporter \ No newline at end of file diff --git a/PyCTBN/build/lib/classes/utility/abstract_importer.py b/PyCTBN/build/lib/classes/utility/abstract_importer.py new file mode 100644 index 0000000..1cad352 --- /dev/null +++ b/PyCTBN/build/lib/classes/utility/abstract_importer.py @@ -0,0 +1,164 @@ + +import typing +from abc import ABC, abstractmethod + +import numpy as np +import pandas as pd + +import copy + +#from sklearn.utils import resample + + +class AbstractImporter(ABC): + """Abstract class that exposes all the necessary methods to process the trajectories and the net structure. + + :param file_path: the file path, or dataset name if you import already processed data + :type file_path: str + :param trajectory_list: Dataframe or numpy array containing the concatenation of all the processed trajectories + :type trajectory_list: typing.Union[pandas.DataFrame, numpy.ndarray] + :param variables: Dataframe containing the nodes labels and cardinalities + :type variables: pandas.DataFrame + :prior_net_structure: Dataframe containing the structure of the network (edges) + :type prior_net_structure: pandas.DataFrame + :_sorter: A list containing the variables labels in the SAME order as the columns in ``concatenated_samples`` + + .. warning:: + The parameters ``variables`` and ``prior_net_structure`` HAVE to be properly constructed + as Pandas Dataframes with the following structure: + Header of _df_structure = [From_Node | To_Node] + Header of _df_variables = [Variable_Label | Variable_Cardinality] + See the tutorial on how to construct a correct ``concatenated_samples`` Dataframe/ndarray. + + .. 
note:: + See :class:``JsonImporter`` for an example implementation + + """ + + def __init__(self, file_path: str = None, trajectory_list: typing.Union[pd.DataFrame, np.ndarray] = None, + variables: pd.DataFrame = None, prior_net_structure: pd.DataFrame = None): + """Constructor + """ + self._file_path = file_path + self._df_samples_list = trajectory_list + self._concatenated_samples = [] + self._df_variables = variables + self._df_structure = prior_net_structure + self._sorter = None + super().__init__() + + @abstractmethod + def build_sorter(self, trajecory_header: object) -> typing.List: + """Initializes the ``_sorter`` class member from a trajectory dataframe, exctracting the header of the frame + and keeping ONLY the variables symbolic labels, cutting out the time label in the header. + + :param trajecory_header: an object that will be used to define the header + :type trajecory_header: object + :return: A list containing the processed header. + :rtype: List + """ + pass + + def compute_row_delta_sigle_samples_frame(self, sample_frame: pd.DataFrame, + columns_header: typing.List, shifted_cols_header: typing.List) \ + -> pd.DataFrame: + """Computes the difference between each value present in th time column. + Copies and shift by one position up all the values present in the remaining columns. + + :param sample_frame: the traj to be processed + :type sample_frame: pandas.Dataframe + :param columns_header: the original header of sample_frame + :type columns_header: List + :param shifted_cols_header: a copy of columns_header with changed names of the contents + :type shifted_cols_header: List + :return: The processed dataframe + :rtype: pandas.Dataframe + + .. warning:: + the Dataframe ``sample_frame`` has to follow the column structure of this header: + Header of sample_frame = [Time | Variable values] + """ + sample_frame = copy.deepcopy(sample_frame) + sample_frame.iloc[:, 0] = sample_frame.iloc[:, 0].diff().shift(-1) + shifted_cols = sample_frame[columns_header].shift(-1).fillna(0).astype('int32') + shifted_cols.columns = shifted_cols_header + sample_frame = sample_frame.assign(**shifted_cols) + sample_frame.drop(sample_frame.tail(1).index, inplace=True) + return sample_frame + + def compute_row_delta_in_all_samples_frames(self, df_samples_list: typing.List) -> None: + """Calls the method ``compute_row_delta_sigle_samples_frame`` on every dataframe present in the list + ``df_samples_list``. + Concatenates the result in the dataframe ``concatanated_samples`` + + :param df_samples_list: the datframe's list to be processed and concatenated + :type df_samples_list: List + + .. warning:: + The Dataframe sample_frame has to follow the column structure of this header: + Header of sample_frame = [Time | Variable values] + The class member self._sorter HAS to be properly INITIALIZED (See class members definition doc) + .. 
note:: + After the call of this method the class member ``concatanated_samples`` will contain all processed + and merged trajectories + """ + if not self._sorter: + raise RuntimeError("The class member self._sorter has to be INITIALIZED!") + shifted_cols_header = [s + "S" for s in self._sorter] + compute_row_delta = self.compute_row_delta_sigle_samples_frame + proc_samples_list = [compute_row_delta(sample, self._sorter, shifted_cols_header) + for sample in df_samples_list] + self._concatenated_samples = pd.concat(proc_samples_list) + + complete_header = self._sorter[:] + complete_header.insert(0,'Time') + complete_header.extend(shifted_cols_header) + self._concatenated_samples = self._concatenated_samples[complete_header] + + def build_list_of_samples_array(self, concatenated_sample: pd.DataFrame) -> typing.List: + """Builds a List containing the the delta times numpy array, and the complete transitions matrix + + :param concatenated_sample: the dataframe/array from which the time, and transitions matrix have to be extracted + and converted + :type concatenated_sample: pandas.Dataframe + :return: the resulting list of numpy arrays + :rtype: List + """ + + concatenated_array = concatenated_sample.to_numpy() + columns_list = [concatenated_array[:, 0], concatenated_array[:, 1:].astype(int)] + + return columns_list + + def clear_concatenated_frame(self) -> None: + """Removes all values in the dataframe concatenated_samples. + """ + if isinstance(self._concatenated_samples, pd.DataFrame): + self._concatenated_samples = self._concatenated_samples.iloc[0:0] + + @abstractmethod + def dataset_id(self) -> object: + """If the original dataset contains multiple dataset, this method returns a unique id to identify the current + dataset + """ + pass + + @property + def concatenated_samples(self) -> pd.DataFrame: + return self._concatenated_samples + + @property + def variables(self) -> pd.DataFrame: + return self._df_variables + + @property + def structure(self) -> pd.DataFrame: + return self._df_structure + + @property + def sorter(self) -> typing.List: + return self._sorter + + @property + def file_path(self) -> str: + return self._file_path diff --git a/PyCTBN/build/lib/classes/utility/cache.py b/PyCTBN/build/lib/classes/utility/cache.py new file mode 100644 index 0000000..8e0369b --- /dev/null +++ b/PyCTBN/build/lib/classes/utility/cache.py @@ -0,0 +1,58 @@ + +import typing + +from ..structure_graph.set_of_cims import SetOfCims + + +class Cache: + """This class acts as a cache of ``SetOfCims`` objects for a node. + + :__list_of_sets_of_parents: a list of ``Sets`` objects of the parents to which the cim in cache at SAME + index is related + :__actual_cache: a list of setOfCims objects + """ + + def __init__(self): + """Constructor Method + """ + self._list_of_sets_of_parents = [] + self._actual_cache = [] + + def find(self, parents_comb: typing.Set): #typing.Union[typing.Set, str] + """ + Tries to find in cache given the symbolic parents combination ``parents_comb`` the ``SetOfCims`` + related to that ``parents_comb``. + + :param parents_comb: the parents related to that ``SetOfCims`` + :type parents_comb: Set + :return: A ``SetOfCims`` object if the ``parents_comb`` index is found in ``__list_of_sets_of_parents``. + None otherwise. 
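+        The lookup is positional: ``parents_comb`` is searched with ``list.index`` in ``__list_of_sets_of_parents``
+        and the ``SetOfCims`` stored at the same position in ``__actual_cache`` is returned.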
+ :rtype: SetOfCims + """ + try: + #print("Cache State:", self.list_of_sets_of_indxs) + #print("Look For:", parents_comb) + result = self._actual_cache[self._list_of_sets_of_parents.index(parents_comb)] + #print("CACHE HIT!!!!", parents_comb) + return result + except ValueError: + return None + + def put(self, parents_comb: typing.Set, socim: SetOfCims): + """Place in cache the ``SetOfCims`` object, and the related symbolic index ``parents_comb`` in + ``__list_of_sets_of_parents``. + + :param parents_comb: the symbolic set index + :type parents_comb: Set + :param socim: the related SetOfCims object + :type socim: SetOfCims + """ + #print("Putting in cache:", parents_comb) + self._list_of_sets_of_parents.append(parents_comb) + self._actual_cache.append(socim) + + def clear(self): + """Clear the contents both of ``__actual_cache`` and ``__list_of_sets_of_parents``. + """ + del self._list_of_sets_of_parents[:] + del self._actual_cache[:] \ No newline at end of file diff --git a/PyCTBN/build/lib/classes/utility/json_importer.py b/PyCTBN/build/lib/classes/utility/json_importer.py new file mode 100644 index 0000000..edff212 --- /dev/null +++ b/PyCTBN/build/lib/classes/utility/json_importer.py @@ -0,0 +1,176 @@ +import json +import typing + +import pandas as pd + + +from .abstract_importer import AbstractImporter + + +class JsonImporter(AbstractImporter): + """Implements the abstracts methods of AbstractImporter and adds all the necessary methods to process and prepare + the data in json extension. + + :param file_path: the path of the file that contains tha data to be imported + :type file_path: string + :param samples_label: the reference key for the samples in the trajectories + :type samples_label: string + :param structure_label: the reference key for the structure of the network data + :type structure_label: string + :param variables_label: the reference key for the cardinalites of the nodes data + :type variables_label: string + :param time_key: the key used to identify the timestamps in each trajectory + :type time_key: string + :param variables_key: the key used to identify the names of the variables in the net + :type variables_key: string + :_array_indx: the index of the outer JsonArray to extract the data from + :type _array_indx: int + :_df_samples_list: a Dataframe list in which every dataframe contains a trajectory + :_raw_data: The raw contents of the json file to import + :type _raw_data: List + """ + + def __init__(self, file_path: str, samples_label: str, structure_label: str, variables_label: str, time_key: str, + variables_key: str): + """Constructor method + + .. note:: + This constructor calls also the method ``read_json_file()``, so after the construction of the object + the class member ``_raw_data`` will contain the raw imported json data. + + """ + self._samples_label = samples_label + self._structure_label = structure_label + self._variables_label = variables_label + self._time_key = time_key + self._variables_key = variables_key + self._df_samples_list = None + self._array_indx = None + super(JsonImporter, self).__init__(file_path) + self._raw_data = self.read_json_file() + + def import_data(self, indx: int) -> None: + """Implements the abstract method of :class:`AbstractImporter`. 
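+        Imports the trajectories, the net structure and the variables from the JSON array at position ``indx``,
+        and computes the shifted/delta representation of every trajectory.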
+ + :param indx: the index of the outer JsonArray to extract the data from + :type indx: int + """ + self._array_indx = indx + self._df_samples_list = self.import_trajectories(self._raw_data) + self._sorter = self.build_sorter(self._df_samples_list[0]) + self.compute_row_delta_in_all_samples_frames(self._df_samples_list) + self.clear_data_frame_list() + self._df_structure = self.import_structure(self._raw_data) + self._df_variables = self.import_variables(self._raw_data) + + def import_trajectories(self, raw_data: typing.List) -> typing.List: + """Imports the trajectories from the list of dicts ``raw_data``. + + :param raw_data: List of Dicts + :type raw_data: List + :return: List of dataframes containing all the trajectories + :rtype: List + """ + return self.normalize_trajectories(raw_data, self._array_indx, self._samples_label) + + def import_structure(self, raw_data: typing.List) -> pd.DataFrame: + """Imports in a dataframe the data in the list raw_data at the key ``_structure_label`` + + :param raw_data: List of Dicts + :type raw_data: List + :return: Dataframe containg the starting node a ending node of every arc of the network + :rtype: pandas.Dataframe + """ + return self.one_level_normalizing(raw_data, self._array_indx, self._structure_label) + + def import_variables(self, raw_data: typing.List) -> pd.DataFrame: + """Imports the data in ``raw_data`` at the key ``_variables_label``. + + :param raw_data: List of Dicts + :type raw_data: List + :return: Datframe containg the variables simbolic labels and their cardinalities + :rtype: pandas.Dataframe + """ + return self.one_level_normalizing(raw_data, self._array_indx, self._variables_label) + + def read_json_file(self) -> typing.List: + """Reads the JSON file in the path self.filePath. + + :return: The contents of the json file + :rtype: List + """ + with open(self._file_path) as f: + data = json.load(f) + return data + + def one_level_normalizing(self, raw_data: typing.List, indx: int, key: str) -> pd.DataFrame: + """Extracts the one-level nested data in the list ``raw_data`` at the index ``indx`` at the key ``key``. + + :param raw_data: List of Dicts + :type raw_data: List + :param indx: The index of the array from which the data have to be extracted + :type indx: int + :param key: the key for the Dicts from which exctract data + :type key: string + :return: A normalized dataframe + :rtype: pandas.Datframe + """ + return pd.DataFrame(raw_data[indx][key]) + + def normalize_trajectories(self, raw_data: typing.List, indx: int, trajectories_key: str) -> typing.List: + """ + Extracts the trajectories in ``raw_data`` at the index ``index`` at the key ``trajectories key``. + + :param raw_data: List of Dicts + :type raw_data: List + :param indx: The index of the array from which the data have to be extracted + :type indx: int + :param trajectories_key: the key of the trajectories objects + :type trajectories_key: string + :return: A list of daframes containg the trajectories + :rtype: List + """ + dataframe = pd.DataFrame + smps = raw_data[indx][trajectories_key] + df_samples_list = [dataframe(sample) for sample in smps] + return df_samples_list + + def build_sorter(self, sample_frame: pd.DataFrame) -> typing.List: + """Implements the abstract method build_sorter of the :class:`AbstractImporter` for this dataset. 
+ """ + columns_header = list(sample_frame.columns.values) + columns_header.remove(self._time_key) + return columns_header + + def clear_data_frame_list(self) -> None: + """Removes all values present in the dataframes in the list ``_df_samples_list``. + """ + for indx in range(len(self._df_samples_list)): + self._df_samples_list[indx] = self._df_samples_list[indx].iloc[0:0] + + def dataset_id(self) -> object: + return self._array_indx + + def import_sampled_cims(self, raw_data: typing.List, indx: int, cims_key: str) -> typing.Dict: + """Imports the synthetic CIMS in the dataset in a dictionary, using variables labels + as keys for the set of CIMS of a particular node. + + :param raw_data: List of Dicts + :type raw_data: List + :param indx: The index of the array from which the data have to be extracted + :type indx: int + :param cims_key: the key where the json object cims are placed + :type cims_key: string + :return: a dictionary containing the sampled CIMS for all the variables in the net + :rtype: Dictionary + """ + cims_for_all_vars = {} + for var in raw_data[indx][cims_key]: + sampled_cims_list = [] + cims_for_all_vars[var] = sampled_cims_list + for p_comb in raw_data[indx][cims_key][var]: + cims_for_all_vars[var].append(pd.DataFrame(raw_data[indx][cims_key][var][p_comb]).to_numpy()) + return cims_for_all_vars + + + diff --git a/PyCTBN/build/lib/classes/utility/sample_importer.py b/PyCTBN/build/lib/classes/utility/sample_importer.py new file mode 100644 index 0000000..05073c8 --- /dev/null +++ b/PyCTBN/build/lib/classes/utility/sample_importer.py @@ -0,0 +1,65 @@ +import json +import typing + +import pandas as pd +import numpy as np + +from .abstract_importer import AbstractImporter + + + +class SampleImporter(AbstractImporter): + """Implements the abstracts methods of AbstractImporter and adds all the necessary methods to process and prepare + the data loaded directly by using DataFrame + + :param trajectory_list: the data that describes the trajectories + :type trajectory_list: typing.Union[pd.DataFrame, np.ndarray, typing.List] + :param variables: the data that describes the variables with name and cardinality + :type variables: typing.Union[pd.DataFrame, np.ndarray, typing.List] + :param prior_net_structure: the data of the real structure, if it exists + :type prior_net_structure: typing.Union[pd.DataFrame, np.ndarray, typing.List] + + :_df_samples_list: a Dataframe list in which every dataframe contains a trajectory + :_raw_data: The raw contents of the json file to import + :type _raw_data: List + """ + + def __init__(self, + trajectory_list: typing.Union[pd.DataFrame, np.ndarray, typing.List] = None, + variables: typing.Union[pd.DataFrame, np.ndarray, typing.List] = None, + prior_net_structure: typing.Union[pd.DataFrame, np.ndarray,typing.List] = None): + + 'If the data are not DataFrame, it will be converted' + if isinstance(variables,list) or isinstance(variables,np.ndarray): + variables = pd.DataFrame(variables) + if isinstance(variables,list) or isinstance(variables,np.ndarray): + prior_net_structure=pd.DataFrame(prior_net_structure) + + super(SampleImporter, self).__init__(trajectory_list =trajectory_list, + variables= variables, + prior_net_structure=prior_net_structure) + + def import_data(self, header_column = None): + + if header_column is not None: + self._sorter = header_column + else: + self._sorter = self.build_sorter(self._df_samples_list[0]) + + samples_list= self._df_samples_list + + if isinstance(samples_list, np.ndarray): + samples_list = 
samples_list.tolist() + + self.compute_row_delta_in_all_samples_frames(samples_list) + + def build_sorter(self, sample_frame: pd.DataFrame) -> typing.List: + """Implements the abstract method build_sorter of the :class:`AbstractImporter` in order to get the ordered variables list. + """ + columns_header = list(sample_frame.columns.values) + del columns_header[0] + return columns_header + + + def dataset_id(self) -> object: + pass \ No newline at end of file diff --git a/PyCTBN/build/lib/tests/__init__.py b/PyCTBN/build/lib/tests/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/PyCTBN/build/lib/tests/__init__.py @@ -0,0 +1 @@ + diff --git a/PyCTBN/setup.py b/PyCTBN/setup.py new file mode 100644 index 0000000..56dd72f --- /dev/null +++ b/PyCTBN/setup.py @@ -0,0 +1,20 @@ +from setuptools import setup, find_packages + + +setup(name='PyCTBN', + version='1.0', + url='https://github.com/philipMartini/PyCTBN', + license='MIT', + author=['Alessandro Bregoli', 'Filippo Martini','Luca Moretti'], + author_email=['a.bregoli1@campus.unimib.it', 'f.martini@campus.unimib.it','lucamoretti96@gmail.com'], + description='A Continuous Time Bayesian Networks Library', + packages=find_packages('.', exclude=['tests']), + #packages=['PyCTBN.PyCTBN'], + install_requires=[ + 'numpy', 'pandas', 'networkx', 'scipy', 'matplotlib', 'tqdm'], + dependency_links=['https://github.com/numpy/numpy', 'https://github.com/pandas-dev/pandas', + 'https://github.com/networkx/networkx', 'https://github.com/scipy/scipy', + 'https://github.com/tqdm/tqdm'], + #long_description=open('../README.md').read(), + zip_safe=False, + python_requires='>=3.6') diff --git a/PyCTBN/tests/__init__.py b/PyCTBN/tests/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/PyCTBN/tests/__init__.py @@ -0,0 +1 @@ + diff --git a/PyCTBN/tests/coverage.xml b/PyCTBN/tests/coverage.xml new file mode 100644 index 0000000..094af83 --- /dev/null +++ b/PyCTBN/tests/coverage.xml @@ -0,0 +1,963 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/PyCTBN/tests/estimators/test_parameters_estimator.py b/PyCTBN/tests/estimators/test_parameters_estimator.py new file mode 100644 index 0000000..5c438db --- /dev/null +++ b/PyCTBN/tests/estimators/test_parameters_estimator.py @@ -0,0 +1,67 @@ + +import unittest +import numpy as np +import glob +import os + +from ...PyCTBN.structure_graph.network_graph import NetworkGraph +from ...PyCTBN.structure_graph.sample_path import SamplePath +from ...PyCTBN.structure_graph.set_of_cims import SetOfCims +from ...PyCTBN.estimators.parameters_estimator import ParametersEstimator +from ...PyCTBN.utility.json_importer import JsonImporter + + +class TestParametersEstimatior(unittest.TestCase): + + @classmethod + def setUpClass(cls) -> None: + cls.read_files = glob.glob(os.path.join('./PyCTBN/test_data/networks_and_trajectories_binary_data_01_3.json', "*.json")) + cls.array_indx = 0 + cls.importer = JsonImporter('./PyCTBN/test_data/networks_and_trajectories_binary_data_01_3.json', 'samples', 'dyn.str', 'variables', 'Time', 'Name') + cls.importer.import_data(cls.array_indx) + cls.s1 = SamplePath(cls.importer) + cls.s1.build_trajectories() + cls.s1.build_structure() + print(cls.s1.structure.edges) + print(cls.s1.structure.nodes_values) + + def test_fast_init(self): + for node in self.s1.structure.nodes_labels: + g = NetworkGraph(self.s1.structure) + g.fast_init(node) + p1 = ParametersEstimator(self.s1.trajectories, g) + self.assertEqual(p1._trajectories, self.s1.trajectories) + self.assertEqual(p1._net_graph, g) + self.assertIsNone(p1._single_set_of_cims) + p1.fast_init(node) + self.assertIsInstance(p1._single_set_of_cims, SetOfCims) + + def test_compute_parameters_for_node(self): + for indx, node in enumerate(self.s1.structure.nodes_labels): + print(node) + g = NetworkGraph(self.s1.structure) + g.fast_init(node) + p1 = ParametersEstimator(self.s1.trajectories, g) + p1.fast_init(node) + sofc1 = p1.compute_parameters_for_node(node) + sampled_cims = self.aux_import_sampled_cims('dyn.cims') + sc = list(sampled_cims.values()) + self.equality_of_cims_of_node(sc[indx], sofc1._actual_cims) + + def equality_of_cims_of_node(self, sampled_cims, estimated_cims): + self.assertEqual(len(sampled_cims), len(estimated_cims)) + for c1, c2 in zip(sampled_cims, estimated_cims): + self.cim_equality_test(c1, c2.cim) + + def cim_equality_test(self, cim1, cim2): + for r1, r2 in zip(cim1, cim2): + self.assertTrue(np.all(np.isclose(r1, r2, 1e01))) + + def aux_import_sampled_cims(self, cims_label): + i1 = JsonImporter('./PyCTBN/test_data/networks_and_trajectories_binary_data_01_3.json', '', '', '', '', '') + raw_data = i1.read_json_file() + return i1.import_sampled_cims(raw_data, self.array_indx, cims_label) + + +if __name__ == '__main__': + unittest.main() diff --git a/PyCTBN/tests/estimators/test_structure_constraint_based_estimator.py b/PyCTBN/tests/estimators/test_structure_constraint_based_estimator.py new file mode 100644 index 0000000..96834dc --- /dev/null +++ b/PyCTBN/tests/estimators/test_structure_constraint_based_estimator.py @@ -0,0 +1,64 @@ + +import glob +import math +import os +import unittest + +import networkx as nx +import numpy as np 
+import psutil +from line_profiler import LineProfiler + +import json +import pandas as pd + + +from ...PyCTBN.structure_graph.sample_path import SamplePath +from ...PyCTBN.estimators.structure_constraint_based_estimator import StructureConstraintBasedEstimator +from ...PyCTBN.utility.sample_importer import SampleImporter + +import copy + + +class TestStructureConstraintBasedEstimator(unittest.TestCase): + @classmethod + def setUpClass(cls): + with open("./PyCTBN/test_data/networks_and_trajectories_binary_data_01_3.json") as f: + raw_data = json.load(f) + + trajectory_list_raw= raw_data[0]["samples"] + + trajectory_list = [pd.DataFrame(sample) for sample in trajectory_list_raw] + + variables= pd.DataFrame(raw_data[0]["variables"]) + prior_net_structure = pd.DataFrame(raw_data[0]["dyn.str"]) + + + cls.importer = SampleImporter( + trajectory_list=trajectory_list, + variables=variables, + prior_net_structure=prior_net_structure + ) + + cls.importer.import_data() + #cls.s1 = sp.SamplePath(cls.importer) + + #cls.traj = cls.s1.concatenated_samples + + # print(len(cls.traj)) + cls.s1 = SamplePath(cls.importer) + cls.s1.build_trajectories() + cls.s1.build_structure() + + def test_structure(self): + true_edges = copy.deepcopy(self.s1.structure.edges) + true_edges = set(map(tuple, true_edges)) + + se1 = StructureConstraintBasedEstimator(self.s1,0.1,0.1) + edges = se1.estimate_structure(disable_multiprocessing=False) + + + self.assertEqual(edges, true_edges) + +if __name__ == '__main__': + unittest.main() diff --git a/PyCTBN/tests/estimators/test_structure_constraint_based_estimator_server.py b/PyCTBN/tests/estimators/test_structure_constraint_based_estimator_server.py new file mode 100644 index 0000000..a524515 --- /dev/null +++ b/PyCTBN/tests/estimators/test_structure_constraint_based_estimator_server.py @@ -0,0 +1,59 @@ + +import glob +import math +import os +import unittest + +import networkx as nx +import numpy as np +import psutil +from line_profiler import LineProfiler + +from ...PyCTBN.utility.cache import Cache +from ...PyCTBN.structure_graph.sample_path import SamplePath +from ...PyCTBN.estimators.structure_constraint_based_estimator import StructureConstraintBasedEstimator +from ...PyCTBN.utility.json_importer import JsonImporter + +from multiprocessing import set_start_method + +import copy + + +class TestStructureConstraintBasedEstimator(unittest.TestCase): + @classmethod + def setUpClass(cls): + pass + + def test_structure(self): + #cls.read_files = glob.glob(os.path.join('../../data', "*.json")) + self.importer = JsonImporter("./PyCTBN/test_data/networks_and_trajectories_binary_data_01_3.json", 'samples', 'dyn.str', 'variables', 'Time', 'Name') + self.s1 = SamplePath(self.importer) + self.s1.build_trajectories() + self.s1.build_structure() + + true_edges = copy.deepcopy(self.s1.structure.edges) + true_edges = set(map(tuple, true_edges)) + + + se1 = StructureConstraintBasedEstimator(self.s1,0.1,0.1) + edges = se1.estimate_structure(disable_multiprocessing=False) + + + self.importer = JsonImporter("./PyCTBN/test_data/networks_and_trajectories_binary_data_01_3.json", 'samples', 'dyn.str', 'variables', 'Time', 'Name') + self.s1 = SamplePath(self.importer) + self.s1.build_trajectories() + self.s1.build_structure() + + true_edges = copy.deepcopy(self.s1.structure.edges) + true_edges = set(map(tuple, true_edges)) + + + se1 = StructureConstraintBasedEstimator(self.s1,0.1,0.1) + edges = se1.estimate_structure(disable_multiprocessing=True) + + + + self.assertEqual(edges, true_edges) + +if 
__name__ == '__main__': + unittest.main() diff --git a/PyCTBN/tests/estimators/test_structure_score_based_estimator.py b/PyCTBN/tests/estimators/test_structure_score_based_estimator.py new file mode 100644 index 0000000..742fa21 --- /dev/null +++ b/PyCTBN/tests/estimators/test_structure_score_based_estimator.py @@ -0,0 +1,82 @@ +import sys +sys.path.append("../../PyCTBN/") +import glob +import math +import os +import unittest + +import networkx as nx +import numpy as np +import psutil +from line_profiler import LineProfiler +import copy + +from ...PyCTBN.utility.cache import Cache +from ...PyCTBN.structure_graph.sample_path import SamplePath +from ...PyCTBN.estimators.structure_score_based_estimator import StructureScoreBasedEstimator +from ...PyCTBN.utility.json_importer import JsonImporter +from ...PyCTBN.utility.sample_importer import SampleImporter + +import json + +import pandas as pd + + + +class TestStructureScoreBasedEstimator(unittest.TestCase): + + @classmethod + def setUpClass(cls): + with open("./PyCTBN/test_data/networks_and_trajectories_binary_data_01_3.json") as f: + raw_data = json.load(f) + + trajectory_list_raw= raw_data[0]["samples"] + + trajectory_list = [pd.DataFrame(sample) for sample in trajectory_list_raw] + + variables= pd.DataFrame(raw_data[0]["variables"]) + prior_net_structure = pd.DataFrame(raw_data[0]["dyn.str"]) + + + cls.importer = SampleImporter( + trajectory_list=trajectory_list, + variables=variables, + prior_net_structure=prior_net_structure + ) + + cls.importer.import_data() + #cls.s1 = sp.SamplePath(cls.importer) + + #cls.traj = cls.s1.concatenated_samples + + # print(len(cls.traj)) + cls.s1 = SamplePath(cls.importer) + cls.s1.build_trajectories() + cls.s1.build_structure() + + + + def test_structure(self): + true_edges = copy.deepcopy(self.s1.structure.edges) + true_edges = set(map(tuple, true_edges)) + + + se1 = StructureScoreBasedEstimator(self.s1,known_edges = [('X','Q')]) + edges = se1.estimate_structure( + max_parents = None, + iterations_number = 100, + patience = 35, + tabu_length = 15, + tabu_rules_duration = 15, + optimizer = 'hill', + disable_multiprocessing=True + ) + + + self.assertEqual(edges, true_edges) + + + +if __name__ == '__main__': + unittest.main() + diff --git a/PyCTBN/tests/estimators/test_structure_score_based_estimator_server.py b/PyCTBN/tests/estimators/test_structure_score_based_estimator_server.py new file mode 100644 index 0000000..b21ea7a --- /dev/null +++ b/PyCTBN/tests/estimators/test_structure_score_based_estimator_server.py @@ -0,0 +1,79 @@ + +import glob +import math +import os +import unittest + +import networkx as nx +import numpy as np +import psutil +from line_profiler import LineProfiler +import copy + +from ...PyCTBN.utility.cache import Cache +from ...PyCTBN.structure_graph.sample_path import SamplePath +from ...PyCTBN.estimators.structure_score_based_estimator import StructureScoreBasedEstimator +from ...PyCTBN.utility.json_importer import JsonImporter + + + +class TestStructureScoreBasedEstimator(unittest.TestCase): + + @classmethod + def setUpClass(cls): + pass + + + + def test_structure(self): + #cls.read_files = glob.glob(os.path.join('../../data', "*.json")) + self.importer = JsonImporter("./PyCTBN/test_data/networks_and_trajectories_binary_data_01_3.json", 'samples', 'dyn.str', 'variables', 'Time', 'Name') + self.s1 = SamplePath(self.importer) + self.s1.build_trajectories() + self.s1.build_structure() + + true_edges = copy.deepcopy(self.s1.structure.edges) + true_edges = set(map(tuple, true_edges)) + + 
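+ # The tabu search below is run twice, first with multiprocessing enabled and then disabled,
+ # and the recovered edges are finally checked against the true edge set.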
+ se1 = StructureScoreBasedEstimator(self.s1) + edges = se1.estimate_structure( + max_parents = None, + iterations_number = 100, + patience = 35, + tabu_length = 15, + tabu_rules_duration = 15, + optimizer = 'tabu', + disable_multiprocessing=False + ) + + + self.importer = JsonImporter("./PyCTBN/test_data/networks_and_trajectories_binary_data_01_3.json", 'samples', 'dyn.str', 'variables', 'Time', 'Name') + self.s1 = SamplePath(self.importer) + self.s1.build_trajectories() + self.s1.build_structure() + + true_edges = copy.deepcopy(self.s1.structure.edges) + true_edges = set(map(tuple, true_edges)) + + + se1 = StructureScoreBasedEstimator(self.s1) + edges = se1.estimate_structure( + max_parents = None, + iterations_number = 100, + patience = 35, + tabu_length = 15, + tabu_rules_duration = 15, + optimizer = 'tabu', + disable_multiprocessing=True + ) + + + + self.assertEqual(edges, true_edges) + + + +if __name__ == '__main__': + unittest.main() + diff --git a/PyCTBN/tests/optimizers/test_hill_climbing_search.py b/PyCTBN/tests/optimizers/test_hill_climbing_search.py new file mode 100644 index 0000000..2b05236 --- /dev/null +++ b/PyCTBN/tests/optimizers/test_hill_climbing_search.py @@ -0,0 +1,54 @@ + +import glob +import math +import os +import unittest + +import networkx as nx +import numpy as np +import psutil +from line_profiler import LineProfiler +import copy + + +from ...PyCTBN.structure_graph.sample_path import SamplePath +from ...PyCTBN.estimators.structure_score_based_estimator import StructureScoreBasedEstimator +from ...PyCTBN.utility.json_importer import JsonImporter + + + +class TestHillClimbingSearch(unittest.TestCase): + + @classmethod + def setUpClass(cls): + #cls.read_files = glob.glob(os.path.join('../../data', "*.json")) + + + cls.importer = JsonImporter("./PyCTBN/test_data/networks_and_trajectories_binary_data_01_3.json", 'samples', 'dyn.str', 'variables', 'Time', 'Name') + cls.importer.import_data(0) + cls.s1 = SamplePath(cls.importer) + cls.s1.build_trajectories() + cls.s1.build_structure() + + + + def test_structure(self): + true_edges = copy.deepcopy(self.s1.structure.edges) + true_edges = set(map(tuple, true_edges)) + + se1 = StructureScoreBasedEstimator(self.s1) + edges = se1.estimate_structure( + max_parents = None, + iterations_number = 40, + patience = None, + optimizer = 'hill' + ) + + + self.assertEqual(edges, true_edges) + + + +if __name__ == '__main__': + unittest.main() + diff --git a/PyCTBN/tests/optimizers/test_tabu_search.py b/PyCTBN/tests/optimizers/test_tabu_search.py new file mode 100644 index 0000000..3349dd7 --- /dev/null +++ b/PyCTBN/tests/optimizers/test_tabu_search.py @@ -0,0 +1,84 @@ +import sys +sys.path.append("../../PyCTBN/") +import glob +import math +import os +import unittest + +import networkx as nx +import numpy as np +import pandas as pd +import psutil +from line_profiler import LineProfiler +import copy +import json + +import utility.cache as ch +import structure_graph.sample_path as sp +import estimators.structure_score_based_estimator as se +import utility.json_importer as ji +import utility.sample_importer as si + + + + + +class TestTabuSearch(unittest.TestCase): + + @classmethod + def setUpClass(cls): + #cls.read_files = glob.glob(os.path.join('../../data', "*.json")) + + with open("./PyCTBN/test_data/networks_and_trajectories_binary_data_01_3.json") as f: + raw_data = json.load(f) + + trajectory_list_raw= raw_data[0]["samples"] + + trajectory_list = [pd.DataFrame(sample) for sample in trajectory_list_raw] + + variables= 
pd.DataFrame(raw_data[0]["variables"]) + prior_net_structure = pd.DataFrame(raw_data[0]["dyn.str"]) + + + cls.importer = si.SampleImporter( + trajectory_list=trajectory_list, + variables=variables, + prior_net_structure=prior_net_structure + ) + + cls.importer.import_data() + #cls.s1 = sp.SamplePath(cls.importer) + + #cls.traj = cls.s1.concatenated_samples + + # print(len(cls.traj)) + cls.s1 = sp.SamplePath(cls.importer) + cls.s1.build_trajectories() + cls.s1.build_structure() + #cls.s1.clear_memory() + + + + def test_structure(self): + true_edges = copy.deepcopy(self.s1.structure.edges) + true_edges = set(map(tuple, true_edges)) + + se1 = se.StructureScoreBasedEstimator(self.s1) + edges = se1.estimate_structure( + max_parents = None, + iterations_number = 100, + patience = 20, + tabu_length = 10, + tabu_rules_duration = 10, + optimizer = 'tabu', + disable_multiprocessing=False + ) + + + self.assertEqual(edges, true_edges) + + + +if __name__ == '__main__': + unittest.main() + diff --git a/PyCTBN/tests/structure_graph/test_cim.py b/PyCTBN/tests/structure_graph/test_cim.py new file mode 100644 index 0000000..a5fcd75 --- /dev/null +++ b/PyCTBN/tests/structure_graph/test_cim.py @@ -0,0 +1,46 @@ + +import unittest +import numpy as np + +from ...PyCTBN.structure_graph.conditional_intensity_matrix import ConditionalIntensityMatrix + + +class TestConditionalIntensityMatrix(unittest.TestCase): + + @classmethod + def setUpClass(cls) -> None: + cls.state_res_times = np.random.rand(1, 3)[0] + cls.state_res_times = cls.state_res_times * 1000 + cls.state_transition_matrix = np.random.randint(1, 10000, (3, 3)) + for i in range(0, len(cls.state_res_times)): + cls.state_transition_matrix[i, i] = 0 + cls.state_transition_matrix[i, i] = np.sum(cls.state_transition_matrix[i]) + + def test_init(self): + c1 = ConditionalIntensityMatrix(self.state_res_times, self.state_transition_matrix) + self.assertTrue(np.array_equal(self.state_res_times, c1.state_residence_times)) + self.assertTrue(np.array_equal(self.state_transition_matrix, c1.state_transition_matrix)) + self.assertEqual(c1.cim.dtype, np.float) + self.assertEqual(self.state_transition_matrix.shape, c1.cim.shape) + + def test_compute_cim_coefficients(self): + c1 = ConditionalIntensityMatrix(self.state_res_times, self.state_transition_matrix) + c2 = self.state_transition_matrix.astype(np.float) + np.fill_diagonal(c2, c2.diagonal() * -1) + for i in range(0, len(self.state_res_times)): + for j in range(0, len(self.state_res_times)): + c2[i, j] = (c2[i, j] + 1) / (self.state_res_times[i] + 1) + c1.compute_cim_coefficients() + for i in range(0, len(c1.state_residence_times)): + self.assertTrue(np.isclose(np.sum(c1.cim[i]), 0.0, 1e-02, 1e-01)) + for i in range(0, len(self.state_res_times)): + for j in range(0, len(self.state_res_times)): + self.assertTrue(np.isclose(c1.cim[i, j], c2[i, j], 1e-02, 1e-01)) + + def test_repr(self): + c1 = ConditionalIntensityMatrix(self.state_res_times, self.state_transition_matrix) + print(c1) + + +if __name__ == '__main__': + unittest.main() diff --git a/PyCTBN/tests/structure_graph/test_networkgraph.py b/PyCTBN/tests/structure_graph/test_networkgraph.py new file mode 100644 index 0000000..b41c20a --- /dev/null +++ b/PyCTBN/tests/structure_graph/test_networkgraph.py @@ -0,0 +1,190 @@ + +import unittest +import glob +import os +import networkx as nx +import numpy as np +import itertools + +from ...PyCTBN.structure_graph.sample_path import SamplePath +from ...PyCTBN.structure_graph.network_graph import NetworkGraph +from 
...PyCTBN.utility.json_importer import JsonImporter + + +class TestNetworkGraph(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.read_files = glob.glob(os.path.join('./PyCTBN/test_data', "*.json")) + cls.importer = JsonImporter(cls.read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name') + cls.importer.import_data(0) + cls.s1 = SamplePath(cls.importer) + cls.s1.build_trajectories() + cls.s1.build_structure() + + def test_init(self): + g1 = NetworkGraph(self.s1.structure) + self.assertEqual(self.s1.structure, g1._graph_struct) + self.assertIsInstance(g1._graph, nx.DiGraph) + self.assertIsNone(g1.time_scalar_indexing_strucure) + self.assertIsNone(g1.transition_scalar_indexing_structure) + self.assertIsNone(g1.transition_filtering) + self.assertIsNone(g1.p_combs) + + def test_add_nodes(self): + g1 = NetworkGraph(self.s1.structure) + g1.add_nodes(self.s1.structure.nodes_labels) + for n1, n2 in zip(g1.nodes, self.s1.structure.nodes_labels): + self.assertEqual(n1, n2) + + def test_add_edges(self): + g1 = NetworkGraph(self.s1.structure) + g1.add_edges(self.s1.structure.edges) + for e in self.s1.structure.edges: + self.assertIn(tuple(e), g1.edges) + + def test_fast_init(self): + g1 = NetworkGraph(self.s1.structure) + for node in self.s1.structure.nodes_labels: + g1.fast_init(node) + self.assertIsNotNone(g1._graph.nodes) + self.assertIsNotNone(g1._graph.edges) + self.assertIsInstance(g1._time_scalar_indexing_structure, np.ndarray) + self.assertIsInstance(g1._transition_scalar_indexing_structure, np.ndarray) + self.assertIsInstance(g1._time_filtering, np.ndarray) + self.assertIsInstance(g1._transition_filtering, np.ndarray) + self.assertIsInstance(g1._p_combs_structure, np.ndarray) + self.assertIsInstance(g1._aggregated_info_about_nodes_parents, tuple) + + def test_get_ordered_by_indx_set_of_parents(self): + g1 = NetworkGraph(self.s1.structure) + g1.add_nodes(self.s1.structure.nodes_labels) + g1.add_edges(self.s1.structure.edges) + for node in self.s1.structure.nodes_labels: + aggr_info = g1.get_ordered_by_indx_set_of_parents(node) + for indx in range(len(aggr_info[0]) - 1 ): + self.assertLess(g1.get_node_indx(aggr_info[0][indx]), g1.get_node_indx(aggr_info[0][indx + 1])) + for par, par_indx in zip(aggr_info[0], aggr_info[1]): + self.assertEqual(g1.get_node_indx(par), par_indx) + for par, par_val in zip(aggr_info[0], aggr_info[2]): + self.assertEqual(g1._graph_struct.get_states_number(par), par_val) + + def test_build_time_scalar_indexing_structure_for_a_node(self): + g1 = NetworkGraph(self.s1.structure) + g1.add_nodes(self.s1.structure.nodes_labels) + g1.add_edges(self.s1.structure.edges) + for node in self.s1.structure.nodes_labels: + aggr_info = g1.get_ordered_by_indx_set_of_parents(node) + self.aux_build_time_scalar_indexing_structure_for_a_node(g1, node, aggr_info[1], + aggr_info[0], aggr_info[2]) + + def aux_build_time_scalar_indexing_structure_for_a_node(self, graph, node_id, parents_indxs, parents_labels, parents_vals): + node_states = graph.get_states_number(node_id) + time_scalar_indexing = NetworkGraph.build_time_scalar_indexing_structure_for_a_node(node_states, parents_vals) + self.assertEqual(len(time_scalar_indexing), len(parents_indxs) + 1) + merged_list = parents_labels[:] + merged_list.insert(0, node_id) + vals_list = [] + for node in merged_list: + vals_list.append(graph.get_states_number(node)) + t_vec = np.array(vals_list) + t_vec = t_vec.cumprod() + self.assertTrue(np.array_equal(time_scalar_indexing, t_vec)) + + def 
test_build_transition_scalar_indexing_structure_for_a_node(self): + g1 = NetworkGraph(self.s1.structure) + g1.add_nodes(self.s1.structure.nodes_labels) + g1.add_edges(self.s1.structure.edges) + for node in self.s1.structure.nodes_labels: + aggr_info = g1.get_ordered_by_indx_set_of_parents(node) + self.aux_build_transition_scalar_indexing_structure_for_a_node(g1, node, aggr_info[1], + aggr_info[0], aggr_info[2]) + + def aux_build_transition_scalar_indexing_structure_for_a_node(self, graph, node_id, parents_indxs, parents_labels, + parents_values): + node_states = graph.get_states_number(node_id) + transition_scalar_indexing = graph.build_transition_scalar_indexing_structure_for_a_node(node_states, + parents_values) + self.assertEqual(len(transition_scalar_indexing), len(parents_indxs) + 2) + merged_list = parents_labels[:] + merged_list.insert(0, node_id) + merged_list.insert(0, node_id) + vals_list = [] + for node_id in merged_list: + vals_list.append(graph.get_states_number(node_id)) + m_vec = np.array([vals_list]) + m_vec = m_vec.cumprod() + self.assertTrue(np.array_equal(transition_scalar_indexing, m_vec)) + + def test_build_time_columns_filtering_structure_for_a_node(self): + g1 = NetworkGraph(self.s1.structure) + g1.add_nodes(self.s1.structure.nodes_labels) + g1.add_edges(self.s1.structure.edges) + for node in self.s1.structure.nodes_labels: + aggr_info = g1.get_ordered_by_indx_set_of_parents(node) + self.aux_build_time_columns_filtering_structure_for_a_node(g1, node, aggr_info[1]) + + def aux_build_time_columns_filtering_structure_for_a_node(self, graph, node_id, p_indxs): + graph.build_time_columns_filtering_for_a_node(graph.get_node_indx(node_id), p_indxs) + single_filter = [] + single_filter.append(graph.get_node_indx(node_id)) + single_filter.extend(p_indxs) + self.assertTrue(np.array_equal(graph.build_time_columns_filtering_for_a_node(graph.get_node_indx(node_id), + p_indxs),np.array(single_filter))) + def test_build_transition_columns_filtering_structure(self): + g1 = NetworkGraph(self.s1.structure) + g1.add_nodes(self.s1.structure.nodes_labels) + g1.add_edges(self.s1.structure.edges) + for node in self.s1.structure.nodes_labels: + aggr_info = g1.get_ordered_by_indx_set_of_parents(node) + self.aux_build_time_columns_filtering_structure_for_a_node(g1, node, aggr_info[1]) + + def aux_build_transition_columns_filtering_structure(self, graph, node_id, p_indxs): + single_filter = [] + single_filter.append(graph.get_node_indx(node_id) + graph._graph_struct.total_variables_number) + single_filter.append(graph.get_node_indx(node_id)) + single_filter.extend(p_indxs) + self.assertTrue(np.array_equal(graph.build_transition_filtering_for_a_node(graph.get_node_indx(node_id), + + p_indxs), np.array(single_filter))) + def test_build_p_combs_structure(self): + g1 = NetworkGraph(self.s1.structure) + g1.add_nodes(self.s1.structure.nodes_labels) + g1.add_edges(self.s1.structure.edges) + for node in self.s1.structure.nodes_labels: + aggr_info = g1.get_ordered_by_indx_set_of_parents(node) + self.aux_build_p_combs_structure(g1, aggr_info[2]) + + def aux_build_p_combs_structure(self, graph, p_vals): + p_combs = graph.build_p_comb_structure_for_a_node(p_vals) + p_possible_vals = [] + for val in p_vals: + vals = [v for v in range(val)] + p_possible_vals.extend(vals) + comb_struct = set(itertools.product(p_possible_vals,repeat=len(p_vals))) + for comb in comb_struct: + self.assertIn(np.array(comb), p_combs) + + def test_get_parents_by_id(self): + g1 = NetworkGraph(self.s1.structure) + 
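+ # After nodes and edges are added, get_parents_by_id should mirror networkx's DiGraph.predecessors view (asserted below).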
g1.add_nodes(self.s1.structure.nodes_labels) + g1.add_edges(self.s1.structure.edges) + for node in g1.nodes: + self.assertListEqual(g1.get_parents_by_id(node), list(g1._graph.predecessors(node))) + + def test_get_states_number(self): + g1 = NetworkGraph(self.s1.structure) + g1.add_nodes(self.s1.structure.nodes_labels) + g1.add_edges(self.s1.structure.edges) + for node, val in zip(g1.nodes, g1.nodes_values): + self.assertEqual(val, g1.get_states_number(node)) + + def test_get_node_indx(self): + g1 = NetworkGraph(self.s1.structure) + g1.add_nodes(self.s1.structure.nodes_labels) + g1.add_edges(self.s1.structure.edges) + for node, indx in zip(g1.nodes, g1.nodes_indexes): + self.assertEqual(indx, g1.get_node_indx(node)) + + +if __name__ == '__main__': + unittest.main() diff --git a/PyCTBN/tests/structure_graph/test_sample_path.py b/PyCTBN/tests/structure_graph/test_sample_path.py new file mode 100644 index 0000000..3d9635d --- /dev/null +++ b/PyCTBN/tests/structure_graph/test_sample_path.py @@ -0,0 +1,72 @@ + +import unittest +import glob +import os +import random + +from ...PyCTBN.utility.json_importer import JsonImporter +from ...PyCTBN.structure_graph.sample_path import SamplePath +from ...PyCTBN.structure_graph.trajectory import Trajectory +from ...PyCTBN.structure_graph.structure import Structure + + +class TestSamplePath(unittest.TestCase): + + @classmethod + def setUpClass(cls) -> None: + cls.read_files = glob.glob(os.path.join('./PyCTBN/test_data', "*.json")) + + def test_init_not_initialized_importer(self): + importer = JsonImporter(self.read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name') + self.assertRaises(RuntimeError, SamplePath, importer) + + def test_init_not_filled_dataframse(self): + importer = JsonImporter(self.read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name') + importer.import_data(0) + importer.clear_concatenated_frame() + self.assertRaises(RuntimeError, SamplePath, importer) + + def test_init(self): + importer = JsonImporter(self.read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name') + importer.import_data(0) + s1 = SamplePath(importer) + self.assertIsNone(s1.trajectories) + self.assertIsNone(s1.structure) + self.assertFalse(s1._importer.concatenated_samples.empty) + self.assertIsNone(s1._total_variables_count) + + def test_build_trajectories(self): + importer = JsonImporter(self.read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name') + importer.import_data(0) + s1 = SamplePath(importer) + s1.build_trajectories() + self.assertIsInstance(s1.trajectories, Trajectory) + + def test_build_structure(self): + importer = JsonImporter(self.read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name') + importer.import_data(0) + s1 = SamplePath(importer) + s1.build_structure() + self.assertIsInstance(s1.structure, Structure) + self.assertEqual(s1._total_variables_count, len(s1._importer.sorter)) + + def test_build_structure_bad_sorter(self): + importer = JsonImporter(self.read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name') + importer.import_data(0) + s1 = SamplePath(importer) + random.shuffle(importer._sorter) + self.assertRaises(RuntimeError, s1.build_structure) + + def test_build_saplepath_no_prior_net_structure(self): + importer = JsonImporter(self.read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name') + importer.import_data(0) + importer._df_structure = None + s1 = SamplePath(importer) + s1.build_trajectories() + s1.build_structure() + self.assertFalse(s1.structure.edges) + + + +if __name__ == 
'__main__': + unittest.main() diff --git a/PyCTBN/tests/structure_graph/test_setofcims.py b/PyCTBN/tests/structure_graph/test_setofcims.py new file mode 100644 index 0000000..9f5fbe9 --- /dev/null +++ b/PyCTBN/tests/structure_graph/test_setofcims.py @@ -0,0 +1,133 @@ + +import unittest +import numpy as np +import itertools + +from ...PyCTBN.structure_graph.set_of_cims import SetOfCims + + +class TestSetOfCims(unittest.TestCase): + + @classmethod + def setUpClass(cls) -> None: + cls.node_id = 'X' + cls.possible_cardinalities = [2, 3] + cls.possible_states = [[0,1], [0, 1, 2]] + cls.node_states_number = range(2, 4) + + def test_init(self): + # empty parent set + for sn in self.node_states_number: + p_combs = self.build_p_comb_structure_for_a_node([]) + self.aux_test_init(self.node_id, [], sn, p_combs) + # one parent + for sn in self.node_states_number: + for p in itertools.product(self.possible_cardinalities, repeat=1): + p_combs = self.build_p_comb_structure_for_a_node(list(p)) + self.aux_test_init(self.node_id, list(p), sn, p_combs) + #two parents + for sn in self.node_states_number: + for p in itertools.product(self.possible_cardinalities, repeat=2): + p_combs = self.build_p_comb_structure_for_a_node(list(p)) + self.aux_test_init(self.node_id, list(p), sn, p_combs) + + def test_build_cims(self): + # empty parent set + for sn in self.node_states_number: + p_combs = self.build_p_comb_structure_for_a_node([]) + self.aux_test_build_cims(self.node_id, [], sn, p_combs) + # one parent + for sn in self.node_states_number: + for p in itertools.product(self.possible_cardinalities, repeat=1): + p_combs = self.build_p_comb_structure_for_a_node(list(p)) + self.aux_test_build_cims(self.node_id, list(p), sn, p_combs) + #two parents + for sn in self.node_states_number: + for p in itertools.product(self.possible_cardinalities, repeat=2): + p_combs = self.build_p_comb_structure_for_a_node(list(p)) + self.aux_test_build_cims(self.node_id, list(p), sn, p_combs) + + def test_filter_cims_with_mask(self): + p_combs = self.build_p_comb_structure_for_a_node(self.possible_cardinalities) + sofc1 = SetOfCims('X', self.possible_cardinalities, 3, p_combs) + state_res_times_list = [] + transition_matrices_list = [] + for i in range(len(p_combs)): + state_res_times = np.random.rand(1, 3)[0] + state_res_times = state_res_times * 1000 + state_transition_matrix = np.random.randint(1, 10000, (3, 3)) + state_res_times_list.append(state_res_times) + transition_matrices_list.append(state_transition_matrix) + sofc1.build_cims(np.array(state_res_times_list), np.array(transition_matrices_list)) + for length_of_mask in range(3): + for mask in list(itertools.permutations([True, False],r=length_of_mask)): + m = np.array(mask) + for parent_value in range(self.possible_cardinalities[0]): + cims = sofc1.filter_cims_with_mask(m, [parent_value]) + if length_of_mask == 0 or length_of_mask == 1: + self.assertTrue(np.array_equal(sofc1._actual_cims, cims)) + else: + indxs = self.another_filtering_method(p_combs, m, [parent_value]) + self.assertTrue(np.array_equal(cims, sofc1._actual_cims[indxs])) + + def aux_test_build_cims(self, node_id, p_values, node_states, p_combs): + state_res_times_list = [] + transition_matrices_list = [] + so1 = SetOfCims(node_id, p_values, node_states, p_combs) + for i in range(len(p_combs)): + state_res_times = np.random.rand(1, node_states)[0] + state_res_times = state_res_times * 1000 + state_transition_matrix = np.random.randint(1, 10000, (node_states, node_states)) + 
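+ # One residence-time vector and one transition matrix are generated per parent combination;
+ # build_cims consumes them and is expected to release the raw arrays afterwards (asserted below).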
state_res_times_list.append(state_res_times) + transition_matrices_list.append(state_transition_matrix) + so1.build_cims(np.array(state_res_times_list), np.array(transition_matrices_list)) + self.assertEqual(len(state_res_times_list), so1.get_cims_number()) + self.assertIsInstance(so1._actual_cims, np.ndarray) + self.assertIsNone(so1._transition_matrices) + self.assertIsNone(so1._state_residence_times) + + def aux_test_init(self, node_id, parents_states_number, node_states_number, p_combs): + sofcims = SetOfCims(node_id, parents_states_number, node_states_number, p_combs) + self.assertEqual(sofcims._node_id, node_id) + self.assertTrue(np.array_equal(sofcims._p_combs, p_combs)) + self.assertTrue(np.array_equal(sofcims._parents_states_number, parents_states_number)) + self.assertEqual(sofcims._node_states_number, node_states_number) + self.assertFalse(sofcims._actual_cims) + self.assertEqual(sofcims._state_residence_times.shape[0], np.prod(np.array(parents_states_number))) + self.assertEqual(len(sofcims._state_residence_times[0]), node_states_number) + self.assertEqual(sofcims._transition_matrices.shape[0], np.prod(np.array(parents_states_number))) + self.assertEqual(len(sofcims._transition_matrices[0][0]), node_states_number) + + def build_p_comb_structure_for_a_node(self, parents_values): + """ + Builds the combinatory structure that contains the combinations of all the values contained in parents_values. + + Parameters: + parents_values: the cardinalities of the nodes + Returns: + a numpy matrix containing a grid of the combinations + """ + tmp = [] + for val in parents_values: + tmp.append([x for x in range(val)]) + if len(parents_values) > 0: + parents_comb = np.array(np.meshgrid(*tmp)).T.reshape(-1, len(parents_values)) + if len(parents_values) > 1: + tmp_comb = parents_comb[:, 1].copy() + parents_comb[:, 1] = parents_comb[:, 0].copy() + parents_comb[:, 0] = tmp_comb + else: + parents_comb = np.array([[]], dtype=np.int) + return parents_comb + + def another_filtering_method(self,p_combs, mask, parent_value): + masked_combs = p_combs[:, mask] + indxs = [] + for indx, val in enumerate(masked_combs): + if val == parent_value: + indxs.append(indx) + return np.array(indxs) + + +if __name__ == '__main__': + unittest.main() diff --git a/PyCTBN/tests/structure_graph/test_structure.py b/PyCTBN/tests/structure_graph/test_structure.py new file mode 100644 index 0000000..a80b91c --- /dev/null +++ b/PyCTBN/tests/structure_graph/test_structure.py @@ -0,0 +1,81 @@ + +import unittest +import numpy as np +from ...PyCTBN.structure_graph.structure import Structure + + +class TestStructure(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.labels = ['X','Y','Z'] + cls.indxs = np.array([0,1,2]) + cls.vals = np.array([3,3,3]) + cls.edges = [('X','Z'),('Y','Z'), ('Z','Y')] + cls.vars_numb = len(cls.labels) + + def test_init(self): + s1 = Structure(self.labels, self.indxs, self.vals, self.edges, self.vars_numb) + self.assertListEqual(self.labels,s1.nodes_labels) + self.assertIsInstance(s1.nodes_indexes, np.ndarray) + self.assertTrue(np.array_equal(self.indxs, s1.nodes_indexes)) + self.assertIsInstance(s1.nodes_values, np.ndarray) + self.assertTrue(np.array_equal(self.vals, s1.nodes_values)) + self.assertListEqual(self.edges, s1.edges) + self.assertEqual(self.vars_numb, s1.total_variables_number) + + def test_get_node_id(self): + s1 = Structure(self.labels, self.indxs, self.vals, self.edges, self.vars_numb) + for indx, var in enumerate(self.labels): + self.assertEqual(var, s1.get_node_id(indx)) + 
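+ # Note for the reduced-structure tests below: np.delete returns a new array rather than modifying
+ # i2 and v2 in place, so they keep all three entries; the zip-based assertions still pass because
+ # zip stops at the shorter list l2.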
+ def test_get_node_indx(self): + l2 = self.labels[:] + l2.remove('Y') + i2 = self.indxs.copy() + np.delete(i2, 1) + v2 = self.vals.copy() + np.delete(v2, 1) + e2 = [('X','Z')] + n2 = self.vars_numb - 1 + s1 = Structure(l2, i2, v2, e2, n2) + for indx, var in zip(i2, l2): + self.assertEqual(indx, s1.get_node_indx(var)) + + def test_get_positional_node_indx(self): + l2 = self.labels[:] + l2.remove('Y') + i2 = self.indxs.copy() + np.delete(i2, 1) + v2 = self.vals.copy() + np.delete(v2, 1) + e2 = [('X', 'Z')] + n2 = self.vars_numb - 1 + s1 = Structure(l2, i2, v2, e2, n2) + for indx, var in enumerate(s1.nodes_labels): + self.assertEqual(indx, s1.get_positional_node_indx(var)) + + def test_get_states_number(self): + l2 = self.labels[:] + l2.remove('Y') + i2 = self.indxs.copy() + np.delete(i2, 1) + v2 = self.vals.copy() + np.delete(v2, 1) + e2 = [('X', 'Z')] + n2 = self.vars_numb - 1 + s1 = Structure(l2, i2, v2, e2, n2) + for val, node in zip(v2, l2): + self.assertEqual(val, s1.get_states_number(node)) + + def test_equality(self): + s1 = Structure(self.labels, self.indxs, self.vals, self.edges, self.vars_numb) + s2 = Structure(self.labels, self.indxs, self.vals, self.edges, self.vars_numb) + self.assertEqual(s1, s2) + + def test_repr(self): + s1 = Structure(self.labels, self.indxs, self.vals, self.edges, self.vars_numb) + print(s1) + + +if __name__ == '__main__': + unittest.main() diff --git a/PyCTBN/tests/structure_graph/test_trajectory.py b/PyCTBN/tests/structure_graph/test_trajectory.py new file mode 100644 index 0000000..b06b17b --- /dev/null +++ b/PyCTBN/tests/structure_graph/test_trajectory.py @@ -0,0 +1,27 @@ + +import unittest +import numpy as np +import glob + +from ...PyCTBN.structure_graph.trajectory import Trajectory +from ...PyCTBN.utility.json_importer import JsonImporter + +class TestTrajectory(unittest.TestCase): + + @classmethod + def setUpClass(cls) -> None: + cls.read_files = glob.glob(os.path.join('./test_data', "*.json")) + cls.importer = JsonImporter(cls.read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name') + cls.importer.import_data(0) + + def test_init(self): + t1 = Trajectory(self.importer.build_list_of_samples_array(self.importer.concatenated_samples), + len(self.importer.sorter) + 1) + self.assertTrue(np.array_equal(self.importer.concatenated_samples.iloc[:, 0].to_numpy(), t1.times)) + self.assertTrue(np.array_equal(self.importer.concatenated_samples.iloc[:,1:].to_numpy(), t1.complete_trajectory)) + self.assertTrue(np.array_equal(self.importer.concatenated_samples.iloc[:, 1: len(self.importer.sorter) + 1], t1.trajectory)) + self.assertEqual(len(self.importer.sorter) + 1, t1._original_cols_number) + + +if __name__ == '__main__': + unittest.main() diff --git a/PyCTBN/tests/utility/test_cache.py b/PyCTBN/tests/utility/test_cache.py new file mode 100644 index 0000000..5d9c766 --- /dev/null +++ b/PyCTBN/tests/utility/test_cache.py @@ -0,0 +1,57 @@ + +import unittest +import numpy as np + +from ...PyCTBN.utility.cache import Cache +from ...PyCTBN.structure_graph.set_of_cims import SetOfCims + + +class TestCache(unittest.TestCase): + + def test_init(self): + c1 = Cache() + self.assertFalse(c1._list_of_sets_of_parents) + self.assertFalse(c1._actual_cache) + + def test_put(self): + c1 = Cache() + pset1 = {'X', 'Y'} + sofc1 = SetOfCims('Z', [], 3, np.array([])) + c1.put(pset1, sofc1) + self.assertEqual(1, len(c1._actual_cache)) + self.assertEqual(1, len(c1._list_of_sets_of_parents)) + self.assertEqual(sofc1, c1._actual_cache[0]) + pset2 = {'X'} + sofc2 = 
SetOfCims('Z', [], 3, np.array([])) + c1.put(pset2, sofc2) + self.assertEqual(2, len(c1._actual_cache)) + self.assertEqual(2, len(c1._list_of_sets_of_parents)) + self.assertEqual(sofc2, c1._actual_cache[1]) + + def test_find(self): + c1 = Cache() + pset1 = {'X', 'Y'} + sofc1 = SetOfCims('Z', [], 3, np.array([])) + c1.put(pset1, sofc1) + self.assertEqual(1, len(c1._actual_cache)) + self.assertEqual(1, len(c1._list_of_sets_of_parents)) + self.assertIsInstance(c1.find(pset1), SetOfCims) + self.assertEqual(sofc1, c1.find(pset1)) + self.assertIsInstance(c1.find({'Y', 'X'}), SetOfCims) + self.assertEqual(sofc1, c1.find({'Y', 'X'})) + self.assertIsNone(c1.find({'X'})) + + def test_clear(self): + c1 = Cache() + pset1 = {'X', 'Y'} + sofc1 = SetOfCims('Z', [], 3, np.array([])) + c1.put(pset1, sofc1) + self.assertEqual(1, len(c1._actual_cache)) + self.assertEqual(1, len(c1._list_of_sets_of_parents)) + c1.clear() + self.assertFalse(c1._list_of_sets_of_parents) + self.assertFalse(c1._actual_cache) + + +if __name__ == '__main__': + unittest.main() diff --git a/PyCTBN/tests/utility/test_json_importer.py b/PyCTBN/tests/utility/test_json_importer.py new file mode 100644 index 0000000..9ae1aad --- /dev/null +++ b/PyCTBN/tests/utility/test_json_importer.py @@ -0,0 +1,176 @@ + +import unittest +import os +import glob +import numpy as np +import pandas as pd +from ...PyCTBN.utility.json_importer import JsonImporter + +import json + + + +class TestJsonImporter(unittest.TestCase): + + @classmethod + def setUpClass(cls) -> None: + cls.read_files = glob.glob(os.path.join('./PyCTBN/test_data', "*.json")) + + def test_init(self): + j1 = JsonImporter("./PyCTBN/test_data/networks_and_trajectories_binary_data_01_3.json", 'samples', 'dyn.str', 'variables', 'Time', 'Name') + self.assertEqual(j1._samples_label, 'samples') + self.assertEqual(j1._structure_label, 'dyn.str') + self.assertEqual(j1._variables_label, 'variables') + self.assertEqual(j1._time_key, 'Time') + self.assertEqual(j1._variables_key, 'Name') + self.assertEqual(j1._file_path, "./PyCTBN/test_data/networks_and_trajectories_binary_data_01_3.json") + self.assertIsNone(j1._df_samples_list) + self.assertIsNone(j1.variables) + self.assertIsNone(j1.structure) + self.assertEqual(j1.concatenated_samples,[]) + self.assertIsNone(j1.sorter) + self.assertIsNone(j1._array_indx) + self.assertIsInstance(j1._raw_data, list) + + def test_read_json_file_found(self): + data_set = {"key1": [1, 2, 3], "key2": [4, 5, 6]} + with open('data.json', 'w') as f: + json.dump(data_set, f) + path = os.getcwd() + path = path + '/data.json' + j1 = JsonImporter(path, '', '', '', '', '') + self.assertTrue(self.ordered(data_set) == self.ordered(j1._raw_data)) + os.remove('data.json') + + def test_read_json_file_not_found(self): + path = os.getcwd() + path = path + '/data.json' + self.assertRaises(FileNotFoundError, JsonImporter, path, '', '', '', '', '') + + def test_build_sorter(self): + j1 = JsonImporter("./PyCTBN/test_data/networks_and_trajectories_binary_data_01_3.json", 'samples', 'dyn.str', 'variables', 'Time', 'Name') + df_samples_list = j1.normalize_trajectories(j1._raw_data, 0, j1._samples_label) + sorter = j1.build_sorter(df_samples_list[0]) + self.assertListEqual(sorter, list(df_samples_list[0].columns.values)[1:]) + + def test_normalize_trajectories(self): + j1 = JsonImporter("./PyCTBN/test_data/networks_and_trajectories_binary_data_01_3.json", 'samples', 'dyn.str', 'variables', 'Time', 'Name') + df_samples_list = j1.normalize_trajectories(j1._raw_data, 0, j1._samples_label) + 
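+ # normalize_trajectories is expected to return one DataFrame per sample stored under the 'samples' key (asserted below).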
self.assertEqual(len(df_samples_list), len(j1._raw_data[0][j1._samples_label])) + + def test_normalize_trajectories_wrong_indx(self): + j1 = JsonImporter("./PyCTBN/test_data/networks_and_trajectories_binary_data_01_3.json", 'samples', 'dyn.str', 'variables', 'Time', 'Name') + self.assertRaises(IndexError, j1.normalize_trajectories, j1._raw_data, 474, j1._samples_label) + + def test_normalize_trajectories_wrong_key(self): + j1 = JsonImporter("./PyCTBN/test_data/networks_and_trajectories_binary_data_01_3.json", 'sample', 'dyn.str', 'variables', 'Time', 'Name') + self.assertRaises(KeyError, j1.normalize_trajectories, j1._raw_data, 0, j1._samples_label) + + def test_compute_row_delta_single_samples_frame(self): + j1 = JsonImporter("./PyCTBN/test_data/networks_and_trajectories_binary_data_01_3.json", 'samples', 'dyn.str', 'variables', 'Time', 'Name') + j1._array_indx = 0 + j1._df_samples_list = j1.import_trajectories(j1._raw_data) + sample_frame = j1._df_samples_list[0] + original_copy = sample_frame.copy() + columns_header = list(sample_frame.columns.values) + shifted_cols_header = [s + "S" for s in columns_header[1:]] + new_sample_frame = j1.compute_row_delta_sigle_samples_frame(sample_frame, columns_header[1:], + shifted_cols_header) + self.assertEqual(len(list(sample_frame.columns.values)) + len(shifted_cols_header), + len(list(new_sample_frame.columns.values))) + self.assertEqual(sample_frame.shape[0] - 1, new_sample_frame.shape[0]) + for indx, row in new_sample_frame.iterrows(): + self.assertAlmostEqual(row['Time'], + original_copy.iloc[indx + 1]['Time'] - original_copy.iloc[indx]['Time']) + for indx, row in new_sample_frame.iterrows(): + np.array_equal(np.array(row[columns_header[1:]],dtype=int), + np.array(original_copy.iloc[indx][columns_header[1:]],dtype=int)) + np.array_equal(np.array(row[shifted_cols_header], dtype=int), + np.array(original_copy.iloc[indx + 1][columns_header[1:]], dtype=int)) + + def test_compute_row_delta_in_all_frames(self): + j1 = JsonImporter("./PyCTBN/test_data/networks_and_trajectories_binary_data_01_3.json", 'samples', 'dyn.str', 'variables', 'Time', 'Name') + j1._array_indx = 0 + j1._df_samples_list = j1.import_trajectories(j1._raw_data) + j1._sorter = j1.build_sorter(j1._df_samples_list[0]) + j1.compute_row_delta_in_all_samples_frames(j1._df_samples_list) + self.assertEqual(list(j1._df_samples_list[0].columns.values), + list(j1.concatenated_samples.columns.values)[:len(list(j1._df_samples_list[0].columns.values))]) + self.assertEqual(list(j1.concatenated_samples.columns.values)[0], j1._time_key) + + def test_compute_row_delta_in_all_frames_not_init_sorter(self): + j1 = JsonImporter("./PyCTBN/test_data/networks_and_trajectories_binary_data_01_3.json", 'samples', 'dyn.str', 'variables', 'Time', 'Name') + j1._array_indx = 0 + j1._df_samples_list = j1.import_trajectories(j1._raw_data) + self.assertRaises(RuntimeError, j1.compute_row_delta_in_all_samples_frames, j1._df_samples_list) + + def test_clear_data_frame_list(self): + j1 = JsonImporter("./PyCTBN/test_data/networks_and_trajectories_binary_data_01_3.json", 'samples', 'dyn.str', 'variables', 'Time', 'Name') + j1._array_indx = 0 + j1._df_samples_list = j1.import_trajectories(j1._raw_data) + j1._sorter = j1.build_sorter(j1._df_samples_list[0]) + j1.compute_row_delta_in_all_samples_frames(j1._df_samples_list) + j1.clear_data_frame_list() + for df in j1._df_samples_list: + self.assertTrue(df.empty) + + def test_clear_concatenated_frame(self): + j1 = 
JsonImporter("./PyCTBN/test_data/networks_and_trajectories_binary_data_01_3.json", 'samples', 'dyn.str', 'variables', 'Time', 'Name') + j1.import_data(0) + j1.clear_concatenated_frame() + self.assertTrue(j1.concatenated_samples.empty) + + def test_import_variables(self): + j1 = JsonImporter("./PyCTBN/test_data/networks_and_trajectories_binary_data_01_3.json", 'samples', 'dyn.str', 'variables', 'Time', 'Name') + sorter = ['X', 'Y', 'Z'] + raw_data = [{'variables':{"Name": ['X', 'Y', 'Z'], "value": [3, 3, 3]}}] + j1._array_indx = 0 + df_var = j1.import_variables(raw_data) + self.assertEqual(list(df_var[j1._variables_key]), sorter) + + def test_import_structure(self): + j1 = JsonImporter("./PyCTBN/test_data/networks_and_trajectories_binary_data_01_3.json", 'samples', 'dyn.str', 'variables', 'Time', 'Name') + raw_data = [{"dyn.str":[{"From":"X","To":"Z"},{"From":"Y","To":"Z"},{"From":"Z","To":"Y"}]}] + j1._array_indx = 0 + df_struct = j1.import_structure(raw_data) + self.assertIsInstance(df_struct, pd.DataFrame) + + def test_import_sampled_cims(self): + j1 = JsonImporter("./PyCTBN/test_data/networks_and_trajectories_binary_data_01_3.json", 'samples', 'dyn.str', 'variables', 'Time', 'Name') + raw_data = j1.read_json_file() + j1._array_indx = 0 + j1._df_samples_list = j1.import_trajectories(raw_data) + j1._sorter = j1.build_sorter(j1._df_samples_list[0]) + cims = j1.import_sampled_cims(raw_data, 0, 'dyn.cims') + self.assertEqual(list(cims.keys()), j1.sorter) + + def test_dataset_id(self): + j1 = JsonImporter("./PyCTBN/test_data/networks_and_trajectories_binary_data_01_3.json", 'samples', 'dyn.str', 'variables', 'Time', 'Name') + array_indx = 0 + j1.import_data(array_indx) + self.assertEqual(array_indx, j1.dataset_id()) + + def test_file_path(self): + j1 = JsonImporter("./PyCTBN/test_data/networks_and_trajectories_binary_data_01_3.json", 'samples', 'dyn.str', 'variables', 'Time', 'Name') + self.assertEqual(j1.file_path, "./PyCTBN/data/networks_and_trajectories_binary_data_01_3.json") + + def test_import_data(self): + j1 = JsonImporter("./PyCTBN/test_data/networks_and_trajectories_binary_data_01_3.json", 'samples', 'dyn.str', 'variables', 'Time', 'Name') + j1.import_data(0) + self.assertEqual(list(j1.variables[j1._variables_key]), + list(j1.concatenated_samples.columns.values[1:len(j1.variables[j1._variables_key]) + 1])) + print(j1.variables) + print(j1.structure) + print(j1.concatenated_samples) + + def ordered(self, obj): + if isinstance(obj, dict): + return sorted((k, self.ordered(v)) for k, v in obj.items()) + if isinstance(obj, list): + return sorted(self.ordered(x) for x in obj) + else: + return obj + + +if __name__ == '__main__': + unittest.main() diff --git a/PyCTBN/tests/utility/test_sample_importer.py b/PyCTBN/tests/utility/test_sample_importer.py new file mode 100644 index 0000000..596e5d2 --- /dev/null +++ b/PyCTBN/tests/utility/test_sample_importer.py @@ -0,0 +1,80 @@ + +import unittest +import os +import glob +import numpy as np +import pandas as pd +from ...PyCTBN.utility.sample_importer import SampleImporter +from ...PyCTBN.structure_graph.sample_path import SamplePath + +import json + + + +class TestSampleImporter(unittest.TestCase): + + @classmethod + def setUpClass(cls) -> None: + with open("./PyCTBN/test_data/networks_and_trajectories_binary_data_01_3.json") as f: + raw_data = json.load(f) + + trajectory_list_raw= raw_data[0]["samples"] + + cls.trajectory_list = [pd.DataFrame(sample) for sample in trajectory_list_raw] + + cls.variables= pd.DataFrame(raw_data[0]["variables"]) + 
cls.prior_net_structure = pd.DataFrame(raw_data[0]["dyn.str"]) + + + def test_init(self): + sample_importer = SampleImporter( + trajectory_list=self.trajectory_list, + variables=self.variables, + prior_net_structure=self.prior_net_structure + ) + + sample_importer.import_data() + + s1 = SamplePath(sample_importer) + s1.build_trajectories() + s1.build_structure() + s1.clear_memory() + + self.assertEqual(len(s1._importer._df_samples_list), 300) + self.assertIsInstance(s1._importer._df_samples_list,list) + self.assertIsInstance(s1._importer._df_samples_list[0],pd.DataFrame) + self.assertEqual(len(s1._importer._df_variables), 3) + self.assertIsInstance(s1._importer._df_variables,pd.DataFrame) + self.assertEqual(len(s1._importer._df_structure), 2) + self.assertIsInstance(s1._importer._df_structure,pd.DataFrame) + + def test_order(self): + sample_importer = SampleImporter( + trajectory_list=self.trajectory_list, + variables=self.variables, + prior_net_structure=self.prior_net_structure + ) + + sample_importer.import_data() + + s1 = SamplePath(sample_importer) + s1.build_trajectories() + s1.build_structure() + s1.clear_memory() + + for count,var in enumerate(s1._importer._df_samples_list[0].columns[1:]): + self.assertEqual(s1._importer._sorter[count],var) + + + + def ordered(self, obj): + if isinstance(obj, dict): + return sorted((k, self.ordered(v)) for k, v in obj.items()) + if isinstance(obj, list): + return sorted(self.ordered(x) for x in obj) + else: + return obj + + +if __name__ == '__main__': + unittest.main() diff --git a/README.md b/README.md index ba6427e..dfe1815 100644 --- a/README.md +++ b/README.md @@ -1 +1 @@ -# PyCTBN +# CTBN_Project \ No newline at end of file diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..d4bb2cb --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
+%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/_build/doctrees/classes.doctree b/docs/_build/doctrees/classes.doctree new file mode 100644 index 0000000..5d6c6b6 Binary files /dev/null and b/docs/_build/doctrees/classes.doctree differ diff --git a/docs/_build/doctrees/classes.estimators.doctree b/docs/_build/doctrees/classes.estimators.doctree new file mode 100644 index 0000000..69a7a93 Binary files /dev/null and b/docs/_build/doctrees/classes.estimators.doctree differ diff --git a/docs/_build/doctrees/classes.optimizers.doctree b/docs/_build/doctrees/classes.optimizers.doctree new file mode 100644 index 0000000..eb97e6b Binary files /dev/null and b/docs/_build/doctrees/classes.optimizers.doctree differ diff --git a/docs/_build/doctrees/classes.structure_graph.doctree b/docs/_build/doctrees/classes.structure_graph.doctree new file mode 100644 index 0000000..2ed9103 Binary files /dev/null and b/docs/_build/doctrees/classes.structure_graph.doctree differ diff --git a/docs/_build/doctrees/classes.utility.doctree b/docs/_build/doctrees/classes.utility.doctree new file mode 100644 index 0000000..fa6dc0a Binary files /dev/null and b/docs/_build/doctrees/classes.utility.doctree differ diff --git a/docs/_build/doctrees/environment.pickle b/docs/_build/doctrees/environment.pickle new file mode 100644 index 0000000..fe70b1d Binary files /dev/null and b/docs/_build/doctrees/environment.pickle differ diff --git a/docs/_build/doctrees/examples.doctree b/docs/_build/doctrees/examples.doctree new file mode 100644 index 0000000..f16ef4e Binary files /dev/null and b/docs/_build/doctrees/examples.doctree differ diff --git a/docs/_build/doctrees/index.doctree b/docs/_build/doctrees/index.doctree new file mode 100644 index 0000000..8db117a Binary files /dev/null and b/docs/_build/doctrees/index.doctree differ diff --git a/docs/_build/doctrees/modules.doctree b/docs/_build/doctrees/modules.doctree new file mode 100644 index 0000000..af89f10 Binary files /dev/null and b/docs/_build/doctrees/modules.doctree differ diff --git a/docs/_build/html/.buildinfo b/docs/_build/html/.buildinfo new file mode 100644 index 0000000..80fb6bc --- /dev/null +++ b/docs/_build/html/.buildinfo @@ -0,0 +1,4 @@ +# Sphinx build info version 1 +# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. +config: d42d832abe4d6a9e23dc099c95623ecc +tags: 645f666f9bcd5a90fca523b33c5a78b7 diff --git a/docs/_build/html/_sources/classes.estimators.rst.txt b/docs/_build/html/_sources/classes.estimators.rst.txt new file mode 100644 index 0000000..5ecab8b --- /dev/null +++ b/docs/_build/html/_sources/classes.estimators.rst.txt @@ -0,0 +1,53 @@ +PyCTBN.estimators package +========================== + +Submodules +---------- + +PyCTBN.estimators.fam\_score\_calculator module +------------------------------------------------ + +.. automodule:: PyCTBN.estimators.fam_score_calculator + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.estimators.parameters\_estimator module +----------------------------------------------- + +.. automodule:: PyCTBN.estimators.parameters_estimator + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.estimators.structure\_constraint\_based\_estimator module +----------------------------------------------------------------- + +.. 
automodule:: PyCTBN.estimators.structure_constraint_based_estimator + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.estimators.structure\_estimator module +---------------------------------------------- + +.. automodule:: PyCTBN.estimators.structure_estimator + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.estimators.structure\_score\_based\_estimator module +------------------------------------------------------------ + +.. automodule:: PyCTBN.estimators.structure_score_based_estimator + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: PyCTBN.estimators + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/_build/html/_sources/classes.optimizers.rst.txt b/docs/_build/html/_sources/classes.optimizers.rst.txt new file mode 100644 index 0000000..0730b68 --- /dev/null +++ b/docs/_build/html/_sources/classes.optimizers.rst.txt @@ -0,0 +1,45 @@ +PyCTBN.optimizers package +========================== + +Submodules +---------- + +PyCTBN.optimizers.constraint\_based\_optimizer module +------------------------------------------------------ + +.. automodule:: PyCTBN.optimizers.constraint_based_optimizer + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.optimizers.hill\_climbing\_search module +------------------------------------------------ + +.. automodule:: PyCTBN.optimizers.hill_climbing_search + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.optimizers.optimizer module +----------------------------------- + +.. automodule:: PyCTBN.optimizers.optimizer + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.optimizers.tabu\_search module +-------------------------------------- + +.. automodule:: PyCTBN.optimizers.tabu_search + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: PyCTBN.optimizers + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/_build/html/_sources/classes.rst.txt b/docs/_build/html/_sources/classes.rst.txt new file mode 100644 index 0000000..0ff219f --- /dev/null +++ b/docs/_build/html/_sources/classes.rst.txt @@ -0,0 +1,21 @@ +PyCTBN package +=============== + +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + PyCTBN.estimators + PyCTBN.optimizers + PyCTBN.structure_graph + PyCTBN.utility + +Module contents +--------------- + +.. automodule:: PyCTBN + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/_build/html/_sources/classes.structure_graph.rst.txt b/docs/_build/html/_sources/classes.structure_graph.rst.txt new file mode 100644 index 0000000..489b7b6 --- /dev/null +++ b/docs/_build/html/_sources/classes.structure_graph.rst.txt @@ -0,0 +1,77 @@ +PyCTBN.structure\_graph package +================================ + +Submodules +---------- + +PyCTBN.structure\_graph.abstract\_sample\_path module +------------------------------------------------------ + +.. automodule:: PyCTBN.structure_graph.abstract_sample_path + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.structure\_graph.conditional\_intensity\_matrix module +-------------------------------------------------------------- + +.. automodule:: PyCTBN.structure_graph.conditional_intensity_matrix + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.structure\_graph.network\_graph module +---------------------------------------------- + +.. 
automodule:: PyCTBN.structure_graph.network_graph + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.structure\_graph.sample\_path module +-------------------------------------------- + +.. automodule:: PyCTBN.structure_graph.sample_path + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.structure\_graph.set\_of\_cims module +--------------------------------------------- + +.. automodule:: PyCTBN.structure_graph.set_of_cims + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.structure\_graph.sets\_of\_cims\_container module +--------------------------------------------------------- + +.. automodule:: PyCTBN.structure_graph.sets_of_cims_container + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.structure\_graph.structure module +----------------------------------------- + +.. automodule:: PyCTBN.structure_graph.structure + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.structure\_graph.trajectory module +------------------------------------------ + +.. automodule:: PyCTBN.structure_graph.trajectory + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: PyCTBN.structure_graph + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/_build/html/_sources/classes.utility.rst.txt b/docs/_build/html/_sources/classes.utility.rst.txt new file mode 100644 index 0000000..6285db8 --- /dev/null +++ b/docs/_build/html/_sources/classes.utility.rst.txt @@ -0,0 +1,53 @@ +PyCTBN.utility package +======================= + +Submodules +---------- + +PyCTBN.utility.abstract\_importer module +----------------------------------------- + +.. automodule:: PyCTBN.utility.abstract_importer + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.utility.cache module +---------------------------- + +.. automodule:: PyCTBN.utility.cache + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.utility.decorators module +--------------------------------- + +.. automodule:: PyCTBN.utility.decorators + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.utility.json\_importer module +------------------------------------- + +.. automodule:: PyCTBN.utility.json_importer + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.utility.sample\_importer module +--------------------------------------- + +.. automodule:: PyCTBN.utility.sample_importer + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: PyCTBN.utility + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/_build/html/_sources/examples.rst.txt b/docs/_build/html/_sources/examples.rst.txt new file mode 100644 index 0000000..b1e07c5 --- /dev/null +++ b/docs/_build/html/_sources/examples.rst.txt @@ -0,0 +1,121 @@ +Examples +======== + +Installation/Usage +****************** +Download the release in .tar.gz or .whl format and install it with pip:: + + $pip install PyCTBN-1.0.tar.gz + + +Implementing your own data importer +*********************************** +.. code-block:: python + + """This example demonstrates the implementation of a simple data importer that extends the AbstractImporter class to import data in CSV format. + The network in this example has three ternary nodes and no prior network structure.
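+ The importer below fills self._df_samples_list, self._df_variables and self._sorter and then calls compute_row_delta_in_all_samples_frames inside its import_data method.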
+ """ + + import typing + import pandas as pd + + from PyCTBN import AbstractImporter + + class CSVImporter(AbstractImporter): + + def __init__(self, file_path): + self._df_samples_list = None + super(CSVImporter, self).__init__(file_path) + + def import_data(self): + self.read_csv_file() + self._sorter = self.build_sorter(self._df_samples_list[0]) + self.import_variables() + self.compute_row_delta_in_all_samples_frames(self._df_samples_list) + + def read_csv_file(self): + df = pd.read_csv(self._file_path) + df.drop(df.columns[[0]], axis=1, inplace=True) + self._df_samples_list = [df] + + def import_variables(self): + values_list = [3 for var in self._sorter] + # initialize dict of lists + data = {'Name':self._sorter, 'Value':values_list} + # create the pandas DataFrame + self._df_variables = pd.DataFrame(data) + + def build_sorter(self, sample_frame: pd.DataFrame) -> typing.List: + return list(sample_frame.columns)[1:] + + def dataset_id(self) -> object: + pass + +Parameters Estimation Example +***************************** + +.. code-block:: python + + import glob + import os + + from PyCTBN import JsonImporter + from PyCTBN import SamplePath + from PyCTBN import NetworkGraph + from PyCTBN import ParametersEstimator + + + def main(): + read_files = glob.glob(os.path.join('./data', "*.json")) # take all json files in this directory + # import the data + importer = JsonImporter(read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name') + importer.import_data(0) + # create a SamplePath object passing an already filled AbstractImporter object + s1 = SamplePath(importer) + # build the trajectories and the structural information + s1.build_trajectories() + s1.build_structure() + print(s1.structure.edges) + print(s1.structure.nodes_values) + # from the Structure object build the graph + g = NetworkGraph(s1.structure) + # select the node whose parameters you want to estimate + node = g.nodes[2] + print("Node", node) + # initialize the graph specifically for this node + g.fast_init(node) + # use the SamplePath and the graph to create a ParametersEstimator object + p1 = ParametersEstimator(s1.trajectories, g) + # initialize the estimator specifically for this node + p1.fast_init(node) + # compute the parameters + sofc1 = p1.compute_parameters_for_node(node) + # the estimated CIMs are inside the resulting SetOfCims object + print(sofc1.actual_cims) + +Structure Estimation Example +**************************** + +.. code-block:: python + + import glob + import os + + from PyCTBN import JsonImporter + from PyCTBN import SamplePath + from PyCTBN import StructureEstimator + + def structure_estimation_example(): + + # read the json files in the ./data path + read_files = glob.glob(os.path.join('./data', "*.json")) + # initialize a JsonImporter object for the first file + importer = JsonImporter(read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name') + # import the data at index 0 of the outer json array + importer.import_data(0) + # construct a SamplePath object passing a filled AbstractImporter + s1 = SamplePath(importer) + # build the trajectories + s1.build_trajectories() + # build the real structure + s1.build_structure() + # construct a StructureEstimator object + se1 = StructureEstimator(s1, 0.1, 0.1) + # call the ctpc algorithm + se1.ctpc_algorithm() + # the adjacency matrix of the estimated structure + print(se1.adjacency_matrix()) + # save results to a json file + se1.save_results() + diff --git a/docs/_build/html/_sources/index.rst.txt b/docs/_build/html/_sources/index.rst.txt new file mode 100644 index 0000000..236bf53 --- /dev/null +++ b/docs/_build/html/_sources/index.rst.txt @@ -0,0 +1,21 @@ +..
PyCTBN documentation master file, created by + sphinx-quickstart on Wed Feb 24 18:06:35 2021. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to PyCTBN's documentation! +================================== + +.. toctree:: + :maxdepth: 3 + :caption: Contents: + + examples + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/_build/html/_sources/modules.rst.txt b/docs/_build/html/_sources/modules.rst.txt new file mode 100644 index 0000000..5b5819a --- /dev/null +++ b/docs/_build/html/_sources/modules.rst.txt @@ -0,0 +1,7 @@ +PyCTBN +======= + +.. toctree:: + :maxdepth: 4 + + PyCTBN diff --git a/docs/_build/html/_static/basic.css b/docs/_build/html/_static/basic.css new file mode 100644 index 0000000..24a49f0 --- /dev/null +++ b/docs/_build/html/_static/basic.css @@ -0,0 +1,856 @@ +/* + * basic.css + * ~~~~~~~~~ + * + * Sphinx stylesheet -- basic theme. + * + * :copyright: Copyright 2007-2020 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +/* -- main layout ----------------------------------------------------------- */ + +div.clearer { + clear: both; +} + +div.section::after { + display: block; + content: ''; + clear: left; +} + +/* -- relbar ---------------------------------------------------------------- */ + +div.related { + width: 100%; + font-size: 90%; +} + +div.related h3 { + display: none; +} + +div.related ul { + margin: 0; + padding: 0 0 0 10px; + list-style: none; +} + +div.related li { + display: inline; +} + +div.related li.right { + float: right; + margin-right: 5px; +} + +/* -- sidebar --------------------------------------------------------------- */ + +div.sphinxsidebarwrapper { + padding: 10px 5px 0 10px; +} + +div.sphinxsidebar { + float: left; + width: 230px; + margin-left: -100%; + font-size: 90%; + word-wrap: break-word; + overflow-wrap : break-word; +} + +div.sphinxsidebar ul { + list-style: none; +} + +div.sphinxsidebar ul ul, +div.sphinxsidebar ul.want-points { + margin-left: 20px; + list-style: square; +} + +div.sphinxsidebar ul ul { + margin-top: 0; + margin-bottom: 0; +} + +div.sphinxsidebar form { + margin-top: 10px; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + +div.sphinxsidebar #searchbox form.search { + overflow: hidden; +} + +div.sphinxsidebar #searchbox input[type="text"] { + float: left; + width: 80%; + padding: 0.25em; + box-sizing: border-box; +} + +div.sphinxsidebar #searchbox input[type="submit"] { + float: left; + width: 20%; + border-left: none; + padding: 0.25em; + box-sizing: border-box; +} + + +img { + border: 0; + max-width: 100%; +} + +/* -- search page ----------------------------------------------------------- */ + +ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li div.context { + color: #888; + margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* -- index page ------------------------------------------------------------ */ + +table.contentstable { + width: 90%; + margin-left: auto; + margin-right: auto; +} + +table.contentstable p.biglink { + line-height: 150%; +} + +a.biglink { + font-size: 1.3em; +} + +span.linkdescr { + font-style: 
italic; + padding-top: 5px; + font-size: 90%; +} + +/* -- general index --------------------------------------------------------- */ + +table.indextable { + width: 100%; +} + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable ul { + margin-top: 0; + margin-bottom: 0; + list-style-type: none; +} + +table.indextable > tbody > tr > td > ul { + padding-left: 0em; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +div.modindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +div.genindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +/* -- domain module index --------------------------------------------------- */ + +table.modindextable td { + padding: 2px; + border-collapse: collapse; +} + +/* -- general body styles --------------------------------------------------- */ + +div.body { + min-width: 450px; + max-width: 800px; +} + +div.body p, div.body dd, div.body li, div.body blockquote { + -moz-hyphens: auto; + -ms-hyphens: auto; + -webkit-hyphens: auto; + hyphens: auto; +} + +a.headerlink { + visibility: hidden; +} + +a.brackets:before, +span.brackets > a:before{ + content: "["; +} + +a.brackets:after, +span.brackets > a:after { + content: "]"; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink, +caption:hover > a.headerlink, +p.caption:hover > a.headerlink, +div.code-block-caption:hover > a.headerlink { + visibility: visible; +} + +div.body p.caption { + text-align: inherit; +} + +div.body td { + text-align: left; +} + +.first { + margin-top: 0 !important; +} + +p.rubric { + margin-top: 30px; + font-weight: bold; +} + +img.align-left, .figure.align-left, object.align-left { + clear: left; + float: left; + margin-right: 1em; +} + +img.align-right, .figure.align-right, object.align-right { + clear: right; + float: right; + margin-left: 1em; +} + +img.align-center, .figure.align-center, object.align-center { + display: block; + margin-left: auto; + margin-right: auto; +} + +img.align-default, .figure.align-default { + display: block; + margin-left: auto; + margin-right: auto; +} + +.align-left { + text-align: left; +} + +.align-center { + text-align: center; +} + +.align-default { + text-align: center; +} + +.align-right { + text-align: right; +} + +/* -- sidebars -------------------------------------------------------------- */ + +div.sidebar { + margin: 0 0 0.5em 1em; + border: 1px solid #ddb; + padding: 7px; + background-color: #ffe; + width: 40%; + float: right; + clear: right; + overflow-x: auto; +} + +p.sidebar-title { + font-weight: bold; +} + +div.admonition, div.topic, blockquote { + clear: left; +} + +/* -- topics ---------------------------------------------------------------- */ + +div.topic { + border: 1px solid #ccc; + padding: 7px; + margin: 10px 0 10px 0; +} + +p.topic-title { + font-size: 1.1em; + font-weight: bold; + margin-top: 10px; +} + +/* -- admonitions ----------------------------------------------------------- */ + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 7px; +} + +div.admonition dt { + font-weight: bold; +} + +p.admonition-title { + margin: 0px 10px 5px 0px; + 
font-weight: bold; +} + +div.body p.centered { + text-align: center; + margin-top: 25px; +} + +/* -- content of sidebars/topics/admonitions -------------------------------- */ + +div.sidebar > :last-child, +div.topic > :last-child, +div.admonition > :last-child { + margin-bottom: 0; +} + +div.sidebar::after, +div.topic::after, +div.admonition::after, +blockquote::after { + display: block; + content: ''; + clear: both; +} + +/* -- tables ---------------------------------------------------------------- */ + +table.docutils { + margin-top: 10px; + margin-bottom: 10px; + border: 0; + border-collapse: collapse; +} + +table.align-center { + margin-left: auto; + margin-right: auto; +} + +table.align-default { + margin-left: auto; + margin-right: auto; +} + +table caption span.caption-number { + font-style: italic; +} + +table caption span.caption-text { +} + +table.docutils td, table.docutils th { + padding: 1px 8px 1px 5px; + border-top: 0; + border-left: 0; + border-right: 0; + border-bottom: 1px solid #aaa; +} + +table.footnote td, table.footnote th { + border: 0 !important; +} + +th { + text-align: left; + padding-right: 5px; +} + +table.citation { + border-left: solid 1px gray; + margin-left: 1px; +} + +table.citation td { + border-bottom: none; +} + +th > :first-child, +td > :first-child { + margin-top: 0px; +} + +th > :last-child, +td > :last-child { + margin-bottom: 0px; +} + +/* -- figures --------------------------------------------------------------- */ + +div.figure { + margin: 0.5em; + padding: 0.5em; +} + +div.figure p.caption { + padding: 0.3em; +} + +div.figure p.caption span.caption-number { + font-style: italic; +} + +div.figure p.caption span.caption-text { +} + +/* -- field list styles ----------------------------------------------------- */ + +table.field-list td, table.field-list th { + border: 0 !important; +} + +.field-list ul { + margin: 0; + padding-left: 1em; +} + +.field-list p { + margin: 0; +} + +.field-name { + -moz-hyphens: manual; + -ms-hyphens: manual; + -webkit-hyphens: manual; + hyphens: manual; +} + +/* -- hlist styles ---------------------------------------------------------- */ + +table.hlist { + margin: 1em 0; +} + +table.hlist td { + vertical-align: top; +} + + +/* -- other body styles ----------------------------------------------------- */ + +ol.arabic { + list-style: decimal; +} + +ol.loweralpha { + list-style: lower-alpha; +} + +ol.upperalpha { + list-style: upper-alpha; +} + +ol.lowerroman { + list-style: lower-roman; +} + +ol.upperroman { + list-style: upper-roman; +} + +:not(li) > ol > li:first-child > :first-child, +:not(li) > ul > li:first-child > :first-child { + margin-top: 0px; +} + +:not(li) > ol > li:last-child > :last-child, +:not(li) > ul > li:last-child > :last-child { + margin-bottom: 0px; +} + +ol.simple ol p, +ol.simple ul p, +ul.simple ol p, +ul.simple ul p { + margin-top: 0; +} + +ol.simple > li:not(:first-child) > p, +ul.simple > li:not(:first-child) > p { + margin-top: 0; +} + +ol.simple p, +ul.simple p { + margin-bottom: 0; +} + +dl.footnote > dt, +dl.citation > dt { + float: left; + margin-right: 0.5em; +} + +dl.footnote > dd, +dl.citation > dd { + margin-bottom: 0em; +} + +dl.footnote > dd:after, +dl.citation > dd:after { + content: ""; + clear: both; +} + +dl.field-list { + display: grid; + grid-template-columns: fit-content(30%) auto; +} + +dl.field-list > dt { + font-weight: bold; + word-break: break-word; + padding-left: 0.5em; + padding-right: 5px; +} + +dl.field-list > dt:after { + content: ":"; +} + +dl.field-list > dd { + 
padding-left: 0.5em; + margin-top: 0em; + margin-left: 0em; + margin-bottom: 0em; +} + +dl { + margin-bottom: 15px; +} + +dd > :first-child { + margin-top: 0px; +} + +dd ul, dd table { + margin-bottom: 10px; +} + +dd { + margin-top: 3px; + margin-bottom: 10px; + margin-left: 30px; +} + +dl > dd:last-child, +dl > dd:last-child > :last-child { + margin-bottom: 0; +} + +dt:target, span.highlighted { + background-color: #fbe54e; +} + +rect.highlighted { + fill: #fbe54e; +} + +dl.glossary dt { + font-weight: bold; + font-size: 1.1em; +} + +.optional { + font-size: 1.3em; +} + +.sig-paren { + font-size: larger; +} + +.versionmodified { + font-style: italic; +} + +.system-message { + background-color: #fda; + padding: 5px; + border: 3px solid red; +} + +.footnote:target { + background-color: #ffa; +} + +.line-block { + display: block; + margin-top: 1em; + margin-bottom: 1em; +} + +.line-block .line-block { + margin-top: 0; + margin-bottom: 0; + margin-left: 1.5em; +} + +.guilabel, .menuselection { + font-family: sans-serif; +} + +.accelerator { + text-decoration: underline; +} + +.classifier { + font-style: oblique; +} + +.classifier:before { + font-style: normal; + margin: 0.5em; + content: ":"; +} + +abbr, acronym { + border-bottom: dotted 1px; + cursor: help; +} + +/* -- code displays --------------------------------------------------------- */ + +pre { + overflow: auto; + overflow-y: hidden; /* fixes display issues on Chrome browsers */ +} + +pre, div[class*="highlight-"] { + clear: both; +} + +span.pre { + -moz-hyphens: none; + -ms-hyphens: none; + -webkit-hyphens: none; + hyphens: none; +} + +div[class*="highlight-"] { + margin: 1em 0; +} + +td.linenos pre { + border: 0; + background-color: transparent; + color: #aaa; +} + +table.highlighttable { + display: block; +} + +table.highlighttable tbody { + display: block; +} + +table.highlighttable tr { + display: flex; +} + +table.highlighttable td { + margin: 0; + padding: 0; +} + +table.highlighttable td.linenos { + padding-right: 0.5em; +} + +table.highlighttable td.code { + flex: 1; + overflow: hidden; +} + +.highlight .hll { + display: block; +} + +div.highlight pre, +table.highlighttable pre { + margin: 0; +} + +div.code-block-caption + div { + margin-top: 0; +} + +div.code-block-caption { + margin-top: 1em; + padding: 2px 5px; + font-size: small; +} + +div.code-block-caption code { + background-color: transparent; +} + +table.highlighttable td.linenos, +span.linenos, +div.doctest > div.highlight span.gp { /* gp: Generic.Prompt */ + user-select: none; +} + +div.code-block-caption span.caption-number { + padding: 0.1em 0.3em; + font-style: italic; +} + +div.code-block-caption span.caption-text { +} + +div.literal-block-wrapper { + margin: 1em 0; +} + +code.descname { + background-color: transparent; + font-weight: bold; + font-size: 1.2em; +} + +code.descclassname { + background-color: transparent; +} + +code.xref, a code { + background-color: transparent; + font-weight: bold; +} + +h1 code, h2 code, h3 code, h4 code, h5 code, h6 code { + background-color: transparent; +} + +.viewcode-link { + float: right; +} + +.viewcode-back { + float: right; + font-family: sans-serif; +} + +div.viewcode-block:target { + margin: -1px -10px; + padding: 0 10px; +} + +/* -- math display ---------------------------------------------------------- */ + +img.math { + vertical-align: middle; +} + +div.body div.math p { + text-align: center; +} + +span.eqno { + float: right; +} + +span.eqno a.headerlink { + position: absolute; + z-index: 1; +} + +div.math:hover 
a.headerlink { + visibility: visible; +} + +/* -- printout stylesheet --------------------------------------------------- */ + +@media print { + div.document, + div.documentwrapper, + div.bodywrapper { + margin: 0 !important; + width: 100%; + } + + div.sphinxsidebar, + div.related, + div.footer, + #top-link { + display: none; + } +} \ No newline at end of file diff --git a/docs/_build/html/_static/css/badge_only.css b/docs/_build/html/_static/css/badge_only.css new file mode 100644 index 0000000..7e17fb1 --- /dev/null +++ b/docs/_build/html/_static/css/badge_only.css @@ -0,0 +1,2 @@ +.fa:before{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:before,.clearfix:after{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:FontAwesome;font-weight:normal;font-style:normal;src:url("../font/fontawesome_webfont.eot");src:url("../font/fontawesome_webfont.eot?#iefix") format("embedded-opentype"),url("../font/fontawesome_webfont.woff") format("woff"),url("../font/fontawesome_webfont.ttf") format("truetype"),url("../font/fontawesome_webfont.svg#FontAwesome") format("svg")}.fa:before{display:inline-block;font-family:FontAwesome;font-style:normal;font-weight:normal;line-height:1;text-decoration:inherit}a .fa{display:inline-block;text-decoration:inherit}li .fa{display:inline-block}li .fa-large:before,li .fa-large:before{width:1.875em}ul.fas{list-style-type:none;margin-left:2em;text-indent:-0.8em}ul.fas li .fa{width:0.8em}ul.fas li .fa-large:before,ul.fas li .fa-large:before{vertical-align:baseline}.fa-book:before{content:""}.icon-book:before{content:""}.fa-caret-down:before{content:""}.icon-caret-down:before{content:""}.fa-caret-up:before{content:""}.icon-caret-up:before{content:""}.fa-caret-left:before{content:""}.icon-caret-left:before{content:""}.fa-caret-right:before{content:""}.icon-caret-right:before{content:""}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;border-top:solid 10px #343131;font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;z-index:400}.rst-versions a{color:#2980B9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27AE60;*zoom:1}.rst-versions .rst-current-version:before,.rst-versions .rst-current-version:after{display:table;content:""}.rst-versions .rst-current-version:after{clear:both}.rst-versions .rst-current-version .fa{color:#fcfcfc}.rst-versions .rst-current-version .fa-book{float:left}.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#E74C3C;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#F1C40F;color:#000}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:gray;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:solid 1px #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px}.rst-versions.rst-badge .icon-book{float:none}.rst-versions.rst-badge .fa-book{float:none}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up 
.rst-current-version .fa-book{float:left}.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge .rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width: 768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}img{width:100%;height:auto}} +/*# sourceMappingURL=badge_only.css.map */ diff --git a/docs/_build/html/_static/css/darker.css b/docs/_build/html/_static/css/darker.css new file mode 100644 index 0000000..ff14f7b --- /dev/null +++ b/docs/_build/html/_static/css/darker.css @@ -0,0 +1,93 @@ +body, .entry-container, +.wy-nav-side, +.wy-side-nav-search, +.fundo-claro, +.wy-menu-vertical li.current, +.rst-content dl:not(.docutils) dt, +code, .rst-content tt, +.wy-side-nav-search > a:hover, .wy-side-nav-search .wy-dropdown > a:hover, +.wy-nav-content{ + background-color: rgb(24, 26, 27) !important; +} + +h2 a, h2 a:visited, h2 a:hover { + color: rgb(209, 206, 199); +} + +body { + color: rgb(209, 206, 199); +} + + +a, a:hover, a:visited { + color: rgb(113, 178, 234); +} + +.wy-menu-vertical a { + color: #b3b3b3; +} + +code, .rst-content tt { + color: #fff8f8; + border: 0; +} + +codeblock, pre.literal-block, .rst-content .literal-block, .rst-content pre.literal-block, div[class^="highlight"] { + border: 1px solid #000; + } + +.rst-content dl:not(.docutils) dl dt { + background: rgb(24, 26, 27) !important; +} + +.wy-side-nav-search > a, .wy-side-nav-search .wy-dropdown > a { + color: #fcfcfc; +} + + +.wy-alert.wy-alert-info, .rst-content .note, .rst-content .wy-alert-info.attention, .rst-content .wy-alert-info.caution, .rst-content .wy-alert-info.danger, .rst-content .wy-alert-info.error, .rst-content .wy-alert-info.hint, .rst-content .wy-alert-info.important, .rst-content .wy-alert-info.tip, .rst-content .wy-alert-info.warning, .rst-content .seealso, .rst-content .wy-alert-info.admonition-todo, +.admonition.note code{ + background: #535050 !important; +} + +@media screen and (min-width: 768px) { +.wy-nav-side{ + width: 224px; +} +.wy-nav-content-wrap{ + margin-left: 200px; + background:rgb(24, 26, 27); +} +} + +.wy-nav-content-wrap{ + margin-left: 200px; + background: #343131; +} + +.hentry { + border-bottom: 2px solid rgb(24, 26, 27); +} + +.wy-alert.wy-alert-danger .wy-alert-title, .rst-content .wy-alert-danger.note .wy-alert-title, .rst-content .wy-alert-danger.attention .wy-alert-title, .rst-content .wy-alert-danger.caution .wy-alert-title, .rst-content .danger .wy-alert-title, .rst-content .error .wy-alert-title, .rst-content .wy-alert-danger.hint .wy-alert-title, .rst-content .wy-alert-danger.important .wy-alert-title, .rst-content .wy-alert-danger.tip .wy-alert-title, .rst-content .wy-alert-danger.warning .wy-alert-title, .rst-content .wy-alert-danger.seealso .wy-alert-title, .rst-content .wy-alert-danger.admonition-todo .wy-alert-title, .wy-alert.wy-alert-danger .rst-content .admonition-title, .rst-content .wy-alert.wy-alert-danger .admonition-title, .rst-content .wy-alert-danger.note .admonition-title, .rst-content .wy-alert-danger.attention .admonition-title, .rst-content .wy-alert-danger.caution .admonition-title, .rst-content .danger .admonition-title, .rst-content .error .admonition-title, .rst-content .wy-alert-danger.hint .admonition-title, .rst-content .wy-alert-danger.important .admonition-title, .rst-content .wy-alert-danger.tip .admonition-title, .rst-content .wy-alert-danger.warning .admonition-title, .rst-content 
.wy-alert-danger.seealso .admonition-title, .rst-content .wy-alert-danger.admonition-todo .admonition-title { + background: #db655a; +} + +.wy-alert.wy-alert-danger, .rst-content .wy-alert-danger.note, .rst-content .wy-alert-danger.attention, .rst-content .wy-alert-danger.caution, .rst-content .danger, .rst-content .error, .rst-content .wy-alert-danger.hint, .rst-content .wy-alert-danger.important, .rst-content .wy-alert-danger.tip, .rst-content .wy-alert-danger.warning, .rst-content .wy-alert-danger.seealso, .rst-content .wy-alert-danger.admonition-todo { + background: #e18279; + color: #fff; +} + +.wy-alert.wy-alert-warning, .rst-content .wy-alert-warning.note, .rst-content .attention, .rst-content .caution, .rst-content .wy-alert-warning.danger, .rst-content .wy-alert-warning.error, .rst-content .wy-alert-warning.hint, .rst-content .wy-alert-warning.important, .rst-content .wy-alert-warning.tip, .rst-content .warning, .rst-content .wy-alert-warning.seealso, .rst-content .admonition-todo { + background: #ca9f52; + color: #fff; +} + +.wy-alert.wy-alert-warning .wy-alert-title, .rst-content .wy-alert-warning.note .wy-alert-title, .rst-content .attention .wy-alert-title, .rst-content .caution .wy-alert-title, .rst-content .wy-alert-warning.danger .wy-alert-title, .rst-content .wy-alert-warning.error .wy-alert-title, .rst-content .wy-alert-warning.hint .wy-alert-title, .rst-content .wy-alert-warning.important .wy-alert-title, .rst-content .wy-alert-warning.tip .wy-alert-title, .rst-content .warning .wy-alert-title, .rst-content .wy-alert-warning.seealso .wy-alert-title, .rst-content .admonition-todo .wy-alert-title, .wy-alert.wy-alert-warning .rst-content .admonition-title, .rst-content .wy-alert.wy-alert-warning .admonition-title, .rst-content .wy-alert-warning.note .admonition-title, .rst-content .attention .admonition-title, .rst-content .caution .admonition-title, .rst-content .wy-alert-warning.danger .admonition-title, .rst-content .wy-alert-warning.error .admonition-title, .rst-content .wy-alert-warning.hint .admonition-title, .rst-content .wy-alert-warning.important .admonition-title, .rst-content .wy-alert-warning.tip .admonition-title, .rst-content .warning .admonition-title, .rst-content .wy-alert-warning.seealso .admonition-title, .rst-content .admonition-todo .admonition-title { + background: #ca7a35; +} + + +.wy-body-for-nav { + background-image: none !important; +} diff --git a/docs/_build/html/_static/css/pdj.css b/docs/_build/html/_static/css/pdj.css new file mode 100644 index 0000000..8445eca --- /dev/null +++ b/docs/_build/html/_static/css/pdj.css @@ -0,0 +1,494 @@ +@import url("theme.css"); + +.wy-side-nav-search{ + background-color: #595C5E; +} + +.wy-side-nav-search input[type=text] { + border-color: #595C5E; +} + +@media screen and (min-width: 768px) { +.wy-nav-side{ + width: 224px; +} +.wy-nav-content-wrap{ + margin-left: 200px; + background: #343131; +} +} +@media screen and (max-width: 768px) { + .wy-nav-content{ + background-color: #343131; + } +} + +.wy-nav-top{ + background-color: #595C5E; +} +.wy-nav-content{ + padding-top: 1%; + max-width: 100%; + background-color: #343131; +} + +.rst-content{ + margin-left: -3%; +} + +pre{ + font-size: 0.9em; + padding: 1%; +} + +pre > span, +pre > p{ + margin: 0; + font-size: 1em; +} + +pre > span{ + margin: 0; +} + +.hentry{ + padding-top: 2%; + padding-bottom: 5%; + padding-left: 3%; + padding-right: 3%; + border-bottom: 2px solid #343131; +} + +.entry-container{ + background-color: #fff; /*#EAEAEA; */ + border-radius: 3px; 
+ /* -webkit-box-shadow: 0 0 10px 8px rgba(50, 50, 50, 0.75); */ + /* -moz-box-shadow: 0 0 10px 8px rgba(50, 50, 50, 0.75); */ + /* box-shadow: 0 0 10px 8px rgba(50, 50, 50, 0.75); */ + -webkit-box-shadow: 0 0 10px 8px rgba(2, 2, 2, 0.36); + -moz-box-shadow: 0 0 10px 8px rgba(2, 2, 2, 0.36); + box-shadow: 0 0 10px 8px rgba(2, 2, 2, 0.36); + position: relative; + z-index: 210; +} + +.entry-content{ + margin-right: 3%; +} + +.wy-side-nav-search{ + padding:0.639em +} + + +.wy-menu-vertical a:hover{ + -webkit-box-shadow: 0 0 5px 5px rgba(20, 20, 20, 0.30); + -moz-box-shadow: 0 0 5px 5px rgba(20, 20, 20, 0.30); + box-shadow: 0 0 5px 5px rgba(20, 20, 20, 0.30); +} + +.wy-menu-vertical a{ + width: 90% +} + +#comments, +#comment-form{ + padding: 20px; +} + + +@media screen and (max-width: 1330px) { + .fancybox img{ + width: 310px; /* 88px / 633px */ + height: 206px; + } +} + +@media screen and (max-width: 1241px) { + .fancybox img{ + width: 280px; + height: 186px; + } +} + +@media screen and (max-width: 1154px) { + .fancybox img{ + width: 260px; + height: 173px; + } +} + +.expander { + position: absolute; + top: 5px; + left: 5px; + width: 16px; + height: 16px; + padding: 4px; + background: white url(/static/blog/img/fsbtn.png) center center no-repeat; + z-index: 99999; + cursor: pointer; +} + +/* 340 */ +/* .rst-content img { */ +/* margin-top: 8px; */ +/* margin-left: 10px; */ +/* margin-bottom: 5px; */ +/* } */ + +/* 260 */ +.rst-content .row img { + margin-top: 8px; + margin-left: 5px; + border-radius: 3px; + -webkit-box-shadow: 0 0 10px 5px rgba(2, 2, 2, 0.36); + -moz-box-shadow: 0 0 10px 5px /*rgba(50, 50, 50, 0.75)*/ rgba(2, 2, 2, 0.36); + box-shadow: 0 0 10px 5px rgba(2, 2, 2, 0.36); + +} + +.rst-content{ + z-index: 210; + margin-top: -60px; +} + +h2 a, +h2 a:visited, +h2 a:hover{ + color: #404040; +} + +.wy-menu-vertical li.on a, .wy-menu-vertical li.current>a { + color: #b3b3b3; + background: #4e4a4a; + box-shadow: 0 0 5px 5px rgba(20, 20, 20, 0.30); + border: none; +} + +.wy-menu-vertical li.current { + background: #343131; + border: none; +} + +.wy-menu-vertical li.current a{ + border:none +} + +.wy-alert.wy-alert-info .wy-alert-title, .rst-content .note .wy-alert-title, .rst-content .wy-alert-info.attention .wy-alert-title, .rst-content .wy-alert-info.caution .wy-alert-title, .rst-content .wy-alert-info.danger .wy-alert-title, .rst-content .wy-alert-info.error .wy-alert-title, .rst-content .wy-alert-info.hint .wy-alert-title, .rst-content .wy-alert-info.important .wy-alert-title, .rst-content .wy-alert-info.tip .wy-alert-title, .rst-content .wy-alert-info.warning .wy-alert-title, .rst-content .seealso .wy-alert-title, .rst-content .wy-alert-info.admonition-todo .wy-alert-title, .wy-alert.wy-alert-info .rst-content .admonition-title, .rst-content .wy-alert.wy-alert-info .admonition-title, .rst-content .note .admonition-title, .rst-content .wy-alert-info.attention .admonition-title, .rst-content .wy-alert-info.caution .admonition-title, .rst-content .wy-alert-info.danger .admonition-title, .rst-content .wy-alert-info.error .admonition-title, .rst-content .wy-alert-info.hint .admonition-title, .rst-content .wy-alert-info.important .admonition-title, .rst-content .wy-alert-info.tip .admonition-title, .rst-content .wy-alert-info.warning .admonition-title, .rst-content .seealso .admonition-title, .rst-content .wy-alert-info.admonition-todo .admonition-title { + background: #47494a; +} + +.wy-alert.wy-alert-info, .rst-content .note, .rst-content .wy-alert-info.attention, .rst-content 
.wy-alert-info.caution, .rst-content .wy-alert-info.danger, .rst-content .wy-alert-info.error, .rst-content .wy-alert-info.hint, .rst-content .wy-alert-info.important, .rst-content .wy-alert-info.tip, .rst-content .wy-alert-info.warning, .rst-content .seealso, .rst-content .wy-alert-info.admonition-todo { + background: #ececec; +} + +.wy-alert.wy-alert-danger, .rst-content .wy-alert-danger.note, .rst-content .wy-alert-danger.attention, .rst-content .wy-alert-danger.caution, .rst-content .danger, .rst-content .error, .rst-content .wy-alert-danger.hint, .rst-content .wy-alert-danger.important, .rst-content .wy-alert-danger.tip, .rst-content .wy-alert-danger.warning, .rst-content .wy-alert-danger.seealso, .rst-content .wy-alert-danger.admonition-todo { + background: #f0d5d2; +} + +.wy-menu-vertical li.on a:hover, +.wy-menu-vertical li.current>a:hover { + background: #4e4a4a; + +} + +.wy-menu-vertical li.current a:hover { + background: #4e4a4a; +} + +wy-menu-vertical li.toctree-l2.current>a { + color: #343131 !important; +} + +.wy-menu-vertical li.toctree-l2.current>a { + background: #343131; + padding: 0.4045em 2.427em; + box-shadow: none; +} + +.highlight{ + background: #000 !important; +} + +.highlight pre, +.highlight .mi, +.highlight .go{ + color: #57de4e !important; + font-size: 0.9em !important; +} + +.highlight .nt{ + color: #8080ec; +} + +.highlight .n, .highlight .nn, +.highlight .p, .highlight .o, +.highlight .nv, +.highlight .gp{ + color: #57de4e; +} + +.highlight .kn{ + color: #47ffff !important; +} + +.highlight .nc, +.highlight .nd{ + color: #1c909e; + font-weight: normal; +} + + +.highlight .k, .highlight .bp{ + color: #47ffff !important; +} + +.highlight .kc { + color: #ae39ff; + font-weight: normal !important; +} + +.highlight .nf, +.highlight .nb{ + color: #87CEFA; +} + +.highlight .s1, .highlight .s2, .highlight .s, +.highlight .sd, .highlight .si, +.highlight .se{ + color: #FDF5E6; +} + +.highlight .ow{ + color: #47ffff; +} + +.highlight .c1, +.highlight .c{ + color: #d80e04; +} + +.fundo-claro{ + height: 88px; + background-color: #595C5E; + margin-left: -10%; + margin-top: -1.1%; + margin-right: -10%; +} + + +@media screen and (max-width: 980px) { + pre{ + overflow-x: scroll; + } +} + + +@media screen and (max-width: 768px){ + .fundo-claro{ + display:none; + } + + .rst-content{ + z-index: 210; + margin-top: -20px; + } + +} + +.page{ + width: 3%; + float:left; +} + +.previous{ + width: 10% +} + +.paginator{ + margin-top: 50px; + padding-bottom: 25px; + padding-left: 20px; +} + +blockquote { + font-style: italic; + margin: 0 4.5em; + position: relative; +} + +blockquote, q { + quotes: "" ""; +} + +blockquote:before { + color: #807f7f; + content: "\201C"; + display: block; + font-family: "Droid Serif", "Times New Roman", serif; + font-size: 48px; + font-size: 4.8rem; + font-style: normal; + font-weight: bold; + line-height: 1; + position: absolute; + top: -15px; + left: -40px; +} + +.flatpage{ + +} + +.bkg-escuro{ + background-color: #343131; + color: #b3b3b3; +} + +.search-topo{ + float: right; + padding-right: 7%; + margin-top: -1.5; +} + +.fa-home:before, .icon-home:before { + content: url('../img/porao-branco.png'); +} + + +/* fundo preto */ + +.entry-container-escuro{ + background-color: #343131; + color: #b3b3b3; + padding: 15px; +} + +.entry-container-escuro h2, +.entry-container-escuro h2 a, +.entry-container-escuro h2 a:visited, +.entry-container-escuro h2 a:hover, +.entry-container-escuro h4, +.entry-container-escuro .entry-info abbr{ + color: #F9F4F4; +} + + +/* 
fim fundo preto */ + +.classe-correio textarea{ + width: 60%; + min-height: 300px; + margin-left: -39.5%; + border-radius: 15px; +} +.classe-correio { + background-color: #343131; +} + +.classe-correio .nome-email{ + width: 30%; + margin-bottom: 10px; +} + +/* cor dos links */ +a:visited{ + color: #2980B9; +} + +.wy-menu-vertical a { + color: #b3b3b3; +} + +.wy-side-nav-search>a, .wy-side-nav-search .wy-dropdown>a { + color: #fcfcfc; +} + +.highlight-yaml .nt{ + color: #eac648; +} + + +/* .rst-content dl:not(.docutils) { */ +/* margin-bottom: 24px; */ +/* background-color: #000; */ +/* } */ + +/* .rst-content dl:not(.docutils) code{ */ +/* color: #1bb41b; */ +/* border: none; */ +/* background-color: #000; */ +/* } */ + +.rst-content dl:not(.docutils) dt{ + background-color: #dcdfe2 !important; + border-top: 3px solid #969798; +} + +/* dl.method > dt{ */ +/* background-color: #000 !important; */ +/* color: #17deb0 !important; */ +/* border: 1px solid #858181 !important; */ +/* border-left: 3px solid #858181 !important; */ +/* } */ + +/* .sig-name{ */ +/* background-color: #000; */ +/* border: none; */ +/* color: #1bb41b; */ +/* font-size: 1.0em; */ +/* } */ + +/* .rst-content dl p, .rst-content dl table, .rst-content dl ul, .rst-content dl ol { */ +/* color: #FDF5E6 !important; */ +/* font-size: 0.95em !important; */ +/* } */ + +code, .rst-content tt { + color: #000; + font-weight: bold; + font-size: 85%; + border-color: #92989a; + } + +.reference code{ + color: #2980B9; + font-weight: bold; + font-size: 85%; + } + +.highlight-sh .nb, .highlight-sh .se{ + color: #57de4e !important; +} + +#rtd-search-form { + width: 85%; +} + +footer { + color: #999; + z-index: 211; + position: relative; + margin-top: 10px; +} + +.highlight-cfg .k, .highlight .bp { + color: #57de4e !important; + font-weight: normal; +} + +.highlight-cfg .na { + color: #eac648; +} + +.highlight-cfg .n, +.highlight-cfg .nn, +.highlight-cfg .p, +.highlight-cfg .o, +.highlight-cfg .nv, +.highlight-cfg .gp, +.highlight-cfg .s { + color: #57de4e; + font-weight: normal; +} diff --git a/docs/_build/html/_static/css/theme.css b/docs/_build/html/_static/css/theme.css new file mode 100644 index 0000000..390d706 --- /dev/null +++ b/docs/_build/html/_static/css/theme.css @@ -0,0 +1,5 @@ +*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}article,aside,details,figcaption,figure,footer,header,hgroup,nav,section{display:block}audio,canvas,video{display:inline-block;*display:inline;*zoom:1}audio:not([controls]){display:none}[hidden]{display:none}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:100%;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}body{margin:0}a:hover,a:active{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:bold}blockquote{margin:0}dfn{font-style:italic}ins{background:#ff9;color:#000;text-decoration:none}mark{background:#ff0;color:#000;font-style:italic;font-weight:bold}pre,code,.rst-content tt,kbd,samp{font-family:monospace,serif;_font-family:"courier 
new",monospace;font-size:1em}pre{white-space:pre}q{quotes:none}q:before,q:after{content:"";content:none}small{font-size:85%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sup{top:-0.5em}sub{bottom:-0.25em}ul,ol,dl{margin:0;padding:0;list-style:none;list-style-image:none}li{list-style:none}dd{margin:0}img{border:0;-ms-interpolation-mode:bicubic;vertical-align:middle;max-width:100%}svg:not(:root){overflow:hidden}figure{margin:0}form{margin:0}fieldset{border:0;margin:0;padding:0}label{cursor:pointer}legend{border:0;*margin-left:-7px;padding:0;white-space:normal}button,input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle}button,input{line-height:normal}button,input[type="button"],input[type="reset"],input[type="submit"]{cursor:pointer;-webkit-appearance:button;*overflow:visible}button[disabled],input[disabled]{cursor:default}input[type="checkbox"],input[type="radio"]{box-sizing:border-box;padding:0;*width:13px;*height:13px}input[type="search"]{-webkit-appearance:textfield;-moz-box-sizing:content-box;-webkit-box-sizing:content-box;box-sizing:content-box}input[type="search"]::-webkit-search-decoration,input[type="search"]::-webkit-search-cancel-button{-webkit-appearance:none}button::-moz-focus-inner,input::-moz-focus-inner{border:0;padding:0}textarea{overflow:auto;vertical-align:top;resize:vertical}table{border-collapse:collapse;border-spacing:0}td{vertical-align:top}.chromeframe{margin:0.2em 0;background:#ccc;color:#000;padding:0.2em 0}.ir{display:block;border:0;text-indent:-999em;overflow:hidden;background-color:transparent;background-repeat:no-repeat;text-align:left;direction:ltr;*line-height:0}.ir br{display:none}.hidden{display:none !important;visibility:hidden}.visuallyhidden{border:0;clip:rect(0 0 0 0);height:1px;margin:-1px;overflow:hidden;padding:0;position:absolute;width:1px}.visuallyhidden.focusable:active,.visuallyhidden.focusable:focus{clip:auto;height:auto;margin:0;overflow:visible;position:static;width:auto}.invisible{visibility:hidden}.relative{position:relative}big,small{font-size:100%}@media print{html,body,section{background:none !important}*{box-shadow:none !important;text-shadow:none !important;filter:none !important;-ms-filter:none !important}a,a:visited{text-decoration:underline}.ir a:after,a[href^="javascript:"]:after,a[href^="#"]:after{content:""}pre,blockquote{page-break-inside:avoid}thead{display:table-header-group}tr,img{page-break-inside:avoid}img{max-width:100% !important}@page{margin:0.5cm}p,h2,h3{orphans:3;widows:3}h2,h3{page-break-after:avoid}}.fa:before,.rst-content .admonition-title:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 .headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content dl dt .headerlink:before,.icon:before,.wy-dropdown .caret:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-alert,.rst-content .note,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .tip,.rst-content .warning,.rst-content .seealso,.rst-content 
.admonition-todo,.btn,input[type="text"],input[type="password"],input[type="email"],input[type="url"],input[type="date"],input[type="month"],input[type="time"],input[type="datetime"],input[type="datetime-local"],input[type="week"],input[type="number"],input[type="search"],input[type="tel"],input[type="color"],select,textarea,.wy-menu-vertical li.on a,.wy-menu-vertical li.current>a,.wy-side-nav-search>a,.wy-side-nav-search .wy-dropdown>a,.wy-nav-top a{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:before,.clearfix:after{display:table;content:""}.clearfix:after{clear:both}/*! + * Font Awesome 4.2.0 by @davegandy - http://fontawesome.io - @fontawesome + * License - http://fontawesome.io/license (Font: SIL OFL 1.1, CSS: MIT License) + */@font-face{font-family:'FontAwesome';src:url("../fonts/fontawesome-webfont.eot?v=4.2.0");src:url("../fonts/fontawesome-webfont.eot?#iefix&v=4.2.0") format("embedded-opentype"),url("../fonts/fontawesome-webfont.woff?v=4.2.0") format("woff"),url("../fonts/fontawesome-webfont.ttf?v=4.2.0") format("truetype"),url("../fonts/fontawesome-webfont.svg?v=4.2.0#fontawesomeregular") format("svg");font-weight:normal;font-style:normal}.fa,.rst-content .admonition-title,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content dl dt .headerlink,.icon{display:inline-block;font:normal normal normal 14px/1 FontAwesome;font-size:inherit;text-rendering:auto;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.fa-lg{font-size:1.33333em;line-height:0.75em;vertical-align:-15%}.fa-2x{font-size:2em}.fa-3x{font-size:3em}.fa-4x{font-size:4em}.fa-5x{font-size:5em}.fa-fw{width:1.28571em;text-align:center}.fa-ul{padding-left:0;margin-left:2.14286em;list-style-type:none}.fa-ul>li{position:relative}.fa-li{position:absolute;left:-2.14286em;width:2.14286em;top:0.14286em;text-align:center}.fa-li.fa-lg{left:-1.85714em}.fa-border{padding:.2em .25em .15em;border:solid 0.08em #eee;border-radius:.1em}.pull-right{float:right}.pull-left{float:left}.fa.pull-left,.rst-content .pull-left.admonition-title,.rst-content h1 .pull-left.headerlink,.rst-content h2 .pull-left.headerlink,.rst-content h3 .pull-left.headerlink,.rst-content h4 .pull-left.headerlink,.rst-content h5 .pull-left.headerlink,.rst-content h6 .pull-left.headerlink,.rst-content dl dt .pull-left.headerlink,.pull-left.icon{margin-right:.3em}.fa.pull-right,.rst-content .pull-right.admonition-title,.rst-content h1 .pull-right.headerlink,.rst-content h2 .pull-right.headerlink,.rst-content h3 .pull-right.headerlink,.rst-content h4 .pull-right.headerlink,.rst-content h5 .pull-right.headerlink,.rst-content h6 .pull-right.headerlink,.rst-content dl dt .pull-right.headerlink,.pull-right.icon{margin-left:.3em}.fa-spin{-webkit-animation:fa-spin 2s infinite linear;animation:fa-spin 2s infinite linear}@-webkit-keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}100%{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}@keyframes 
fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}100%{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}.fa-rotate-90{filter:progid:DXImageTransform.Microsoft.BasicImage(rotation=1);-webkit-transform:rotate(90deg);-ms-transform:rotate(90deg);transform:rotate(90deg)}.fa-rotate-180{filter:progid:DXImageTransform.Microsoft.BasicImage(rotation=2);-webkit-transform:rotate(180deg);-ms-transform:rotate(180deg);transform:rotate(180deg)}.fa-rotate-270{filter:progid:DXImageTransform.Microsoft.BasicImage(rotation=3);-webkit-transform:rotate(270deg);-ms-transform:rotate(270deg);transform:rotate(270deg)}.fa-flip-horizontal{filter:progid:DXImageTransform.Microsoft.BasicImage(rotation=0);-webkit-transform:scale(-1, 1);-ms-transform:scale(-1, 1);transform:scale(-1, 1)}.fa-flip-vertical{filter:progid:DXImageTransform.Microsoft.BasicImage(rotation=2);-webkit-transform:scale(1, -1);-ms-transform:scale(1, -1);transform:scale(1, -1)}:root .fa-rotate-90,:root .fa-rotate-180,:root .fa-rotate-270,:root .fa-flip-horizontal,:root .fa-flip-vertical{filter:none}.fa-stack{position:relative;display:inline-block;width:2em;height:2em;line-height:2em;vertical-align:middle}.fa-stack-1x,.fa-stack-2x{position:absolute;left:0;width:100%;text-align:center}.fa-stack-1x{line-height:inherit}.fa-stack-2x{font-size:2em}.fa-inverse{color:#fff}.fa-glass:before{content:""}.fa-music:before{content:""}.fa-search:before,.icon-search:before{content:""}.fa-envelope-o:before{content:""}.fa-heart:before{content:""}.fa-star:before{content:""}.fa-star-o:before{content:""}.fa-user:before{content:""}.fa-film:before{content:""}.fa-th-large:before{content:""}.fa-th:before{content:""}.fa-th-list:before{content:""}.fa-check:before{content:""}.fa-remove:before,.fa-close:before,.fa-times:before{content:""}.fa-search-plus:before{content:""}.fa-search-minus:before{content:""}.fa-power-off:before{content:""}.fa-signal:before{content:""}.fa-gear:before,.fa-cog:before{content:""}.fa-trash-o:before{content:""}.fa-home:before,.icon-home:before{content:""}.fa-file-o:before{content:""}.fa-clock-o:before{content:""}.fa-road:before{content:""}.fa-download:before{content:""}.fa-arrow-circle-o-down:before{content:""}.fa-arrow-circle-o-up:before{content:""}.fa-inbox:before{content:""}.fa-play-circle-o:before{content:""}.fa-rotate-right:before,.fa-repeat:before{content:""}.fa-refresh:before{content:""}.fa-list-alt:before{content:""}.fa-lock:before{content:""}.fa-flag:before{content:""}.fa-headphones:before{content:""}.fa-volume-off:before{content:""}.fa-volume-down:before{content:""}.fa-volume-up:before{content:""}.fa-qrcode:before{content:""}.fa-barcode:before{content:""}.fa-tag:before{content:""}.fa-tags:before{content:""}.fa-book:before,.icon-book:before{content:""}.fa-bookmark:before{content:""}.fa-print:before{content:""}.fa-camera:before{content:""}.fa-font:before{content:""}.fa-bold:before{content:""}.fa-italic:before{content:""}.fa-text-height:before{content:""}.fa-text-width:before{content:""}.fa-align-left:before{content:""}.fa-align-center:before{content:""}.fa-align-right:before{content:""}.fa-align-justify:before{content:""}.fa-list:before{content:""}.fa-dedent:before,.fa-outdent:before{content:""}.fa-indent:before{content:""}.fa-video-camera:before{content:""}.fa-photo:before,.fa-image:before,.fa-picture-o:before{content:""}.fa-pencil:before{content:""}.fa-map-marker:before{content:""}.fa-adjust:before{content:""}.fa-tint:before{content:""}.fa-edit:before,.fa-pencil
-square-o:before{content:""}.fa-share-square-o:before{content:""}.fa-check-square-o:before{content:""}.fa-arrows:before{content:""}.fa-step-backward:before{content:""}.fa-fast-backward:before{content:""}.fa-backward:before{content:""}.fa-play:before{content:""}.fa-pause:before{content:""}.fa-stop:before{content:""}.fa-forward:before{content:""}.fa-fast-forward:before{content:""}.fa-step-forward:before{content:""}.fa-eject:before{content:""}.fa-chevron-left:before{content:""}.fa-chevron-right:before{content:""}.fa-plus-circle:before{content:""}.fa-minus-circle:before{content:""}.fa-times-circle:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before{content:""}.fa-check-circle:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before{content:""}.fa-question-circle:before{content:""}.fa-info-circle:before{content:""}.fa-crosshairs:before{content:""}.fa-times-circle-o:before{content:""}.fa-check-circle-o:before{content:""}.fa-ban:before{content:""}.fa-arrow-left:before{content:""}.fa-arrow-right:before{content:""}.fa-arrow-up:before{content:""}.fa-arrow-down:before{content:""}.fa-mail-forward:before,.fa-share:before{content:""}.fa-expand:before{content:""}.fa-compress:before{content:""}.fa-plus:before{content:""}.fa-minus:before{content:""}.fa-asterisk:before{content:""}.fa-exclamation-circle:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.rst-content .admonition-title:before{content:""}.fa-gift:before{content:""}.fa-leaf:before{content:""}.fa-fire:before,.icon-fire:before{content:""}.fa-eye:before{content:""}.fa-eye-slash:before{content:""}.fa-warning:before,.fa-exclamation-triangle:before{content:""}.fa-plane:before{content:""}.fa-calendar:before{content:""}.fa-random:before{content:""}.fa-comment:before{content:""}.fa-magnet:before{content:""}.fa-chevron-up:before{content:""}.fa-chevron-down:before{content:""}.fa-retweet:before{content:""}.fa-shopping-cart:before{content:""}.fa-folder:before{content:""}.fa-folder-open:before{content:""}.fa-arrows-v:before{content:""}.fa-arrows-h:before{content:""}.fa-bar-chart-o:before,.fa-bar-chart:before{content:""}.fa-twitter-square:before{content:""}.fa-facebook-square:before{content:""}.fa-camera-retro:before{content:""}.fa-key:before{content:""}.fa-gears:before,.fa-cogs:before{content:""}.fa-comments:before{content:""}.fa-thumbs-o-up:before{content:""}.fa-thumbs-o-down:before{content:""}.fa-star-half:before{content:""}.fa-heart-o:before{content:""}.fa-sign-out:before{content:""}.fa-linkedin-square:before{content:""}.fa-thumb-tack:before{content:""}.fa-external-link:before{content:""}.fa-sign-in:before{content:""}.fa-trophy:before{content:""}.fa-github-square:before{content:""}.fa-upload:before{content:""}.fa-lemon-o:before{content:""}.fa-phone:before{content:""}.fa-square-o:before{content:""}.fa-bookmark-o:before{content:""}.fa-phone-square:before{content:""}.fa-twitter:before{content:""}.fa-facebook:before{content:""}.fa-github:before,.icon-github:before{content:""}.fa-unlock:before{content:""}.fa-credit-card:before{content:""}.fa-rss:before{content:""}.fa-hdd-o:before{content:""}.fa-bullhorn:before{content:""}.fa-bell:before{content:""}.fa-certificate:before{content:""}.fa-hand-o-right:before{content:""}.fa-hand-o-left:before{content:""}.fa-hand-o-up:before{content:""}.fa-hand-o-down:before{content:""}.fa-arrow-circ
le-left:before,.icon-circle-arrow-left:before{content:""}.fa-arrow-circle-right:before,.icon-circle-arrow-right:before{content:""}.fa-arrow-circle-up:before{content:""}.fa-arrow-circle-down:before{content:""}.fa-globe:before{content:""}.fa-wrench:before{content:""}.fa-tasks:before{content:""}.fa-filter:before{content:""}.fa-briefcase:before{content:""}.fa-arrows-alt:before{content:""}.fa-group:before,.fa-users:before{content:""}.fa-chain:before,.fa-link:before,.icon-link:before{content:""}.fa-cloud:before{content:""}.fa-flask:before{content:""}.fa-cut:before,.fa-scissors:before{content:""}.fa-copy:before,.fa-files-o:before{content:""}.fa-paperclip:before{content:""}.fa-save:before,.fa-floppy-o:before{content:""}.fa-square:before{content:""}.fa-navicon:before,.fa-reorder:before,.fa-bars:before{content:""}.fa-list-ul:before{content:""}.fa-list-ol:before{content:""}.fa-strikethrough:before{content:""}.fa-underline:before{content:""}.fa-table:before{content:""}.fa-magic:before{content:""}.fa-truck:before{content:""}.fa-pinterest:before{content:""}.fa-pinterest-square:before{content:""}.fa-google-plus-square:before{content:""}.fa-google-plus:before{content:""}.fa-money:before{content:""}.fa-caret-down:before,.wy-dropdown .caret:before,.icon-caret-down:before{content:""}.fa-caret-up:before{content:""}.fa-caret-left:before{content:""}.fa-caret-right:before{content:""}.fa-columns:before{content:""}.fa-unsorted:before,.fa-sort:before{content:""}.fa-sort-down:before,.fa-sort-desc:before{content:""}.fa-sort-up:before,.fa-sort-asc:before{content:""}.fa-envelope:before{content:""}.fa-linkedin:before{content:""}.fa-rotate-left:before,.fa-undo:before{content:""}.fa-legal:before,.fa-gavel:before{content:""}.fa-dashboard:before,.fa-tachometer:before{content:""}.fa-comment-o:before{content:""}.fa-comments-o:before{content:""}.fa-flash:before,.fa-bolt:before{content:""}.fa-sitemap:before{content:""}.fa-umbrella:before{content:""}.fa-paste:before,.fa-clipboard:before{content:""}.fa-lightbulb-o:before{content:""}.fa-exchange:before{content:""}.fa-cloud-download:before{content:""}.fa-cloud-upload:before{content:""}.fa-user-md:before{content:""}.fa-stethoscope:before{content:""}.fa-suitcase:before{content:""}.fa-bell-o:before{content:""}.fa-coffee:before{content:""}.fa-cutlery:before{content:""}.fa-file-text-o:before{content:""}.fa-building-o:before{content:""}.fa-hospital-o:before{content:""}.fa-ambulance:before{content:""}.fa-medkit:before{content:""}.fa-fighter-jet:before{content:""}.fa-beer:before{content:""}.fa-h-square:before{content:""}.fa-plus-square:before{content:""}.fa-angle-double-left:before{content:""}.fa-angle-double-right:before{content:""}.fa-angle-double-up:before{content:""}.fa-angle-double-down:before{content:""}.fa-angle-left:before{content:""}.fa-angle-right:before{content:""}.fa-angle-up:before{content:""}.fa-angle-down:before{content:""}.fa-desktop:before{content:""}.fa-laptop:before{content:""}.fa-tablet:before{content:""}.fa-mobile-phone:before,.fa-mobile:before{content:""}.fa-circle-o:before{content:""}.fa-quote-left:before{content:""}.fa-quote-right:before{content:""}.fa-spinner:before{content:""}.fa-circle:before{content:""}.fa-mail-reply:before,.fa-reply:before{content:""}.fa-github-alt:before{content:""}.fa-folder-o:before{content:""}.fa-folder-open-o:before{content:""}.fa-smile-o:before{content:""}.fa-frown-o:before{content:""}.fa-meh-o:before{content:""}.fa-gamepad:before{content:""}.fa-
keyboard-o:before{content:""}.fa-flag-o:before{content:""}.fa-flag-checkered:before{content:""}.fa-terminal:before{content:""}.fa-code:before{content:""}.fa-mail-reply-all:before,.fa-reply-all:before{content:""}.fa-star-half-empty:before,.fa-star-half-full:before,.fa-star-half-o:before{content:""}.fa-location-arrow:before{content:""}.fa-crop:before{content:""}.fa-code-fork:before{content:""}.fa-unlink:before,.fa-chain-broken:before{content:""}.fa-question:before{content:""}.fa-info:before{content:""}.fa-exclamation:before{content:""}.fa-superscript:before{content:""}.fa-subscript:before{content:""}.fa-eraser:before{content:""}.fa-puzzle-piece:before{content:""}.fa-microphone:before{content:""}.fa-microphone-slash:before{content:""}.fa-shield:before{content:""}.fa-calendar-o:before{content:""}.fa-fire-extinguisher:before{content:""}.fa-rocket:before{content:""}.fa-maxcdn:before{content:""}.fa-chevron-circle-left:before{content:""}.fa-chevron-circle-right:before{content:""}.fa-chevron-circle-up:before{content:""}.fa-chevron-circle-down:before{content:""}.fa-html5:before{content:""}.fa-css3:before{content:""}.fa-anchor:before{content:""}.fa-unlock-alt:before{content:""}.fa-bullseye:before{content:""}.fa-ellipsis-h:before{content:""}.fa-ellipsis-v:before{content:""}.fa-rss-square:before{content:""}.fa-play-circle:before{content:""}.fa-ticket:before{content:""}.fa-minus-square:before{content:""}.fa-minus-square-o:before{content:""}.fa-level-up:before{content:""}.fa-level-down:before{content:""}.fa-check-square:before{content:""}.fa-pencil-square:before{content:""}.fa-external-link-square:before{content:""}.fa-share-square:before{content:""}.fa-compass:before{content:""}.fa-toggle-down:before,.fa-caret-square-o-down:before{content:""}.fa-toggle-up:before,.fa-caret-square-o-up:before{content:""}.fa-toggle-right:before,.fa-caret-square-o-right:before{content:""}.fa-euro:before,.fa-eur:before{content:""}.fa-gbp:before{content:""}.fa-dollar:before,.fa-usd:before{content:""}.fa-rupee:before,.fa-inr:before{content:""}.fa-cny:before,.fa-rmb:before,.fa-yen:before,.fa-jpy:before{content:""}.fa-ruble:before,.fa-rouble:before,.fa-rub:before{content:""}.fa-won:before,.fa-krw:before{content:""}.fa-bitcoin:before,.fa-btc:before{content:""}.fa-file:before{content:""}.fa-file-text:before{content:""}.fa-sort-alpha-asc:before{content:""}.fa-sort-alpha-desc:before{content:""}.fa-sort-amount-asc:before{content:""}.fa-sort-amount-desc:before{content:""}.fa-sort-numeric-asc:before{content:""}.fa-sort-numeric-desc:before{content:""}.fa-thumbs-up:before{content:""}.fa-thumbs-down:before{content:""}.fa-youtube-square:before{content:""}.fa-youtube:before{content:""}.fa-xing:before{content:""}.fa-xing-square:before{content:""}.fa-youtube-play:before{content:""}.fa-dropbox:before{content:""}.fa-stack-overflow:before{content:""}.fa-instagram:before{content:""}.fa-flickr:before{content:""}.fa-adn:before{content:""}.fa-bitbucket:before,.icon-bitbucket:before{content:""}.fa-bitbucket-square:before{content:""}.fa-tumblr:before{content:""}.fa-tumblr-square:before{content:""}.fa-long-arrow-down:before{content:""}.fa-long-arrow-up:before{content:""}.fa-long-arrow-left:before{content:""}.fa-long-arrow-right:before{content:""}.fa-apple:before{content:""}.fa-windows:before{content:""}.fa-android:before{content:""}.fa-linux:before{content:""}.fa-dribbble:before{content:""}.fa-skype:before{content:""}.fa-foursquare:before{content:""}.fa-trello:
before{content:""}.fa-female:before{content:""}.fa-male:before{content:""}.fa-gittip:before{content:""}.fa-sun-o:before{content:""}.fa-moon-o:before{content:""}.fa-archive:before{content:""}.fa-bug:before{content:""}.fa-vk:before{content:""}.fa-weibo:before{content:""}.fa-renren:before{content:""}.fa-pagelines:before{content:""}.fa-stack-exchange:before{content:""}.fa-arrow-circle-o-right:before{content:""}.fa-arrow-circle-o-left:before{content:""}.fa-toggle-left:before,.fa-caret-square-o-left:before{content:""}.fa-dot-circle-o:before{content:""}.fa-wheelchair:before{content:""}.fa-vimeo-square:before{content:""}.fa-turkish-lira:before,.fa-try:before{content:""}.fa-plus-square-o:before{content:""}.fa-space-shuttle:before{content:""}.fa-slack:before{content:""}.fa-envelope-square:before{content:""}.fa-wordpress:before{content:""}.fa-openid:before{content:""}.fa-institution:before,.fa-bank:before,.fa-university:before{content:""}.fa-mortar-board:before,.fa-graduation-cap:before{content:""}.fa-yahoo:before{content:""}.fa-google:before{content:""}.fa-reddit:before{content:""}.fa-reddit-square:before{content:""}.fa-stumbleupon-circle:before{content:""}.fa-stumbleupon:before{content:""}.fa-delicious:before{content:""}.fa-digg:before{content:""}.fa-pied-piper:before{content:""}.fa-pied-piper-alt:before{content:""}.fa-drupal:before{content:""}.fa-joomla:before{content:""}.fa-language:before{content:""}.fa-fax:before{content:""}.fa-building:before{content:""}.fa-child:before{content:""}.fa-paw:before{content:""}.fa-spoon:before{content:""}.fa-cube:before{content:""}.fa-cubes:before{content:""}.fa-behance:before{content:""}.fa-behance-square:before{content:""}.fa-steam:before{content:""}.fa-steam-square:before{content:""}.fa-recycle:before{content:""}.fa-automobile:before,.fa-car:before{content:""}.fa-cab:before,.fa-taxi:before{content:""}.fa-tree:before{content:""}.fa-spotify:before{content:""}.fa-deviantart:before{content:""}.fa-soundcloud:before{content:""}.fa-database:before{content:""}.fa-file-pdf-o:before{content:""}.fa-file-word-o:before{content:""}.fa-file-excel-o:before{content:""}.fa-file-powerpoint-o:before{content:""}.fa-file-photo-o:before,.fa-file-picture-o:before,.fa-file-image-o:before{content:""}.fa-file-zip-o:before,.fa-file-archive-o:before{content:""}.fa-file-sound-o:before,.fa-file-audio-o:before{content:""}.fa-file-movie-o:before,.fa-file-video-o:before{content:""}.fa-file-code-o:before{content:""}.fa-vine:before{content:""}.fa-codepen:before{content:""}.fa-jsfiddle:before{content:""}.fa-life-bouy:before,.fa-life-buoy:before,.fa-life-saver:before,.fa-support:before,.fa-life-ring:before{content:""}.fa-circle-o-notch:before{content:""}.fa-ra:before,.fa-rebel:before{content:""}.fa-ge:before,.fa-empire:before{content:""}.fa-git-square:before{content:""}.fa-git:before{content:""}.fa-hacker-news:before{content:""}.fa-tencent-weibo:before{content:""}.fa-qq:before{content:""}.fa-wechat:before,.fa-weixin:before{content:""}.fa-send:before,.fa-paper-plane:before{content:""}.fa-send-o:before,.fa-paper-plane-o:before{content:""}.fa-history:before{content:""}.fa-circle-thin:before{content:""}.fa-header:before{content:""}.fa-paragraph:before{content:""}.fa-sliders:before{content:""}.fa-share-alt:before{content:""}.fa-share-alt-square:before{content:""}.fa-bomb:before{content:""}.fa-soccer-ball-o:before,.fa-futbol-o:before{content:""}.fa-tty:before{content:""}.fa-binoculars:before{content:""}.fa-plug:
before{content:""}.fa-slideshare:before{content:""}.fa-twitch:before{content:""}.fa-yelp:before{content:""}.fa-newspaper-o:before{content:""}.fa-wifi:before{content:""}.fa-calculator:before{content:""}.fa-paypal:before{content:""}.fa-google-wallet:before{content:""}.fa-cc-visa:before{content:""}.fa-cc-mastercard:before{content:""}.fa-cc-discover:before{content:""}.fa-cc-amex:before{content:""}.fa-cc-paypal:before{content:""}.fa-cc-stripe:before{content:""}.fa-bell-slash:before{content:""}.fa-bell-slash-o:before{content:""}.fa-trash:before{content:""}.fa-copyright:before{content:""}.fa-at:before{content:""}.fa-eyedropper:before{content:""}.fa-paint-brush:before{content:""}.fa-birthday-cake:before{content:""}.fa-area-chart:before{content:""}.fa-pie-chart:before{content:""}.fa-line-chart:before{content:""}.fa-lastfm:before{content:""}.fa-lastfm-square:before{content:""}.fa-toggle-off:before{content:""}.fa-toggle-on:before{content:""}.fa-bicycle:before{content:""}.fa-bus:before{content:""}.fa-ioxhost:before{content:""}.fa-angellist:before{content:""}.fa-cc:before{content:""}.fa-shekel:before,.fa-sheqel:before,.fa-ils:before{content:""}.fa-meanpath:before{content:""}.fa,.rst-content .admonition-title,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content dl dt .headerlink,.icon,.wy-dropdown .caret,.wy-inline-validate.wy-inline-validate-success .wy-input-context,.wy-inline-validate.wy-inline-validate-danger .wy-input-context,.wy-inline-validate.wy-inline-validate-warning .wy-input-context,.wy-inline-validate.wy-inline-validate-info .wy-input-context{font-family:inherit}.fa:before,.rst-content .admonition-title:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 .headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content dl dt .headerlink:before,.icon:before,.wy-dropdown .caret:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before{font-family:"FontAwesome";display:inline-block;font-style:normal;font-weight:normal;line-height:1;text-decoration:inherit}a .fa,a .rst-content .admonition-title,.rst-content a .admonition-title,a .rst-content h1 .headerlink,.rst-content h1 a .headerlink,a .rst-content h2 .headerlink,.rst-content h2 a .headerlink,a .rst-content h3 .headerlink,.rst-content h3 a .headerlink,a .rst-content h4 .headerlink,.rst-content h4 a .headerlink,a .rst-content h5 .headerlink,.rst-content h5 a .headerlink,a .rst-content h6 .headerlink,.rst-content h6 a .headerlink,a .rst-content dl dt .headerlink,.rst-content dl dt a .headerlink,a .icon{display:inline-block;text-decoration:inherit}.btn .fa,.btn .rst-content .admonition-title,.rst-content .btn .admonition-title,.btn .rst-content h1 .headerlink,.rst-content h1 .btn .headerlink,.btn .rst-content h2 .headerlink,.rst-content h2 .btn .headerlink,.btn .rst-content h3 .headerlink,.rst-content h3 .btn .headerlink,.btn .rst-content h4 .headerlink,.rst-content h4 .btn .headerlink,.btn .rst-content h5 .headerlink,.rst-content h5 .btn .headerlink,.btn .rst-content h6 .headerlink,.rst-content h6 .btn .headerlink,.btn .rst-content dl dt 
.headerlink,.rst-content dl dt .btn .headerlink,.btn .icon,.nav .fa,.nav .rst-content .admonition-title,.rst-content .nav .admonition-title,.nav .rst-content h1 .headerlink,.rst-content h1 .nav .headerlink,.nav .rst-content h2 .headerlink,.rst-content h2 .nav .headerlink,.nav .rst-content h3 .headerlink,.rst-content h3 .nav .headerlink,.nav .rst-content h4 .headerlink,.rst-content h4 .nav .headerlink,.nav .rst-content h5 .headerlink,.rst-content h5 .nav .headerlink,.nav .rst-content h6 .headerlink,.rst-content h6 .nav .headerlink,.nav .rst-content dl dt .headerlink,.rst-content dl dt .nav .headerlink,.nav .icon{display:inline}.btn .fa.fa-large,.btn .rst-content .fa-large.admonition-title,.rst-content .btn .fa-large.admonition-title,.btn .rst-content h1 .fa-large.headerlink,.rst-content h1 .btn .fa-large.headerlink,.btn .rst-content h2 .fa-large.headerlink,.rst-content h2 .btn .fa-large.headerlink,.btn .rst-content h3 .fa-large.headerlink,.rst-content h3 .btn .fa-large.headerlink,.btn .rst-content h4 .fa-large.headerlink,.rst-content h4 .btn .fa-large.headerlink,.btn .rst-content h5 .fa-large.headerlink,.rst-content h5 .btn .fa-large.headerlink,.btn .rst-content h6 .fa-large.headerlink,.rst-content h6 .btn .fa-large.headerlink,.btn .rst-content dl dt .fa-large.headerlink,.rst-content dl dt .btn .fa-large.headerlink,.btn .fa-large.icon,.nav .fa.fa-large,.nav .rst-content .fa-large.admonition-title,.rst-content .nav .fa-large.admonition-title,.nav .rst-content h1 .fa-large.headerlink,.rst-content h1 .nav .fa-large.headerlink,.nav .rst-content h2 .fa-large.headerlink,.rst-content h2 .nav .fa-large.headerlink,.nav .rst-content h3 .fa-large.headerlink,.rst-content h3 .nav .fa-large.headerlink,.nav .rst-content h4 .fa-large.headerlink,.rst-content h4 .nav .fa-large.headerlink,.nav .rst-content h5 .fa-large.headerlink,.rst-content h5 .nav .fa-large.headerlink,.nav .rst-content h6 .fa-large.headerlink,.rst-content h6 .nav .fa-large.headerlink,.nav .rst-content dl dt .fa-large.headerlink,.rst-content dl dt .nav .fa-large.headerlink,.nav .fa-large.icon{line-height:0.9em}.btn .fa.fa-spin,.btn .rst-content .fa-spin.admonition-title,.rst-content .btn .fa-spin.admonition-title,.btn .rst-content h1 .fa-spin.headerlink,.rst-content h1 .btn .fa-spin.headerlink,.btn .rst-content h2 .fa-spin.headerlink,.rst-content h2 .btn .fa-spin.headerlink,.btn .rst-content h3 .fa-spin.headerlink,.rst-content h3 .btn .fa-spin.headerlink,.btn .rst-content h4 .fa-spin.headerlink,.rst-content h4 .btn .fa-spin.headerlink,.btn .rst-content h5 .fa-spin.headerlink,.rst-content h5 .btn .fa-spin.headerlink,.btn .rst-content h6 .fa-spin.headerlink,.rst-content h6 .btn .fa-spin.headerlink,.btn .rst-content dl dt .fa-spin.headerlink,.rst-content dl dt .btn .fa-spin.headerlink,.btn .fa-spin.icon,.nav .fa.fa-spin,.nav .rst-content .fa-spin.admonition-title,.rst-content .nav .fa-spin.admonition-title,.nav .rst-content h1 .fa-spin.headerlink,.rst-content h1 .nav .fa-spin.headerlink,.nav .rst-content h2 .fa-spin.headerlink,.rst-content h2 .nav .fa-spin.headerlink,.nav .rst-content h3 .fa-spin.headerlink,.rst-content h3 .nav .fa-spin.headerlink,.nav .rst-content h4 .fa-spin.headerlink,.rst-content h4 .nav .fa-spin.headerlink,.nav .rst-content h5 .fa-spin.headerlink,.rst-content h5 .nav .fa-spin.headerlink,.nav .rst-content h6 .fa-spin.headerlink,.rst-content h6 .nav .fa-spin.headerlink,.nav .rst-content dl dt .fa-spin.headerlink,.rst-content dl dt .nav .fa-spin.headerlink,.nav .fa-spin.icon{display:inline-block}.btn.fa:before,.rst-content 
.btn.admonition-title:before,.rst-content h1 .btn.headerlink:before,.rst-content h2 .btn.headerlink:before,.rst-content h3 .btn.headerlink:before,.rst-content h4 .btn.headerlink:before,.rst-content h5 .btn.headerlink:before,.rst-content h6 .btn.headerlink:before,.rst-content dl dt .btn.headerlink:before,.btn.icon:before{opacity:0.5;-webkit-transition:opacity 0.05s ease-in;-moz-transition:opacity 0.05s ease-in;transition:opacity 0.05s ease-in}.btn.fa:hover:before,.rst-content .btn.admonition-title:hover:before,.rst-content h1 .btn.headerlink:hover:before,.rst-content h2 .btn.headerlink:hover:before,.rst-content h3 .btn.headerlink:hover:before,.rst-content h4 .btn.headerlink:hover:before,.rst-content h5 .btn.headerlink:hover:before,.rst-content h6 .btn.headerlink:hover:before,.rst-content dl dt .btn.headerlink:hover:before,.btn.icon:hover:before{opacity:1}.btn-mini .fa:before,.btn-mini .rst-content .admonition-title:before,.rst-content .btn-mini .admonition-title:before,.btn-mini .rst-content h1 .headerlink:before,.rst-content h1 .btn-mini .headerlink:before,.btn-mini .rst-content h2 .headerlink:before,.rst-content h2 .btn-mini .headerlink:before,.btn-mini .rst-content h3 .headerlink:before,.rst-content h3 .btn-mini .headerlink:before,.btn-mini .rst-content h4 .headerlink:before,.rst-content h4 .btn-mini .headerlink:before,.btn-mini .rst-content h5 .headerlink:before,.rst-content h5 .btn-mini .headerlink:before,.btn-mini .rst-content h6 .headerlink:before,.rst-content h6 .btn-mini .headerlink:before,.btn-mini .rst-content dl dt .headerlink:before,.rst-content dl dt .btn-mini .headerlink:before,.btn-mini .icon:before{font-size:14px;vertical-align:-15%}.wy-alert,.rst-content .note,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .tip,.rst-content .warning,.rst-content .seealso,.rst-content .admonition-todo{padding:12px;line-height:24px;margin-bottom:24px;background:#e7f2fa}.wy-alert-title,.rst-content .admonition-title{color:#fff;font-weight:bold;display:block;color:#fff;background:#6ab0de;margin:-12px;padding:6px 12px;margin-bottom:12px}.wy-alert.wy-alert-danger,.rst-content .wy-alert-danger.note,.rst-content .wy-alert-danger.attention,.rst-content .wy-alert-danger.caution,.rst-content .danger,.rst-content .error,.rst-content .wy-alert-danger.hint,.rst-content .wy-alert-danger.important,.rst-content .wy-alert-danger.tip,.rst-content .wy-alert-danger.warning,.rst-content .wy-alert-danger.seealso,.rst-content .wy-alert-danger.admonition-todo{background:#fdf3f2}.wy-alert.wy-alert-danger .wy-alert-title,.rst-content .wy-alert-danger.note .wy-alert-title,.rst-content .wy-alert-danger.attention .wy-alert-title,.rst-content .wy-alert-danger.caution .wy-alert-title,.rst-content .danger .wy-alert-title,.rst-content .error .wy-alert-title,.rst-content .wy-alert-danger.hint .wy-alert-title,.rst-content .wy-alert-danger.important .wy-alert-title,.rst-content .wy-alert-danger.tip .wy-alert-title,.rst-content .wy-alert-danger.warning .wy-alert-title,.rst-content .wy-alert-danger.seealso .wy-alert-title,.rst-content .wy-alert-danger.admonition-todo .wy-alert-title,.wy-alert.wy-alert-danger .rst-content .admonition-title,.rst-content .wy-alert.wy-alert-danger .admonition-title,.rst-content .wy-alert-danger.note .admonition-title,.rst-content .wy-alert-danger.attention .admonition-title,.rst-content .wy-alert-danger.caution .admonition-title,.rst-content .danger .admonition-title,.rst-content .error 
.admonition-title,.rst-content .wy-alert-danger.hint .admonition-title,.rst-content .wy-alert-danger.important .admonition-title,.rst-content .wy-alert-danger.tip .admonition-title,.rst-content .wy-alert-danger.warning .admonition-title,.rst-content .wy-alert-danger.seealso .admonition-title,.rst-content .wy-alert-danger.admonition-todo .admonition-title{background:#f29f97}.wy-alert.wy-alert-warning,.rst-content .wy-alert-warning.note,.rst-content .attention,.rst-content .caution,.rst-content .wy-alert-warning.danger,.rst-content .wy-alert-warning.error,.rst-content .wy-alert-warning.hint,.rst-content .wy-alert-warning.important,.rst-content .wy-alert-warning.tip,.rst-content .warning,.rst-content .wy-alert-warning.seealso,.rst-content .admonition-todo{background:#ffedcc}.wy-alert.wy-alert-warning .wy-alert-title,.rst-content .wy-alert-warning.note .wy-alert-title,.rst-content .attention .wy-alert-title,.rst-content .caution .wy-alert-title,.rst-content .wy-alert-warning.danger .wy-alert-title,.rst-content .wy-alert-warning.error .wy-alert-title,.rst-content .wy-alert-warning.hint .wy-alert-title,.rst-content .wy-alert-warning.important .wy-alert-title,.rst-content .wy-alert-warning.tip .wy-alert-title,.rst-content .warning .wy-alert-title,.rst-content .wy-alert-warning.seealso .wy-alert-title,.rst-content .admonition-todo .wy-alert-title,.wy-alert.wy-alert-warning .rst-content .admonition-title,.rst-content .wy-alert.wy-alert-warning .admonition-title,.rst-content .wy-alert-warning.note .admonition-title,.rst-content .attention .admonition-title,.rst-content .caution .admonition-title,.rst-content .wy-alert-warning.danger .admonition-title,.rst-content .wy-alert-warning.error .admonition-title,.rst-content .wy-alert-warning.hint .admonition-title,.rst-content .wy-alert-warning.important .admonition-title,.rst-content .wy-alert-warning.tip .admonition-title,.rst-content .warning .admonition-title,.rst-content .wy-alert-warning.seealso .admonition-title,.rst-content .admonition-todo .admonition-title{background:#f0b37e}.wy-alert.wy-alert-info,.rst-content .note,.rst-content .wy-alert-info.attention,.rst-content .wy-alert-info.caution,.rst-content .wy-alert-info.danger,.rst-content .wy-alert-info.error,.rst-content .wy-alert-info.hint,.rst-content .wy-alert-info.important,.rst-content .wy-alert-info.tip,.rst-content .wy-alert-info.warning,.rst-content .seealso,.rst-content .wy-alert-info.admonition-todo{background:#e7f2fa}.wy-alert.wy-alert-info .wy-alert-title,.rst-content .note .wy-alert-title,.rst-content .wy-alert-info.attention .wy-alert-title,.rst-content .wy-alert-info.caution .wy-alert-title,.rst-content .wy-alert-info.danger .wy-alert-title,.rst-content .wy-alert-info.error .wy-alert-title,.rst-content .wy-alert-info.hint .wy-alert-title,.rst-content .wy-alert-info.important .wy-alert-title,.rst-content .wy-alert-info.tip .wy-alert-title,.rst-content .wy-alert-info.warning .wy-alert-title,.rst-content .seealso .wy-alert-title,.rst-content .wy-alert-info.admonition-todo .wy-alert-title,.wy-alert.wy-alert-info .rst-content .admonition-title,.rst-content .wy-alert.wy-alert-info .admonition-title,.rst-content .note .admonition-title,.rst-content .wy-alert-info.attention .admonition-title,.rst-content .wy-alert-info.caution .admonition-title,.rst-content .wy-alert-info.danger .admonition-title,.rst-content .wy-alert-info.error .admonition-title,.rst-content .wy-alert-info.hint .admonition-title,.rst-content .wy-alert-info.important .admonition-title,.rst-content .wy-alert-info.tip 
.admonition-title,.rst-content .wy-alert-info.warning .admonition-title,.rst-content .seealso .admonition-title,.rst-content .wy-alert-info.admonition-todo .admonition-title{background:#6ab0de}.wy-alert.wy-alert-success,.rst-content .wy-alert-success.note,.rst-content .wy-alert-success.attention,.rst-content .wy-alert-success.caution,.rst-content .wy-alert-success.danger,.rst-content .wy-alert-success.error,.rst-content .hint,.rst-content .important,.rst-content .tip,.rst-content .wy-alert-success.warning,.rst-content .wy-alert-success.seealso,.rst-content .wy-alert-success.admonition-todo{background:#dbfaf4}.wy-alert.wy-alert-success .wy-alert-title,.rst-content .wy-alert-success.note .wy-alert-title,.rst-content .wy-alert-success.attention .wy-alert-title,.rst-content .wy-alert-success.caution .wy-alert-title,.rst-content .wy-alert-success.danger .wy-alert-title,.rst-content .wy-alert-success.error .wy-alert-title,.rst-content .hint .wy-alert-title,.rst-content .important .wy-alert-title,.rst-content .tip .wy-alert-title,.rst-content .wy-alert-success.warning .wy-alert-title,.rst-content .wy-alert-success.seealso .wy-alert-title,.rst-content .wy-alert-success.admonition-todo .wy-alert-title,.wy-alert.wy-alert-success .rst-content .admonition-title,.rst-content .wy-alert.wy-alert-success .admonition-title,.rst-content .wy-alert-success.note .admonition-title,.rst-content .wy-alert-success.attention .admonition-title,.rst-content .wy-alert-success.caution .admonition-title,.rst-content .wy-alert-success.danger .admonition-title,.rst-content .wy-alert-success.error .admonition-title,.rst-content .hint .admonition-title,.rst-content .important .admonition-title,.rst-content .tip .admonition-title,.rst-content .wy-alert-success.warning .admonition-title,.rst-content .wy-alert-success.seealso .admonition-title,.rst-content .wy-alert-success.admonition-todo .admonition-title{background:#1abc9c}.wy-alert.wy-alert-neutral,.rst-content .wy-alert-neutral.note,.rst-content .wy-alert-neutral.attention,.rst-content .wy-alert-neutral.caution,.rst-content .wy-alert-neutral.danger,.rst-content .wy-alert-neutral.error,.rst-content .wy-alert-neutral.hint,.rst-content .wy-alert-neutral.important,.rst-content .wy-alert-neutral.tip,.rst-content .wy-alert-neutral.warning,.rst-content .wy-alert-neutral.seealso,.rst-content .wy-alert-neutral.admonition-todo{background:#f3f6f6}.wy-alert.wy-alert-neutral .wy-alert-title,.rst-content .wy-alert-neutral.note .wy-alert-title,.rst-content .wy-alert-neutral.attention .wy-alert-title,.rst-content .wy-alert-neutral.caution .wy-alert-title,.rst-content .wy-alert-neutral.danger .wy-alert-title,.rst-content .wy-alert-neutral.error .wy-alert-title,.rst-content .wy-alert-neutral.hint .wy-alert-title,.rst-content .wy-alert-neutral.important .wy-alert-title,.rst-content .wy-alert-neutral.tip .wy-alert-title,.rst-content .wy-alert-neutral.warning .wy-alert-title,.rst-content .wy-alert-neutral.seealso .wy-alert-title,.rst-content .wy-alert-neutral.admonition-todo .wy-alert-title,.wy-alert.wy-alert-neutral .rst-content .admonition-title,.rst-content .wy-alert.wy-alert-neutral .admonition-title,.rst-content .wy-alert-neutral.note .admonition-title,.rst-content .wy-alert-neutral.attention .admonition-title,.rst-content .wy-alert-neutral.caution .admonition-title,.rst-content .wy-alert-neutral.danger .admonition-title,.rst-content .wy-alert-neutral.error .admonition-title,.rst-content .wy-alert-neutral.hint .admonition-title,.rst-content .wy-alert-neutral.important 
.admonition-title,.rst-content .wy-alert-neutral.tip .admonition-title,.rst-content .wy-alert-neutral.warning .admonition-title,.rst-content .wy-alert-neutral.seealso .admonition-title,.rst-content .wy-alert-neutral.admonition-todo .admonition-title{color:#404040;background:#e1e4e5}.wy-alert.wy-alert-neutral a,.rst-content .wy-alert-neutral.note a,.rst-content .wy-alert-neutral.attention a,.rst-content .wy-alert-neutral.caution a,.rst-content .wy-alert-neutral.danger a,.rst-content .wy-alert-neutral.error a,.rst-content .wy-alert-neutral.hint a,.rst-content .wy-alert-neutral.important a,.rst-content .wy-alert-neutral.tip a,.rst-content .wy-alert-neutral.warning a,.rst-content .wy-alert-neutral.seealso a,.rst-content .wy-alert-neutral.admonition-todo a{color:#2980B9}.wy-alert p:last-child,.rst-content .note p:last-child,.rst-content .attention p:last-child,.rst-content .caution p:last-child,.rst-content .danger p:last-child,.rst-content .error p:last-child,.rst-content .hint p:last-child,.rst-content .important p:last-child,.rst-content .tip p:last-child,.rst-content .warning p:last-child,.rst-content .seealso p:last-child,.rst-content .admonition-todo p:last-child{margin-bottom:0}.wy-tray-container{position:fixed;bottom:0px;left:0;z-index:600}.wy-tray-container li{display:block;width:300px;background:transparent;color:#fff;text-align:center;box-shadow:0 5px 5px 0 rgba(0,0,0,0.1);padding:0 24px;min-width:20%;opacity:0;height:0;line-height:56px;overflow:hidden;-webkit-transition:all 0.3s ease-in;-moz-transition:all 0.3s ease-in;transition:all 0.3s ease-in}.wy-tray-container li.wy-tray-item-success{background:#27AE60}.wy-tray-container li.wy-tray-item-info{background:#2980B9}.wy-tray-container li.wy-tray-item-warning{background:#E67E22}.wy-tray-container li.wy-tray-item-danger{background:#E74C3C}.wy-tray-container li.on{opacity:1;height:56px}@media screen and (max-width: 768px){.wy-tray-container{bottom:auto;top:0;width:100%}.wy-tray-container li{width:100%}}button{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle;cursor:pointer;line-height:normal;-webkit-appearance:button;*overflow:visible}button::-moz-focus-inner,input::-moz-focus-inner{border:0;padding:0}button[disabled]{cursor:default}.btn{display:inline-block;border-radius:2px;line-height:normal;white-space:nowrap;text-align:center;cursor:pointer;font-size:100%;padding:6px 12px 8px 12px;color:#fff;border:1px solid rgba(0,0,0,0.1);background-color:#27AE60;text-decoration:none;font-weight:normal;font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;box-shadow:0px 1px 2px -1px rgba(255,255,255,0.5) inset,0px -2px 0px 0px rgba(0,0,0,0.1) inset;outline-none:false;vertical-align:middle;*display:inline;zoom:1;-webkit-user-drag:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;-webkit-transition:all 0.1s linear;-moz-transition:all 0.1s linear;transition:all 0.1s linear}.btn-hover{background:#2e8ece;color:#fff}.btn:hover{background:#2cc36b;color:#fff}.btn:focus{background:#2cc36b;outline:0}.btn:active{box-shadow:0px -1px 0px 0px rgba(0,0,0,0.05) inset,0px 2px 0px 0px rgba(0,0,0,0.1) inset;padding:8px 12px 6px 12px}.btn:visited{color:#fff}.btn:disabled{background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);filter:alpha(opacity=40);opacity:0.4;cursor:not-allowed;box-shadow:none}.btn-disabled{background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled = 
false);filter:alpha(opacity=40);opacity:0.4;cursor:not-allowed;box-shadow:none}.btn-disabled:hover,.btn-disabled:focus,.btn-disabled:active{background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);filter:alpha(opacity=40);opacity:0.4;cursor:not-allowed;box-shadow:none}.btn::-moz-focus-inner{padding:0;border:0}.btn-small{font-size:80%}.btn-info{background-color:#2980B9 !important}.btn-info:hover{background-color:#2e8ece !important}.btn-neutral{background-color:#f3f6f6 !important;color:#404040 !important}.btn-neutral:hover{background-color:#e5ebeb !important;color:#404040}.btn-neutral:visited{color:#404040 !important}.btn-success{background-color:#27AE60 !important}.btn-success:hover{background-color:#295 !important}.btn-danger{background-color:#E74C3C !important}.btn-danger:hover{background-color:#ea6153 !important}.btn-warning{background-color:#E67E22 !important}.btn-warning:hover{background-color:#e98b39 !important}.btn-invert{background-color:#222}.btn-invert:hover{background-color:#2f2f2f !important}.btn-link{background-color:transparent !important;color:#2980B9;box-shadow:none;border-color:transparent !important}.btn-link:hover{background-color:transparent !important;color:#409ad5 !important;box-shadow:none}.btn-link:active{background-color:transparent !important;color:#409ad5 !important;box-shadow:none}.btn-link:visited{color:#9B59B6}.wy-btn-group .btn,.wy-control .btn{vertical-align:middle}.wy-btn-group{margin-bottom:24px;*zoom:1}.wy-btn-group:before,.wy-btn-group:after{display:table;content:""}.wy-btn-group:after{clear:both}.wy-dropdown{position:relative;display:inline-block}.wy-dropdown-active .wy-dropdown-menu{display:block}.wy-dropdown-menu{position:absolute;left:0;display:none;float:left;top:100%;min-width:100%;background:#fcfcfc;z-index:100;border:solid 1px #cfd7dd;box-shadow:0 2px 2px 0 rgba(0,0,0,0.1);padding:12px}.wy-dropdown-menu>dd>a{display:block;clear:both;color:#404040;white-space:nowrap;font-size:90%;padding:0 12px;cursor:pointer}.wy-dropdown-menu>dd>a:hover{background:#2980B9;color:#fff}.wy-dropdown-menu>dd.divider{border-top:solid 1px #cfd7dd;margin:6px 0}.wy-dropdown-menu>dd.search{padding-bottom:12px}.wy-dropdown-menu>dd.search input[type="search"]{width:100%}.wy-dropdown-menu>dd.call-to-action{background:#e3e3e3;text-transform:uppercase;font-weight:500;font-size:80%}.wy-dropdown-menu>dd.call-to-action:hover{background:#e3e3e3}.wy-dropdown-menu>dd.call-to-action .btn{color:#fff}.wy-dropdown.wy-dropdown-up .wy-dropdown-menu{bottom:100%;top:auto;left:auto;right:0}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu{background:#fcfcfc;margin-top:2px}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu a{padding:6px 12px}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu a:hover{background:#2980B9;color:#fff}.wy-dropdown.wy-dropdown-left .wy-dropdown-menu{right:0;left:auto;text-align:right}.wy-dropdown-arrow:before{content:" ";border-bottom:5px solid #f5f5f5;border-left:5px solid transparent;border-right:5px solid transparent;position:absolute;display:block;top:-4px;left:50%;margin-left:-3px}.wy-dropdown-arrow.wy-dropdown-arrow-left:before{left:11px}.wy-form-stacked select{display:block}.wy-form-aligned input,.wy-form-aligned textarea,.wy-form-aligned select,.wy-form-aligned .wy-help-inline,.wy-form-aligned label{display:inline-block;*display:inline;*zoom:1;vertical-align:middle}.wy-form-aligned .wy-control-group>label{display:inline-block;vertical-align:middle;width:10em;margin:6px 12px 0 0;float:left}.wy-form-aligned 
.wy-control{float:left}.wy-form-aligned .wy-control label{display:block}.wy-form-aligned .wy-control select{margin-top:6px}fieldset{border:0;margin:0;padding:0}legend{display:block;width:100%;border:0;padding:0;white-space:normal;margin-bottom:24px;font-size:150%;*margin-left:-7px}label{display:block;margin:0 0 0.3125em 0;color:#333;font-size:90%}input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle}.wy-control-group{margin-bottom:24px;*zoom:1;max-width:68em;margin-left:auto;margin-right:auto;*zoom:1}.wy-control-group:before,.wy-control-group:after{display:table;content:""}.wy-control-group:after{clear:both}.wy-control-group:before,.wy-control-group:after{display:table;content:""}.wy-control-group:after{clear:both}.wy-control-group.wy-control-group-required>label:after{content:" *";color:#E74C3C}.wy-control-group .wy-form-full,.wy-control-group .wy-form-halves,.wy-control-group .wy-form-thirds{padding-bottom:12px}.wy-control-group .wy-form-full select,.wy-control-group .wy-form-halves select,.wy-control-group .wy-form-thirds select{width:100%}.wy-control-group .wy-form-full input[type="text"],.wy-control-group .wy-form-full input[type="password"],.wy-control-group .wy-form-full input[type="email"],.wy-control-group .wy-form-full input[type="url"],.wy-control-group .wy-form-full input[type="date"],.wy-control-group .wy-form-full input[type="month"],.wy-control-group .wy-form-full input[type="time"],.wy-control-group .wy-form-full input[type="datetime"],.wy-control-group .wy-form-full input[type="datetime-local"],.wy-control-group .wy-form-full input[type="week"],.wy-control-group .wy-form-full input[type="number"],.wy-control-group .wy-form-full input[type="search"],.wy-control-group .wy-form-full input[type="tel"],.wy-control-group .wy-form-full input[type="color"],.wy-control-group .wy-form-halves input[type="text"],.wy-control-group .wy-form-halves input[type="password"],.wy-control-group .wy-form-halves input[type="email"],.wy-control-group .wy-form-halves input[type="url"],.wy-control-group .wy-form-halves input[type="date"],.wy-control-group .wy-form-halves input[type="month"],.wy-control-group .wy-form-halves input[type="time"],.wy-control-group .wy-form-halves input[type="datetime"],.wy-control-group .wy-form-halves input[type="datetime-local"],.wy-control-group .wy-form-halves input[type="week"],.wy-control-group .wy-form-halves input[type="number"],.wy-control-group .wy-form-halves input[type="search"],.wy-control-group .wy-form-halves input[type="tel"],.wy-control-group .wy-form-halves input[type="color"],.wy-control-group .wy-form-thirds input[type="text"],.wy-control-group .wy-form-thirds input[type="password"],.wy-control-group .wy-form-thirds input[type="email"],.wy-control-group .wy-form-thirds input[type="url"],.wy-control-group .wy-form-thirds input[type="date"],.wy-control-group .wy-form-thirds input[type="month"],.wy-control-group .wy-form-thirds input[type="time"],.wy-control-group .wy-form-thirds input[type="datetime"],.wy-control-group .wy-form-thirds input[type="datetime-local"],.wy-control-group .wy-form-thirds input[type="week"],.wy-control-group .wy-form-thirds input[type="number"],.wy-control-group .wy-form-thirds input[type="search"],.wy-control-group .wy-form-thirds input[type="tel"],.wy-control-group .wy-form-thirds input[type="color"]{width:100%}.wy-control-group .wy-form-full{float:left;display:block;margin-right:2.35765%;width:100%;margin-right:0}.wy-control-group 
.wy-form-full:last-child{margin-right:0}.wy-control-group .wy-form-halves{float:left;display:block;margin-right:2.35765%;width:48.82117%}.wy-control-group .wy-form-halves:last-child{margin-right:0}.wy-control-group .wy-form-halves:nth-of-type(2n){margin-right:0}.wy-control-group .wy-form-halves:nth-of-type(2n+1){clear:left}.wy-control-group .wy-form-thirds{float:left;display:block;margin-right:2.35765%;width:31.76157%}.wy-control-group .wy-form-thirds:last-child{margin-right:0}.wy-control-group .wy-form-thirds:nth-of-type(3n){margin-right:0}.wy-control-group .wy-form-thirds:nth-of-type(3n+1){clear:left}.wy-control-group.wy-control-group-no-input .wy-control{margin:6px 0 0 0;font-size:90%}.wy-control-no-input{display:inline-block;margin:6px 0 0 0;font-size:90%}.wy-control-group.fluid-input input[type="text"],.wy-control-group.fluid-input input[type="password"],.wy-control-group.fluid-input input[type="email"],.wy-control-group.fluid-input input[type="url"],.wy-control-group.fluid-input input[type="date"],.wy-control-group.fluid-input input[type="month"],.wy-control-group.fluid-input input[type="time"],.wy-control-group.fluid-input input[type="datetime"],.wy-control-group.fluid-input input[type="datetime-local"],.wy-control-group.fluid-input input[type="week"],.wy-control-group.fluid-input input[type="number"],.wy-control-group.fluid-input input[type="search"],.wy-control-group.fluid-input input[type="tel"],.wy-control-group.fluid-input input[type="color"]{width:100%}.wy-form-message-inline{display:inline-block;padding-left:0.3em;color:#666;vertical-align:middle;font-size:90%}.wy-form-message{display:block;color:#999;font-size:70%;margin-top:0.3125em;font-style:italic}.wy-form-message p{font-size:inherit;font-style:italic;margin-bottom:6px}.wy-form-message p:last-child{margin-bottom:0}input{line-height:normal}input[type="button"],input[type="reset"],input[type="submit"]{-webkit-appearance:button;cursor:pointer;font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;*overflow:visible}input[type="text"],input[type="password"],input[type="email"],input[type="url"],input[type="date"],input[type="month"],input[type="time"],input[type="datetime"],input[type="datetime-local"],input[type="week"],input[type="number"],input[type="search"],input[type="tel"],input[type="color"]{-webkit-appearance:none;padding:6px;display:inline-block;border:1px solid #ccc;font-size:80%;font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;box-shadow:inset 0 1px 3px #ddd;border-radius:0;-webkit-transition:border 0.3s linear;-moz-transition:border 0.3s linear;transition:border 0.3s linear}input[type="datetime-local"]{padding:0.34375em 0.625em}input[disabled]{cursor:default}input[type="checkbox"],input[type="radio"]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;padding:0;margin-right:0.3125em;*height:13px;*width:13px}input[type="search"]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}input[type="search"]::-webkit-search-cancel-button,input[type="search"]::-webkit-search-decoration{-webkit-appearance:none}input[type="text"]:focus,input[type="password"]:focus,input[type="email"]:focus,input[type="url"]:focus,input[type="date"]:focus,input[type="month"]:focus,input[type="time"]:focus,input[type="datetime"]:focus,input[type="datetime-local"]:focus,input[type="week"]:focus,input[type="number"]:focus,input[type="search"]:focus,input[type="tel"]:focus,input[type="color"]:focus{outline:0;outline:thin dotted 
\9;border-color:#333}input.no-focus:focus{border-color:#ccc !important}input[type="file"]:focus,input[type="radio"]:focus,input[type="checkbox"]:focus{outline:thin dotted #333;outline:1px auto #129FEA}input[type="text"][disabled],input[type="password"][disabled],input[type="email"][disabled],input[type="url"][disabled],input[type="date"][disabled],input[type="month"][disabled],input[type="time"][disabled],input[type="datetime"][disabled],input[type="datetime-local"][disabled],input[type="week"][disabled],input[type="number"][disabled],input[type="search"][disabled],input[type="tel"][disabled],input[type="color"][disabled]{cursor:not-allowed;background-color:#fafafa}input:focus:invalid,textarea:focus:invalid,select:focus:invalid{color:#E74C3C;border:1px solid #E74C3C}input:focus:invalid:focus,textarea:focus:invalid:focus,select:focus:invalid:focus{border-color:#E74C3C}input[type="file"]:focus:invalid:focus,input[type="radio"]:focus:invalid:focus,input[type="checkbox"]:focus:invalid:focus{outline-color:#E74C3C}input.wy-input-large{padding:12px;font-size:100%}textarea{overflow:auto;vertical-align:top;width:100%;font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif}select,textarea{padding:0.5em 0.625em;display:inline-block;border:1px solid #ccc;font-size:80%;box-shadow:inset 0 1px 3px #ddd;-webkit-transition:border 0.3s linear;-moz-transition:border 0.3s linear;transition:border 0.3s linear}select{border:1px solid #ccc;background-color:#fff}select[multiple]{height:auto}select:focus,textarea:focus{outline:0}select[disabled],textarea[disabled],input[readonly],select[readonly],textarea[readonly]{cursor:not-allowed;background-color:#fafafa}input[type="radio"][disabled],input[type="checkbox"][disabled]{cursor:not-allowed}.wy-checkbox,.wy-radio{margin:6px 0;color:#404040;display:block}.wy-checkbox input,.wy-radio input{vertical-align:baseline}.wy-form-message-inline{display:inline-block;*display:inline;*zoom:1;vertical-align:middle}.wy-input-prefix,.wy-input-suffix{white-space:nowrap;padding:6px}.wy-input-prefix .wy-input-context,.wy-input-suffix .wy-input-context{line-height:27px;padding:0 8px;display:inline-block;font-size:80%;background-color:#f3f6f6;border:solid 1px #ccc;color:#999}.wy-input-suffix .wy-input-context{border-left:0}.wy-input-prefix .wy-input-context{border-right:0}.wy-switch{width:36px;height:12px;margin:12px 0;position:relative;border-radius:4px;background:#ccc;cursor:pointer;-webkit-transition:all 0.2s ease-in-out;-moz-transition:all 0.2s ease-in-out;transition:all 0.2s ease-in-out}.wy-switch:before{position:absolute;content:"";display:block;width:18px;height:18px;border-radius:4px;background:#999;left:-3px;top:-3px;-webkit-transition:all 0.2s ease-in-out;-moz-transition:all 0.2s ease-in-out;transition:all 0.2s ease-in-out}.wy-switch:after{content:"false";position:absolute;left:48px;display:block;font-size:12px;color:#ccc}.wy-switch.active{background:#1e8449}.wy-switch.active:before{left:24px;background:#27AE60}.wy-switch.active:after{content:"true"}.wy-switch.disabled,.wy-switch.active.disabled{cursor:not-allowed}.wy-control-group.wy-control-group-error .wy-form-message,.wy-control-group.wy-control-group-error>label{color:#E74C3C}.wy-control-group.wy-control-group-error input[type="text"],.wy-control-group.wy-control-group-error input[type="password"],.wy-control-group.wy-control-group-error input[type="email"],.wy-control-group.wy-control-group-error input[type="url"],.wy-control-group.wy-control-group-error 
input[type="date"],.wy-control-group.wy-control-group-error input[type="month"],.wy-control-group.wy-control-group-error input[type="time"],.wy-control-group.wy-control-group-error input[type="datetime"],.wy-control-group.wy-control-group-error input[type="datetime-local"],.wy-control-group.wy-control-group-error input[type="week"],.wy-control-group.wy-control-group-error input[type="number"],.wy-control-group.wy-control-group-error input[type="search"],.wy-control-group.wy-control-group-error input[type="tel"],.wy-control-group.wy-control-group-error input[type="color"]{border:solid 1px #E74C3C}.wy-control-group.wy-control-group-error textarea{border:solid 1px #E74C3C}.wy-inline-validate{white-space:nowrap}.wy-inline-validate .wy-input-context{padding:0.5em 0.625em;display:inline-block;font-size:80%}.wy-inline-validate.wy-inline-validate-success .wy-input-context{color:#27AE60}.wy-inline-validate.wy-inline-validate-danger .wy-input-context{color:#E74C3C}.wy-inline-validate.wy-inline-validate-warning .wy-input-context{color:#E67E22}.wy-inline-validate.wy-inline-validate-info .wy-input-context{color:#2980B9}.rotate-90{-webkit-transform:rotate(90deg);-moz-transform:rotate(90deg);-ms-transform:rotate(90deg);-o-transform:rotate(90deg);transform:rotate(90deg)}.rotate-180{-webkit-transform:rotate(180deg);-moz-transform:rotate(180deg);-ms-transform:rotate(180deg);-o-transform:rotate(180deg);transform:rotate(180deg)}.rotate-270{-webkit-transform:rotate(270deg);-moz-transform:rotate(270deg);-ms-transform:rotate(270deg);-o-transform:rotate(270deg);transform:rotate(270deg)}.mirror{-webkit-transform:scaleX(-1);-moz-transform:scaleX(-1);-ms-transform:scaleX(-1);-o-transform:scaleX(-1);transform:scaleX(-1)}.mirror.rotate-90{-webkit-transform:scaleX(-1) rotate(90deg);-moz-transform:scaleX(-1) rotate(90deg);-ms-transform:scaleX(-1) rotate(90deg);-o-transform:scaleX(-1) rotate(90deg);transform:scaleX(-1) rotate(90deg)}.mirror.rotate-180{-webkit-transform:scaleX(-1) rotate(180deg);-moz-transform:scaleX(-1) rotate(180deg);-ms-transform:scaleX(-1) rotate(180deg);-o-transform:scaleX(-1) rotate(180deg);transform:scaleX(-1) rotate(180deg)}.mirror.rotate-270{-webkit-transform:scaleX(-1) rotate(270deg);-moz-transform:scaleX(-1) rotate(270deg);-ms-transform:scaleX(-1) rotate(270deg);-o-transform:scaleX(-1) rotate(270deg);transform:scaleX(-1) rotate(270deg)}@media only screen and (max-width: 480px){.wy-form button[type="submit"]{margin:0.7em 0 0}.wy-form input[type="text"],.wy-form input[type="password"],.wy-form input[type="email"],.wy-form input[type="url"],.wy-form input[type="date"],.wy-form input[type="month"],.wy-form input[type="time"],.wy-form input[type="datetime"],.wy-form input[type="datetime-local"],.wy-form input[type="week"],.wy-form input[type="number"],.wy-form input[type="search"],.wy-form input[type="tel"],.wy-form input[type="color"]{margin-bottom:0.3em;display:block}.wy-form label{margin-bottom:0.3em;display:block}.wy-form input[type="password"],.wy-form input[type="email"],.wy-form input[type="url"],.wy-form input[type="date"],.wy-form input[type="month"],.wy-form input[type="time"],.wy-form input[type="datetime"],.wy-form input[type="datetime-local"],.wy-form input[type="week"],.wy-form input[type="number"],.wy-form input[type="search"],.wy-form input[type="tel"],.wy-form input[type="color"]{margin-bottom:0}.wy-form-aligned .wy-control-group label{margin-bottom:0.3em;text-align:left;display:block;width:100%}.wy-form-aligned .wy-control{margin:1.5em 0 0 0}.wy-form 
.wy-help-inline,.wy-form-message-inline,.wy-form-message{display:block;font-size:80%;padding:6px 0}}@media screen and (max-width: 768px){.tablet-hide{display:none}}@media screen and (max-width: 480px){.mobile-hide{display:none}}.float-left{float:left}.float-right{float:right}.full-width{width:100%}.wy-table,.rst-content table.docutils,.rst-content table.field-list{border-collapse:collapse;border-spacing:0;empty-cells:show;margin-bottom:24px}.wy-table caption,.rst-content table.docutils caption,.rst-content table.field-list caption{color:#000;font:italic 85%/1 arial,sans-serif;padding:1em 0;text-align:center}.wy-table td,.rst-content table.docutils td,.rst-content table.field-list td,.wy-table th,.rst-content table.docutils th,.rst-content table.field-list th{font-size:90%;margin:0;overflow:visible;padding:8px 16px}.wy-table td:first-child,.rst-content table.docutils td:first-child,.rst-content table.field-list td:first-child,.wy-table th:first-child,.rst-content table.docutils th:first-child,.rst-content table.field-list th:first-child{border-left-width:0}.wy-table thead,.rst-content table.docutils thead,.rst-content table.field-list thead{color:#000;text-align:left;vertical-align:bottom;white-space:nowrap}.wy-table thead th,.rst-content table.docutils thead th,.rst-content table.field-list thead th{font-weight:bold;border-bottom:solid 2px #e1e4e5}.wy-table td,.rst-content table.docutils td,.rst-content table.field-list td{background-color:transparent;vertical-align:middle}.wy-table td p,.rst-content table.docutils td p,.rst-content table.field-list td p{line-height:18px}.wy-table td p:last-child,.rst-content table.docutils td p:last-child,.rst-content table.field-list td p:last-child{margin-bottom:0}.wy-table .wy-table-cell-min,.rst-content table.docutils .wy-table-cell-min,.rst-content table.field-list .wy-table-cell-min{width:1%;padding-right:0}.wy-table .wy-table-cell-min input[type=checkbox],.rst-content table.docutils .wy-table-cell-min input[type=checkbox],.rst-content table.field-list .wy-table-cell-min input[type=checkbox],.wy-table .wy-table-cell-min input[type=checkbox],.rst-content table.docutils .wy-table-cell-min input[type=checkbox],.rst-content table.field-list .wy-table-cell-min input[type=checkbox]{margin:0}.wy-table-secondary{color:gray;font-size:90%}.wy-table-tertiary{color:gray;font-size:80%}.wy-table-odd td,.wy-table-striped tr:nth-child(2n-1) td,.rst-content table.docutils:not(.field-list) tr:nth-child(2n-1) td{background-color:#f3f6f6}.wy-table-backed{background-color:#f3f6f6}.wy-table-bordered-all,.rst-content table.docutils{border:1px solid #e1e4e5}.wy-table-bordered-all td,.rst-content table.docutils td{border-bottom:1px solid #e1e4e5;border-left:1px solid #e1e4e5}.wy-table-bordered-all tbody>tr:last-child td,.rst-content table.docutils tbody>tr:last-child td{border-bottom-width:0}.wy-table-bordered{border:1px solid #e1e4e5}.wy-table-bordered-rows td{border-bottom:1px solid #e1e4e5}.wy-table-bordered-rows tbody>tr:last-child td{border-bottom-width:0}.wy-table-horizontal tbody>tr:last-child td{border-bottom-width:0}.wy-table-horizontal td,.wy-table-horizontal th{border-width:0 0 1px 0;border-bottom:1px solid #e1e4e5}.wy-table-horizontal tbody>tr:last-child td{border-bottom-width:0}.wy-table-responsive{margin-bottom:24px;max-width:100%;overflow:auto}.wy-table-responsive table{margin-bottom:0 !important}.wy-table-responsive table td,.wy-table-responsive table 
th{white-space:nowrap}a{color:#2980B9;text-decoration:none;cursor:pointer}a:hover{color:#3091d1}a:visited{color:#9B59B6}html{height:100%;overflow-x:hidden}body{font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;font-weight:normal;color:#404040;min-height:100%;overflow-x:hidden;background:#edf0f2}.wy-text-left{text-align:left}.wy-text-center{text-align:center}.wy-text-right{text-align:right}.wy-text-large{font-size:120%}.wy-text-normal{font-size:100%}.wy-text-small,small{font-size:80%}.wy-text-strike{text-decoration:line-through}.wy-text-warning{color:#E67E22 !important}a.wy-text-warning:hover{color:#eb9950 !important}.wy-text-info{color:#2980B9 !important}a.wy-text-info:hover{color:#409ad5 !important}.wy-text-success{color:#27AE60 !important}a.wy-text-success:hover{color:#36d278 !important}.wy-text-danger{color:#E74C3C !important}a.wy-text-danger:hover{color:#ed7669 !important}.wy-text-neutral{color:#404040 !important}a.wy-text-neutral:hover{color:#595959 !important}h1,h2,h3,h4,h5,h6,legend{margin-top:0;font-weight:700;font-family:"Roboto Slab","ff-tisa-web-pro","Georgia",Arial,sans-serif}p{line-height:24px;margin:0;font-size:16px;margin-bottom:24px}h1{font-size:175%}h2{font-size:150%}h3{font-size:125%}h4{font-size:115%}h5{font-size:110%}h6{font-size:100%}hr{display:block;height:1px;border:0;border-top:1px solid #e1e4e5;margin:24px 0;padding:0}code,.rst-content tt{white-space:nowrap;max-width:100%;background:#fff;border:solid 1px #e1e4e5;font-size:75%;padding:0 5px;font-family:Consolas,"Andale Mono WT","Andale Mono","Lucida Console","Lucida Sans Typewriter","DejaVu Sans Mono","Bitstream Vera Sans Mono","Liberation Mono","Nimbus Mono L",Monaco,"Courier New",Courier,monospace;color:#E74C3C;overflow-x:auto}code.code-large,.rst-content tt.code-large{font-size:90%}.wy-plain-list-disc,.rst-content .section ul,.rst-content .toctree-wrapper ul,article ul{list-style:disc;line-height:24px;margin-bottom:24px}.wy-plain-list-disc li,.rst-content .section ul li,.rst-content .toctree-wrapper ul li,article ul li{list-style:disc;margin-left:24px}.wy-plain-list-disc li p:last-child,.rst-content .section ul li p:last-child,.rst-content .toctree-wrapper ul li p:last-child,article ul li p:last-child{margin-bottom:0}.wy-plain-list-disc li ul,.rst-content .section ul li ul,.rst-content .toctree-wrapper ul li ul,article ul li ul{margin-bottom:0}.wy-plain-list-disc li li,.rst-content .section ul li li,.rst-content .toctree-wrapper ul li li,article ul li li{list-style:circle}.wy-plain-list-disc li li li,.rst-content .section ul li li li,.rst-content .toctree-wrapper ul li li li,article ul li li li{list-style:square}.wy-plain-list-disc li ol li,.rst-content .section ul li ol li,.rst-content .toctree-wrapper ul li ol li,article ul li ol li{list-style:decimal}.wy-plain-list-decimal,.rst-content .section ol,.rst-content ol.arabic,article ol{list-style:decimal;line-height:24px;margin-bottom:24px}.wy-plain-list-decimal li,.rst-content .section ol li,.rst-content ol.arabic li,article ol li{list-style:decimal;margin-left:24px}.wy-plain-list-decimal li p:last-child,.rst-content .section ol li p:last-child,.rst-content ol.arabic li p:last-child,article ol li p:last-child{margin-bottom:0}.wy-plain-list-decimal li ul,.rst-content .section ol li ul,.rst-content ol.arabic li ul,article ol li ul{margin-bottom:0}.wy-plain-list-decimal li ul li,.rst-content .section ol li ul li,.rst-content ol.arabic li ul li,article ol li ul li{list-style:disc}.codeblock-example{border:1px solid 
#e1e4e5;border-bottom:none;padding:24px;padding-top:48px;font-weight:500;background:#fff;position:relative}.codeblock-example:after{content:"Example";position:absolute;top:0px;left:0px;background:#9B59B6;color:#fff;padding:6px 12px}.codeblock-example.prettyprint-example-only{border:1px solid #e1e4e5;margin-bottom:24px}.codeblock,pre.literal-block,.rst-content .literal-block,.rst-content pre.literal-block,div[class^='highlight']{border:1px solid #e1e4e5;padding:0px;overflow-x:auto;background:#fff;margin:1px 0 24px 0}.codeblock div[class^='highlight'],pre.literal-block div[class^='highlight'],.rst-content .literal-block div[class^='highlight'],div[class^='highlight'] div[class^='highlight']{border:none;background:none;margin:0}div[class^='highlight'] td.code{width:100%}.linenodiv pre{border-right:solid 1px #e6e9ea;margin:0;padding:12px 12px;font-family:Consolas,"Andale Mono WT","Andale Mono","Lucida Console","Lucida Sans Typewriter","DejaVu Sans Mono","Bitstream Vera Sans Mono","Liberation Mono","Nimbus Mono L",Monaco,"Courier New",Courier,monospace;font-size:12px;line-height:1.5;color:#d9d9d9}div[class^='highlight'] pre{white-space:pre;margin:0;padding:12px 12px;font-family:Consolas,"Andale Mono WT","Andale Mono","Lucida Console","Lucida Sans Typewriter","DejaVu Sans Mono","Bitstream Vera Sans Mono","Liberation Mono","Nimbus Mono L",Monaco,"Courier New",Courier,monospace;font-size:12px;line-height:1.5;display:block;overflow:auto;color:#404040}@media print{.codeblock,pre.literal-block,.rst-content .literal-block,.rst-content pre.literal-block,div[class^='highlight'],div[class^='highlight'] pre{white-space:pre-wrap}}.hll{background-color:#ffc;margin:0 -12px;padding:0 12px;display:block}.c{color:#998;font-style:italic}.err{color:#a61717;background-color:#e3d2d2}.k{font-weight:bold}.o{font-weight:bold}.cm{color:#998;font-style:italic}.cp{color:#999;font-weight:bold}.c1{color:#998;font-style:italic}.cs{color:#999;font-weight:bold;font-style:italic}.gd{color:#000;background-color:#fdd}.gd .x{color:#000;background-color:#faa}.ge{font-style:italic}.gr{color:#a00}.gh{color:#999}.gi{color:#000;background-color:#dfd}.gi .x{color:#000;background-color:#afa}.go{color:#888}.gp{color:#555}.gs{font-weight:bold}.gu{color:purple;font-weight:bold}.gt{color:#a00}.kc{font-weight:bold}.kd{font-weight:bold}.kn{font-weight:bold}.kp{font-weight:bold}.kr{font-weight:bold}.kt{color:#458;font-weight:bold}.m{color:#099}.s{color:#d14}.n{color:#333}.na{color:teal}.nb{color:#0086b3}.nc{color:#458;font-weight:bold}.no{color:teal}.ni{color:purple}.ne{color:#900;font-weight:bold}.nf{color:#900;font-weight:bold}.nn{color:#555}.nt{color:navy}.nv{color:teal}.ow{font-weight:bold}.w{color:#bbb}.mf{color:#099}.mh{color:#099}.mi{color:#099}.mo{color:#099}.sb{color:#d14}.sc{color:#d14}.sd{color:#d14}.s2{color:#d14}.se{color:#d14}.sh{color:#d14}.si{color:#d14}.sx{color:#d14}.sr{color:#009926}.s1{color:#d14}.ss{color:#990073}.bp{color:#999}.vc{color:teal}.vg{color:teal}.vi{color:teal}.il{color:#099}.gc{color:#999;background-color:#EAF2F5}.wy-breadcrumbs li{display:inline-block}.wy-breadcrumbs li.wy-breadcrumbs-aside{float:right}.wy-breadcrumbs li a{display:inline-block;padding:5px}.wy-breadcrumbs li a:first-child{padding-left:0}.wy-breadcrumbs-extra{margin-bottom:0;color:#b3b3b3;font-size:80%;display:inline-block}@media screen and (max-width: 480px){.wy-breadcrumbs-extra{display:none}.wy-breadcrumbs li.wy-breadcrumbs-aside{display:none}}@media print{.wy-breadcrumbs 
li.wy-breadcrumbs-aside{display:none}}.wy-affix{position:fixed;top:1.618em}.wy-menu a:hover{text-decoration:none}.wy-menu-horiz{*zoom:1}.wy-menu-horiz:before,.wy-menu-horiz:after{display:table;content:""}.wy-menu-horiz:after{clear:both}.wy-menu-horiz ul,.wy-menu-horiz li{display:inline-block}.wy-menu-horiz li:hover{background:rgba(255,255,255,0.1)}.wy-menu-horiz li.divide-left{border-left:solid 1px #404040}.wy-menu-horiz li.divide-right{border-right:solid 1px #404040}.wy-menu-horiz a{height:32px;display:inline-block;line-height:32px;padding:0 16px}.wy-menu-vertical header{height:32px;display:inline-block;line-height:32px;padding:0 1.618em;display:block;font-weight:bold;text-transform:uppercase;font-size:80%;color:#2980B9;white-space:nowrap}.wy-menu-vertical ul{margin-bottom:0}.wy-menu-vertical li.divide-top{border-top:solid 1px #404040}.wy-menu-vertical li.divide-bottom{border-bottom:solid 1px #404040}.wy-menu-vertical li.current{background:#e3e3e3}.wy-menu-vertical li.current a{color:gray;border-right:solid 1px #c9c9c9;padding:0.4045em 2.427em}.wy-menu-vertical li.current a:hover{background:#d6d6d6}.wy-menu-vertical li.on a,.wy-menu-vertical li.current>a{color:#404040;padding:0.4045em 1.618em;font-weight:bold;position:relative;background:#fcfcfc;border:none;border-bottom:solid 1px #c9c9c9;border-top:solid 1px #c9c9c9;padding-left:1.618em -4px}.wy-menu-vertical li.on a:hover,.wy-menu-vertical li.current>a:hover{background:#fcfcfc}.wy-menu-vertical li.toctree-l2.current>a{background:#c9c9c9;padding:0.4045em 2.427em}.wy-menu-vertical li.current ul{display:block}.wy-menu-vertical li ul{margin-bottom:0;display:none}.wy-menu-vertical .local-toc li ul{display:block}.wy-menu-vertical li ul li a{margin-bottom:0;color:#b3b3b3;font-weight:normal}.wy-menu-vertical a{display:inline-block;line-height:18px;padding:0.4045em 1.618em;display:block;position:relative;font-size:90%;color:#b3b3b3}.wy-menu-vertical a:hover{background-color:#4e4a4a;cursor:pointer}.wy-menu-vertical a:active{background-color:#2980B9;cursor:pointer;color:#fff}.wy-side-nav-search{z-index:200;background-color:#2980B9;text-align:center;padding:0.809em;display:block;color:#fcfcfc;margin-bottom:0.809em}.wy-side-nav-search input[type=text]{width:100%;border-radius:50px;padding:6px 12px;border-color:#2472a4}.wy-side-nav-search img{display:block;margin:auto auto 0.809em auto;height:45px;width:45px;background-color:#2980B9;padding:5px;border-radius:100%}.wy-side-nav-search>a,.wy-side-nav-search .wy-dropdown>a{color:#fcfcfc;font-size:100%;font-weight:bold;display:inline-block;padding:4px 6px;margin-bottom:0.809em}.wy-side-nav-search>a:hover,.wy-side-nav-search .wy-dropdown>a:hover{background:rgba(255,255,255,0.1)}.wy-nav .wy-menu-vertical header{color:#2980B9}.wy-nav .wy-menu-vertical a{color:#b3b3b3}.wy-nav .wy-menu-vertical a:hover{background-color:#2980B9;color:#fff}[data-menu-wrap]{-webkit-transition:all 0.2s ease-in;-moz-transition:all 0.2s ease-in;transition:all 0.2s ease-in;position:absolute;opacity:1;width:100%;opacity:0}[data-menu-wrap].move-center{left:0;right:auto;opacity:1}[data-menu-wrap].move-left{right:auto;left:-100%;opacity:0}[data-menu-wrap].move-right{right:-100%;left:auto;opacity:0}.wy-body-for-nav{background:left repeat-y 
#fcfcfc;background-image:url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyRpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMy1jMDExIDY2LjE0NTY2MSwgMjAxMi8wMi8wNi0xNDo1NjoyNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNiAoTWFjaW50b3NoKSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDoxOERBMTRGRDBFMUUxMUUzODUwMkJCOThDMEVFNURFMCIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDoxOERBMTRGRTBFMUUxMUUzODUwMkJCOThDMEVFNURFMCI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOjE4REExNEZCMEUxRTExRTM4NTAyQkI5OEMwRUU1REUwIiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOjE4REExNEZDMEUxRTExRTM4NTAyQkI5OEMwRUU1REUwIi8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+EwrlwAAAAA5JREFUeNpiMDU0BAgwAAE2AJgB9BnaAAAAAElFTkSuQmCC);background-size:300px 1px}.wy-grid-for-nav{position:absolute;width:100%;height:100%}.wy-nav-side{position:absolute;top:0;left:0;width:300px;overflow:hidden;min-height:100%;background:#343131;z-index:200}.wy-nav-top{display:none;background:#2980B9;color:#fff;padding:0.4045em 0.809em;position:relative;line-height:50px;text-align:center;font-size:100%;*zoom:1}.wy-nav-top:before,.wy-nav-top:after{display:table;content:""}.wy-nav-top:after{clear:both}.wy-nav-top a{color:#fff;font-weight:bold}.wy-nav-top img{margin-right:12px;height:45px;width:45px;background-color:#2980B9;padding:5px;border-radius:100%}.wy-nav-top i{font-size:30px;float:left;cursor:pointer}.wy-nav-content-wrap{margin-left:300px;background:#fcfcfc;min-height:100%}.wy-nav-content{padding:1.618em 3.236em;height:100%;max-width:800px;margin:auto}.wy-body-mask{position:fixed;width:100%;height:100%;background:rgba(0,0,0,0.2);display:none;z-index:499}.wy-body-mask.on{display:block}footer{color:#999}footer p{margin-bottom:12px}.rst-footer-buttons{*zoom:1}.rst-footer-buttons:before,.rst-footer-buttons:after{display:table;content:""}.rst-footer-buttons:after{clear:both}#search-results .search li{margin-bottom:24px;border-bottom:solid 1px #e1e4e5;padding-bottom:24px}#search-results .search li:first-child{border-top:solid 1px #e1e4e5;padding-top:24px}#search-results .search li a{font-size:120%;margin-bottom:12px;display:inline-block}#search-results .context{color:gray;font-size:90%}@media screen and (max-width: 768px){.wy-body-for-nav{background:#fcfcfc}.wy-nav-top{display:block}.wy-nav-side{left:-300px}.wy-nav-side.shift{width:85%;left:0}.wy-nav-content-wrap{margin-left:0}.wy-nav-content-wrap .wy-nav-content{padding:1.618em}.wy-nav-content-wrap.shift{position:fixed;min-width:100%;left:85%;top:0;height:100%;overflow:hidden}}@media screen and (min-width: 1400px){.wy-nav-content-wrap{background:rgba(0,0,0,0.05)}.wy-nav-content{margin:0;background:#fcfcfc}}@media print{.rst-versions,footer,.wy-nav-side{display:none}.wy-nav-content-wrap{margin-left:0}}nav.stickynav{position:fixed;top:0}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;border-top:solid 10px #343131;font-family:"Lato","proxima-nova","Helvetica 
Neue",Arial,sans-serif;z-index:400}.rst-versions a{color:#2980B9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27AE60;*zoom:1}.rst-versions .rst-current-version:before,.rst-versions .rst-current-version:after{display:table;content:""}.rst-versions .rst-current-version:after{clear:both}.rst-versions .rst-current-version .fa,.rst-versions .rst-current-version .rst-content .admonition-title,.rst-content .rst-versions .rst-current-version .admonition-title,.rst-versions .rst-current-version .rst-content h1 .headerlink,.rst-content h1 .rst-versions .rst-current-version .headerlink,.rst-versions .rst-current-version .rst-content h2 .headerlink,.rst-content h2 .rst-versions .rst-current-version .headerlink,.rst-versions .rst-current-version .rst-content h3 .headerlink,.rst-content h3 .rst-versions .rst-current-version .headerlink,.rst-versions .rst-current-version .rst-content h4 .headerlink,.rst-content h4 .rst-versions .rst-current-version .headerlink,.rst-versions .rst-current-version .rst-content h5 .headerlink,.rst-content h5 .rst-versions .rst-current-version .headerlink,.rst-versions .rst-current-version .rst-content h6 .headerlink,.rst-content h6 .rst-versions .rst-current-version .headerlink,.rst-versions .rst-current-version .rst-content dl dt .headerlink,.rst-content dl dt .rst-versions .rst-current-version .headerlink,.rst-versions .rst-current-version .icon{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#E74C3C;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#F1C40F;color:#000}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:gray;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:solid 1px #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px}.rst-versions.rst-badge .icon-book{float:none}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge .rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width: 768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}img{width:100%;height:auto}}.rst-content img{max-width:100%;height:auto !important}.rst-content div.figure{margin-bottom:24px}.rst-content div.figure.align-center{text-align:center}.rst-content .section>img,.rst-content .section>a>img{margin-bottom:24px}.rst-content blockquote{margin-left:24px;line-height:24px;margin-bottom:24px}.rst-content .note .last,.rst-content .attention .last,.rst-content .caution .last,.rst-content .danger .last,.rst-content .error .last,.rst-content .hint 
.last,.rst-content .important .last,.rst-content .tip .last,.rst-content .warning .last,.rst-content .seealso .last,.rst-content .admonition-todo .last{margin-bottom:0}.rst-content .admonition-title:before{margin-right:4px}.rst-content .admonition table{border-color:rgba(0,0,0,0.1)}.rst-content .admonition table td,.rst-content .admonition table th{background:transparent !important;border-color:rgba(0,0,0,0.1) !important}.rst-content .section ol.loweralpha,.rst-content .section ol.loweralpha li{list-style:lower-alpha}.rst-content .section ol.upperalpha,.rst-content .section ol.upperalpha li{list-style:upper-alpha}.rst-content .section ol p,.rst-content .section ul p{margin-bottom:12px}.rst-content .line-block{margin-left:24px}.rst-content .topic-title{font-weight:bold;margin-bottom:12px}.rst-content .toc-backref{color:#404040}.rst-content .align-right{float:right;margin:0px 0px 24px 24px}.rst-content .align-left{float:left;margin:0px 24px 24px 0px}.rst-content .align-center{margin:auto;display:block}.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content dl dt .headerlink{display:none;visibility:hidden;font-size:14px}.rst-content h1 .headerlink:after,.rst-content h2 .headerlink:after,.rst-content h3 .headerlink:after,.rst-content h4 .headerlink:after,.rst-content h5 .headerlink:after,.rst-content h6 .headerlink:after,.rst-content dl dt .headerlink:after{visibility:visible;content:"";font-family:FontAwesome;display:inline-block}.rst-content h1:hover .headerlink,.rst-content h2:hover .headerlink,.rst-content h3:hover .headerlink,.rst-content h4:hover .headerlink,.rst-content h5:hover .headerlink,.rst-content h6:hover .headerlink,.rst-content dl dt:hover .headerlink{display:inline-block}.rst-content .sidebar{float:right;width:40%;display:block;margin:0 0 24px 24px;padding:24px;background:#f3f6f6;border:solid 1px #e1e4e5}.rst-content .sidebar p,.rst-content .sidebar ul,.rst-content .sidebar dl{font-size:90%}.rst-content .sidebar .last{margin-bottom:0}.rst-content .sidebar .sidebar-title{display:block;font-family:"Roboto Slab","ff-tisa-web-pro","Georgia",Arial,sans-serif;font-weight:bold;background:#e1e4e5;padding:6px 12px;margin:-24px;margin-bottom:24px;font-size:100%}.rst-content .highlighted{background:#F1C40F;display:inline-block;font-weight:bold;padding:0 6px}.rst-content .footnote-reference,.rst-content .citation-reference{vertical-align:super;font-size:90%}.rst-content table.docutils.citation,.rst-content table.docutils.footnote{background:none;border:none;color:#999}.rst-content table.docutils.citation td,.rst-content table.docutils.citation tr,.rst-content table.docutils.footnote td,.rst-content table.docutils.footnote tr{border:none;background-color:transparent !important;white-space:normal}.rst-content table.docutils.citation td.label,.rst-content table.docutils.footnote td.label{padding-left:0;padding-right:0;vertical-align:top}.rst-content table.field-list{border:none}.rst-content table.field-list td{border:none;padding-top:5px}.rst-content table.field-list td>strong{display:inline-block;margin-top:3px}.rst-content table.field-list .field-name{padding-right:10px;text-align:left;white-space:nowrap}.rst-content table.field-list .field-body{text-align:left;padding-left:0}.rst-content tt{color:#000}.rst-content tt big,.rst-content tt em{font-size:100% !important;line-height:normal}.rst-content tt .xref,a .rst-content tt{font-weight:bold}.rst-content a 
tt{color:#2980B9}.rst-content dl{margin-bottom:24px}.rst-content dl dt{font-weight:bold}.rst-content dl p,.rst-content dl table,.rst-content dl ul,.rst-content dl ol{margin-bottom:12px !important}.rst-content dl dd{margin:0 0 12px 24px}.rst-content dl:not(.docutils){margin-bottom:24px}.rst-content dl:not(.docutils) dt{display:inline-block;margin:6px 0;font-size:90%;line-height:normal;background:#e7f2fa;color:#2980B9;border-top:solid 3px #6ab0de;padding:6px;position:relative}.rst-content dl:not(.docutils) dt:before{color:#6ab0de}.rst-content dl:not(.docutils) dt .headerlink{color:#404040;font-size:100% !important}.rst-content dl:not(.docutils) dl dt{margin-bottom:6px;border:none;border-left:solid 3px #ccc;background:#f0f0f0;color:gray}.rst-content dl:not(.docutils) dl dt .headerlink{color:#404040;font-size:100% !important}.rst-content dl:not(.docutils) dt:first-child{margin-top:0}.rst-content dl:not(.docutils) tt{font-weight:bold}.rst-content dl:not(.docutils) tt.descname,.rst-content dl:not(.docutils) tt.descclassname{background-color:transparent;border:none;padding:0;font-size:100% !important}.rst-content dl:not(.docutils) tt.descname{font-weight:bold}.rst-content dl:not(.docutils) .optional{display:inline-block;padding:0 4px;color:#000;font-weight:bold}.rst-content dl:not(.docutils) .property{display:inline-block;padding-right:8px}.rst-content .viewcode-link,.rst-content .viewcode-back{display:inline-block;color:#27AE60;font-size:80%;padding-left:24px}.rst-content .viewcode-back{display:block;float:right}.rst-content p.rubric{margin-bottom:12px;font-weight:bold}@media screen and (max-width: 480px){.rst-content .sidebar{width:100%}}span[id*='MathJax-Span']{color:#404040}.math{text-align:center} +/*# sourceMappingURL=theme.css.map */ diff --git a/docs/_build/html/_static/doctools.js b/docs/_build/html/_static/doctools.js new file mode 100644 index 0000000..7d88f80 --- /dev/null +++ b/docs/_build/html/_static/doctools.js @@ -0,0 +1,316 @@ +/* + * doctools.js + * ~~~~~~~~~~~ + * + * Sphinx JavaScript utilities for all documentation. + * + * :copyright: Copyright 2007-2020 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +/** + * select a different prefix for underscore + */ +$u = _.noConflict(); + +/** + * make the code below compatible with browsers without + * an installed firebug like debugger +if (!window.console || !console.firebug) { + var names = ["log", "debug", "info", "warn", "error", "assert", "dir", + "dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace", + "profile", "profileEnd"]; + window.console = {}; + for (var i = 0; i < names.length; ++i) + window.console[names[i]] = function() {}; +} + */ + +/** + * small helper function to urldecode strings + */ +jQuery.urldecode = function(x) { + return decodeURIComponent(x).replace(/\+/g, ' '); +}; + +/** + * small helper function to urlencode strings + */ +jQuery.urlencode = encodeURIComponent; + +/** + * This function returns the parsed url parameters of the + * current request. Multiple values per key are supported, + * it will always return arrays of strings for the value parts. 
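+ * e.g. (illustrative values) a location.search of '?highlight=foo+bar&x=1' is parsed to {highlight: ['foo bar'], x: ['1']}.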
+ */ +jQuery.getQueryParameters = function(s) { + if (typeof s === 'undefined') + s = document.location.search; + var parts = s.substr(s.indexOf('?') + 1).split('&'); + var result = {}; + for (var i = 0; i < parts.length; i++) { + var tmp = parts[i].split('=', 2); + var key = jQuery.urldecode(tmp[0]); + var value = jQuery.urldecode(tmp[1]); + if (key in result) + result[key].push(value); + else + result[key] = [value]; + } + return result; +}; + +/** + * highlight a given string on a jquery object by wrapping it in + * span elements with the given class name. + */ +jQuery.fn.highlightText = function(text, className) { + function highlight(node, addItems) { + if (node.nodeType === 3) { + var val = node.nodeValue; + var pos = val.toLowerCase().indexOf(text); + if (pos >= 0 && + !jQuery(node.parentNode).hasClass(className) && + !jQuery(node.parentNode).hasClass("nohighlight")) { + var span; + var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg"); + if (isInSVG) { + span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); + } else { + span = document.createElement("span"); + span.className = className; + } + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + node.parentNode.insertBefore(span, node.parentNode.insertBefore( + document.createTextNode(val.substr(pos + text.length)), + node.nextSibling)); + node.nodeValue = val.substr(0, pos); + if (isInSVG) { + var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect"); + var bbox = node.parentElement.getBBox(); + rect.x.baseVal.value = bbox.x; + rect.y.baseVal.value = bbox.y; + rect.width.baseVal.value = bbox.width; + rect.height.baseVal.value = bbox.height; + rect.setAttribute('class', className); + addItems.push({ + "parent": node.parentNode, + "target": rect}); + } + } + } + else if (!jQuery(node).is("button, select, textarea")) { + jQuery.each(node.childNodes, function() { + highlight(this, addItems); + }); + } + } + var addItems = []; + var result = this.each(function() { + highlight(this, addItems); + }); + for (var i = 0; i < addItems.length; ++i) { + jQuery(addItems[i].parent).before(addItems[i].target); + } + return result; +}; + +/* + * backward compatibility for jQuery.browser + * This will be supported until firefox bug is fixed. + */ +if (!jQuery.browser) { + jQuery.uaMatch = function(ua) { + ua = ua.toLowerCase(); + + var match = /(chrome)[ \/]([\w.]+)/.exec(ua) || + /(webkit)[ \/]([\w.]+)/.exec(ua) || + /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) || + /(msie) ([\w.]+)/.exec(ua) || + ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) || + []; + + return { + browser: match[ 1 ] || "", + version: match[ 2 ] || "0" + }; + }; + jQuery.browser = {}; + jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true; +} + +/** + * Small JavaScript module for the documentation. + */ +var Documentation = { + + init : function() { + this.fixFirefoxAnchorBug(); + this.highlightSearchWords(); + this.initIndexTable(); + if (DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) { + this.initOnKeyListeners(); + } + }, + + /** + * i18n support + */ + TRANSLATIONS : {}, + PLURAL_EXPR : function(n) { return n === 1 ? 
0 : 1; }, + LOCALE : 'unknown', + + // gettext and ngettext don't access this so that the functions + // can safely bound to a different name (_ = Documentation.gettext) + gettext : function(string) { + var translated = Documentation.TRANSLATIONS[string]; + if (typeof translated === 'undefined') + return string; + return (typeof translated === 'string') ? translated : translated[0]; + }, + + ngettext : function(singular, plural, n) { + var translated = Documentation.TRANSLATIONS[singular]; + if (typeof translated === 'undefined') + return (n == 1) ? singular : plural; + return translated[Documentation.PLURALEXPR(n)]; + }, + + addTranslations : function(catalog) { + for (var key in catalog.messages) + this.TRANSLATIONS[key] = catalog.messages[key]; + this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')'); + this.LOCALE = catalog.locale; + }, + + /** + * add context elements like header anchor links + */ + addContextElements : function() { + $('div[id] > :header:first').each(function() { + $('\u00B6'). + attr('href', '#' + this.id). + attr('title', _('Permalink to this headline')). + appendTo(this); + }); + $('dt[id]').each(function() { + $('\u00B6'). + attr('href', '#' + this.id). + attr('title', _('Permalink to this definition')). + appendTo(this); + }); + }, + + /** + * workaround a firefox stupidity + * see: https://bugzilla.mozilla.org/show_bug.cgi?id=645075 + */ + fixFirefoxAnchorBug : function() { + if (document.location.hash && $.browser.mozilla) + window.setTimeout(function() { + document.location.href += ''; + }, 10); + }, + + /** + * highlight the search words provided in the url in the text + */ + highlightSearchWords : function() { + var params = $.getQueryParameters(); + var terms = (params.highlight) ? params.highlight[0].split(/\s+/) : []; + if (terms.length) { + var body = $('div.body'); + if (!body.length) { + body = $('body'); + } + window.setTimeout(function() { + $.each(terms, function() { + body.highlightText(this.toLowerCase(), 'highlighted'); + }); + }, 10); + $('') + .appendTo($('#searchbox')); + } + }, + + /** + * init the domain index toggle buttons + */ + initIndexTable : function() { + var togglers = $('img.toggler').click(function() { + var src = $(this).attr('src'); + var idnum = $(this).attr('id').substr(7); + $('tr.cg-' + idnum).toggle(); + if (src.substr(-9) === 'minus.png') + $(this).attr('src', src.substr(0, src.length-9) + 'plus.png'); + else + $(this).attr('src', src.substr(0, src.length-8) + 'minus.png'); + }).css('display', ''); + if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) { + togglers.click(); + } + }, + + /** + * helper function to hide the search marks again + */ + hideSearchWords : function() { + $('#searchbox .highlight-link').fadeOut(300); + $('span.highlighted').removeClass('highlighted'); + }, + + /** + * make the url absolute + */ + makeURL : function(relativeURL) { + return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL; + }, + + /** + * get the current relative url + */ + getCurrentURL : function() { + var path = document.location.pathname; + var parts = path.split(/\//); + $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() { + if (this === '..') + parts.pop(); + }); + var url = parts.join('/'); + return path.substring(url.lastIndexOf('/') + 1, path.length - 1); + }, + + initOnKeyListeners: function() { + $(document).keydown(function(event) { + var activeElementType = document.activeElement.tagName; + // don't navigate when in search box, textarea, dropdown or button + if (activeElementType !== 'TEXTAREA' 
&& activeElementType !== 'INPUT' && activeElementType !== 'SELECT' + && activeElementType !== 'BUTTON' && !event.altKey && !event.ctrlKey && !event.metaKey + && !event.shiftKey) { + switch (event.keyCode) { + case 37: // left + var prevHref = $('link[rel="prev"]').prop('href'); + if (prevHref) { + window.location.href = prevHref; + return false; + } + case 39: // right + var nextHref = $('link[rel="next"]').prop('href'); + if (nextHref) { + window.location.href = nextHref; + return false; + } + } + } + }); + } +}; + +// quick alias for translations +_ = Documentation.gettext; + +$(document).ready(function() { + Documentation.init(); +}); diff --git a/docs/_build/html/_static/documentation_options.js b/docs/_build/html/_static/documentation_options.js new file mode 100644 index 0000000..a7d0fd2 --- /dev/null +++ b/docs/_build/html/_static/documentation_options.js @@ -0,0 +1,12 @@ +var DOCUMENTATION_OPTIONS = { + URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'), + VERSION: '2.0', + LANGUAGE: 'None', + COLLAPSE_INDEX: false, + BUILDER: 'html', + FILE_SUFFIX: '.html', + LINK_SUFFIX: '.html', + HAS_SOURCE: true, + SOURCELINK_SUFFIX: '.txt', + NAVIGATION_WITH_KEYS: false +}; \ No newline at end of file diff --git a/docs/_build/html/_static/file.png b/docs/_build/html/_static/file.png new file mode 100644 index 0000000..a858a41 Binary files /dev/null and b/docs/_build/html/_static/file.png differ diff --git a/docs/_build/html/_static/fonts/fontawesome-webfont.eot b/docs/_build/html/_static/fonts/fontawesome-webfont.eot new file mode 100644 index 0000000..7c79c6a Binary files /dev/null and b/docs/_build/html/_static/fonts/fontawesome-webfont.eot differ diff --git a/docs/_build/html/_static/fonts/fontawesome-webfont.svg b/docs/_build/html/_static/fonts/fontawesome-webfont.svg new file mode 100644 index 0000000..45fdf33 --- /dev/null +++ b/docs/_build/html/_static/fonts/fontawesome-webfont.svg @@ -0,0 +1,414 @@ + [414 added lines of SVG glyph markup omitted] \ No newline at end of file diff --git a/docs/_build/html/_static/fonts/fontawesome-webfont.ttf b/docs/_build/html/_static/fonts/fontawesome-webfont.ttf new file mode 100644 index 0000000..e89738d Binary files /dev/null and b/docs/_build/html/_static/fonts/fontawesome-webfont.ttf differ diff --git a/docs/_build/html/_static/fonts/fontawesome-webfont.woff b/docs/_build/html/_static/fonts/fontawesome-webfont.woff new file mode 100644 index 0000000..8c1748a Binary files /dev/null and b/docs/_build/html/_static/fonts/fontawesome-webfont.woff differ diff --git a/docs/_build/html/_static/img/porao-branco.png b/docs/_build/html/_static/img/porao-branco.png new file mode 100644 index 0000000..9aa13b4 Binary files /dev/null and 
b/docs/_build/html/_static/img/porao-branco.png differ diff --git a/docs/_build/html/_static/jquery-3.5.1.js b/docs/_build/html/_static/jquery-3.5.1.js new file mode 100644 index 0000000..77d0b37 --- /dev/null +++ b/docs/_build/html/_static/jquery-3.5.1.js @@ -0,0 +1,10872 @@ +/*! + * jQuery JavaScript Library v3.5.1 + * https://jquery.com/ + * + * Includes Sizzle.js + * https://sizzlejs.com/ + * + * Copyright JS Foundation and other contributors + * Released under the MIT license + * https://jquery.org/license + * + * Date: 2020-05-04T22:49Z + */ +( function( global, factory ) { + + "use strict"; + + if ( typeof module === "object" && typeof module.exports === "object" ) { + + // For CommonJS and CommonJS-like environments where a proper `window` + // is present, execute the factory and get jQuery. + // For environments that do not have a `window` with a `document` + // (such as Node.js), expose a factory as module.exports. + // This accentuates the need for the creation of a real `window`. + // e.g. var jQuery = require("jquery")(window); + // See ticket #14549 for more info. + module.exports = global.document ? + factory( global, true ) : + function( w ) { + if ( !w.document ) { + throw new Error( "jQuery requires a window with a document" ); + } + return factory( w ); + }; + } else { + factory( global ); + } + +// Pass this if window is not defined yet +} )( typeof window !== "undefined" ? window : this, function( window, noGlobal ) { + +// Edge <= 12 - 13+, Firefox <=18 - 45+, IE 10 - 11, Safari 5.1 - 9+, iOS 6 - 9.1 +// throw exceptions when non-strict code (e.g., ASP.NET 4.5) accesses strict mode +// arguments.callee.caller (trac-13335). But as of jQuery 3.0 (2016), strict mode should be common +// enough that all such attempts are guarded in a try block. +"use strict"; + +var arr = []; + +var getProto = Object.getPrototypeOf; + +var slice = arr.slice; + +var flat = arr.flat ? function( array ) { + return arr.flat.call( array ); +} : function( array ) { + return arr.concat.apply( [], array ); +}; + + +var push = arr.push; + +var indexOf = arr.indexOf; + +var class2type = {}; + +var toString = class2type.toString; + +var hasOwn = class2type.hasOwnProperty; + +var fnToString = hasOwn.toString; + +var ObjectFunctionString = fnToString.call( Object ); + +var support = {}; + +var isFunction = function isFunction( obj ) { + + // Support: Chrome <=57, Firefox <=52 + // In some browsers, typeof returns "function" for HTML elements + // (i.e., `typeof document.createElement( "object" ) === "function"`). + // We don't want to classify *any* DOM node as a function. + return typeof obj === "function" && typeof obj.nodeType !== "number"; + }; + + +var isWindow = function isWindow( obj ) { + return obj != null && obj === obj.window; + }; + + +var document = window.document; + + + + var preservedScriptAttributes = { + type: true, + src: true, + nonce: true, + noModule: true + }; + + function DOMEval( code, node, doc ) { + doc = doc || document; + + var i, val, + script = doc.createElement( "script" ); + + script.text = code; + if ( node ) { + for ( i in preservedScriptAttributes ) { + + // Support: Firefox 64+, Edge 18+ + // Some browsers don't support the "nonce" property on scripts. + // On the other hand, just using `getAttribute` is not enough as + // the `nonce` attribute is reset to an empty string whenever it + // becomes browsing-context connected. 
+ // See https://github.com/whatwg/html/issues/2369 + // See https://html.spec.whatwg.org/#nonce-attributes + // The `node.getAttribute` check was added for the sake of + // `jQuery.globalEval` so that it can fake a nonce-containing node + // via an object. + val = node[ i ] || node.getAttribute && node.getAttribute( i ); + if ( val ) { + script.setAttribute( i, val ); + } + } + } + doc.head.appendChild( script ).parentNode.removeChild( script ); + } + + +function toType( obj ) { + if ( obj == null ) { + return obj + ""; + } + + // Support: Android <=2.3 only (functionish RegExp) + return typeof obj === "object" || typeof obj === "function" ? + class2type[ toString.call( obj ) ] || "object" : + typeof obj; +} +/* global Symbol */ +// Defining this global in .eslintrc.json would create a danger of using the global +// unguarded in another place, it seems safer to define global only for this module + + + +var + version = "3.5.1", + + // Define a local copy of jQuery + jQuery = function( selector, context ) { + + // The jQuery object is actually just the init constructor 'enhanced' + // Need init if jQuery is called (just allow error to be thrown if not included) + return new jQuery.fn.init( selector, context ); + }; + +jQuery.fn = jQuery.prototype = { + + // The current version of jQuery being used + jquery: version, + + constructor: jQuery, + + // The default length of a jQuery object is 0 + length: 0, + + toArray: function() { + return slice.call( this ); + }, + + // Get the Nth element in the matched element set OR + // Get the whole matched element set as a clean array + get: function( num ) { + + // Return all the elements in a clean array + if ( num == null ) { + return slice.call( this ); + } + + // Return just the one element from the set + return num < 0 ? this[ num + this.length ] : this[ num ]; + }, + + // Take an array of elements and push it onto the stack + // (returning the new matched element set) + pushStack: function( elems ) { + + // Build a new jQuery matched element set + var ret = jQuery.merge( this.constructor(), elems ); + + // Add the old object onto the stack (as a reference) + ret.prevObject = this; + + // Return the newly-formed element set + return ret; + }, + + // Execute a callback for every element in the matched set. + each: function( callback ) { + return jQuery.each( this, callback ); + }, + + map: function( callback ) { + return this.pushStack( jQuery.map( this, function( elem, i ) { + return callback.call( elem, i, elem ); + } ) ); + }, + + slice: function() { + return this.pushStack( slice.apply( this, arguments ) ); + }, + + first: function() { + return this.eq( 0 ); + }, + + last: function() { + return this.eq( -1 ); + }, + + even: function() { + return this.pushStack( jQuery.grep( this, function( _elem, i ) { + return ( i + 1 ) % 2; + } ) ); + }, + + odd: function() { + return this.pushStack( jQuery.grep( this, function( _elem, i ) { + return i % 2; + } ) ); + }, + + eq: function( i ) { + var len = this.length, + j = +i + ( i < 0 ? len : 0 ); + return this.pushStack( j >= 0 && j < len ? [ this[ j ] ] : [] ); + }, + + end: function() { + return this.prevObject || this.constructor(); + }, + + // For internal use only. + // Behaves like an Array's method, not like a jQuery method. 
+ push: push, + sort: arr.sort, + splice: arr.splice +}; + +jQuery.extend = jQuery.fn.extend = function() { + var options, name, src, copy, copyIsArray, clone, + target = arguments[ 0 ] || {}, + i = 1, + length = arguments.length, + deep = false; + + // Handle a deep copy situation + if ( typeof target === "boolean" ) { + deep = target; + + // Skip the boolean and the target + target = arguments[ i ] || {}; + i++; + } + + // Handle case when target is a string or something (possible in deep copy) + if ( typeof target !== "object" && !isFunction( target ) ) { + target = {}; + } + + // Extend jQuery itself if only one argument is passed + if ( i === length ) { + target = this; + i--; + } + + for ( ; i < length; i++ ) { + + // Only deal with non-null/undefined values + if ( ( options = arguments[ i ] ) != null ) { + + // Extend the base object + for ( name in options ) { + copy = options[ name ]; + + // Prevent Object.prototype pollution + // Prevent never-ending loop + if ( name === "__proto__" || target === copy ) { + continue; + } + + // Recurse if we're merging plain objects or arrays + if ( deep && copy && ( jQuery.isPlainObject( copy ) || + ( copyIsArray = Array.isArray( copy ) ) ) ) { + src = target[ name ]; + + // Ensure proper type for the source value + if ( copyIsArray && !Array.isArray( src ) ) { + clone = []; + } else if ( !copyIsArray && !jQuery.isPlainObject( src ) ) { + clone = {}; + } else { + clone = src; + } + copyIsArray = false; + + // Never move original objects, clone them + target[ name ] = jQuery.extend( deep, clone, copy ); + + // Don't bring in undefined values + } else if ( copy !== undefined ) { + target[ name ] = copy; + } + } + } + } + + // Return the modified object + return target; +}; + +jQuery.extend( { + + // Unique for each copy of jQuery on the page + expando: "jQuery" + ( version + Math.random() ).replace( /\D/g, "" ), + + // Assume jQuery is ready without the ready module + isReady: true, + + error: function( msg ) { + throw new Error( msg ); + }, + + noop: function() {}, + + isPlainObject: function( obj ) { + var proto, Ctor; + + // Detect obvious negatives + // Use toString instead of jQuery.type to catch host objects + if ( !obj || toString.call( obj ) !== "[object Object]" ) { + return false; + } + + proto = getProto( obj ); + + // Objects with no prototype (e.g., `Object.create( null )`) are plain + if ( !proto ) { + return true; + } + + // Objects with prototype are plain iff they were constructed by a global Object function + Ctor = hasOwn.call( proto, "constructor" ) && proto.constructor; + return typeof Ctor === "function" && fnToString.call( Ctor ) === ObjectFunctionString; + }, + + isEmptyObject: function( obj ) { + var name; + + for ( name in obj ) { + return false; + } + return true; + }, + + // Evaluates a script in a provided context; falls back to the global one + // if not specified. 
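+ // e.g. (illustrative usage) jQuery.globalEval( "window.x = 1;" ) injects the string as a temporary script element and runs it in the page's global scope.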
+ globalEval: function( code, options, doc ) { + DOMEval( code, { nonce: options && options.nonce }, doc ); + }, + + each: function( obj, callback ) { + var length, i = 0; + + if ( isArrayLike( obj ) ) { + length = obj.length; + for ( ; i < length; i++ ) { + if ( callback.call( obj[ i ], i, obj[ i ] ) === false ) { + break; + } + } + } else { + for ( i in obj ) { + if ( callback.call( obj[ i ], i, obj[ i ] ) === false ) { + break; + } + } + } + + return obj; + }, + + // results is for internal usage only + makeArray: function( arr, results ) { + var ret = results || []; + + if ( arr != null ) { + if ( isArrayLike( Object( arr ) ) ) { + jQuery.merge( ret, + typeof arr === "string" ? + [ arr ] : arr + ); + } else { + push.call( ret, arr ); + } + } + + return ret; + }, + + inArray: function( elem, arr, i ) { + return arr == null ? -1 : indexOf.call( arr, elem, i ); + }, + + // Support: Android <=4.0 only, PhantomJS 1 only + // push.apply(_, arraylike) throws on ancient WebKit + merge: function( first, second ) { + var len = +second.length, + j = 0, + i = first.length; + + for ( ; j < len; j++ ) { + first[ i++ ] = second[ j ]; + } + + first.length = i; + + return first; + }, + + grep: function( elems, callback, invert ) { + var callbackInverse, + matches = [], + i = 0, + length = elems.length, + callbackExpect = !invert; + + // Go through the array, only saving the items + // that pass the validator function + for ( ; i < length; i++ ) { + callbackInverse = !callback( elems[ i ], i ); + if ( callbackInverse !== callbackExpect ) { + matches.push( elems[ i ] ); + } + } + + return matches; + }, + + // arg is for internal usage only + map: function( elems, callback, arg ) { + var length, value, + i = 0, + ret = []; + + // Go through the array, translating each of the items to their new values + if ( isArrayLike( elems ) ) { + length = elems.length; + for ( ; i < length; i++ ) { + value = callback( elems[ i ], i, arg ); + + if ( value != null ) { + ret.push( value ); + } + } + + // Go through every key on the object, + } else { + for ( i in elems ) { + value = callback( elems[ i ], i, arg ); + + if ( value != null ) { + ret.push( value ); + } + } + } + + // Flatten any nested arrays + return flat( ret ); + }, + + // A global GUID counter for objects + guid: 1, + + // jQuery.support is not used in Core but other projects attach their + // properties to it so it needs to exist. + support: support +} ); + +if ( typeof Symbol === "function" ) { + jQuery.fn[ Symbol.iterator ] = arr[ Symbol.iterator ]; +} + +// Populate the class2type map +jQuery.each( "Boolean Number String Function Array Date RegExp Object Error Symbol".split( " " ), +function( _i, name ) { + class2type[ "[object " + name + "]" ] = name.toLowerCase(); +} ); + +function isArrayLike( obj ) { + + // Support: real iOS 8.2 only (not reproducible in simulator) + // `in` check used to prevent JIT error (gh-2145) + // hasOwn isn't used here due to false negatives + // regarding Nodelist length in IE + var length = !!obj && "length" in obj && obj.length, + type = toType( obj ); + + if ( isFunction( obj ) || isWindow( obj ) ) { + return false; + } + + return type === "array" || length === 0 || + typeof length === "number" && length > 0 && ( length - 1 ) in obj; +} +var Sizzle = +/*! 
+ * Sizzle CSS Selector Engine v2.3.5 + * https://sizzlejs.com/ + * + * Copyright JS Foundation and other contributors + * Released under the MIT license + * https://js.foundation/ + * + * Date: 2020-03-14 + */ +( function( window ) { +var i, + support, + Expr, + getText, + isXML, + tokenize, + compile, + select, + outermostContext, + sortInput, + hasDuplicate, + + // Local document vars + setDocument, + document, + docElem, + documentIsHTML, + rbuggyQSA, + rbuggyMatches, + matches, + contains, + + // Instance-specific data + expando = "sizzle" + 1 * new Date(), + preferredDoc = window.document, + dirruns = 0, + done = 0, + classCache = createCache(), + tokenCache = createCache(), + compilerCache = createCache(), + nonnativeSelectorCache = createCache(), + sortOrder = function( a, b ) { + if ( a === b ) { + hasDuplicate = true; + } + return 0; + }, + + // Instance methods + hasOwn = ( {} ).hasOwnProperty, + arr = [], + pop = arr.pop, + pushNative = arr.push, + push = arr.push, + slice = arr.slice, + + // Use a stripped-down indexOf as it's faster than native + // https://jsperf.com/thor-indexof-vs-for/5 + indexOf = function( list, elem ) { + var i = 0, + len = list.length; + for ( ; i < len; i++ ) { + if ( list[ i ] === elem ) { + return i; + } + } + return -1; + }, + + booleans = "checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|" + + "ismap|loop|multiple|open|readonly|required|scoped", + + // Regular expressions + + // http://www.w3.org/TR/css3-selectors/#whitespace + whitespace = "[\\x20\\t\\r\\n\\f]", + + // https://www.w3.org/TR/css-syntax-3/#ident-token-diagram + identifier = "(?:\\\\[\\da-fA-F]{1,6}" + whitespace + + "?|\\\\[^\\r\\n\\f]|[\\w-]|[^\0-\\x7f])+", + + // Attribute selectors: http://www.w3.org/TR/selectors/#attribute-selectors + attributes = "\\[" + whitespace + "*(" + identifier + ")(?:" + whitespace + + + // Operator (capture 2) + "*([*^$|!~]?=)" + whitespace + + + // "Attribute values must be CSS identifiers [capture 5] + // or strings [capture 3 or capture 4]" + "*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|(" + identifier + "))|)" + + whitespace + "*\\]", + + pseudos = ":(" + identifier + ")(?:\\((" + + + // To reduce the number of selectors needing tokenize in the preFilter, prefer arguments: + // 1. quoted (capture 3; capture 4 or capture 5) + "('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|" + + + // 2. simple (capture 6) + "((?:\\\\.|[^\\\\()[\\]]|" + attributes + ")*)|" + + + // 3. 
anything else (capture 2) + ".*" + + ")\\)|)", + + // Leading and non-escaped trailing whitespace, capturing some non-whitespace characters preceding the latter + rwhitespace = new RegExp( whitespace + "+", "g" ), + rtrim = new RegExp( "^" + whitespace + "+|((?:^|[^\\\\])(?:\\\\.)*)" + + whitespace + "+$", "g" ), + + rcomma = new RegExp( "^" + whitespace + "*," + whitespace + "*" ), + rcombinators = new RegExp( "^" + whitespace + "*([>+~]|" + whitespace + ")" + whitespace + + "*" ), + rdescend = new RegExp( whitespace + "|>" ), + + rpseudo = new RegExp( pseudos ), + ridentifier = new RegExp( "^" + identifier + "$" ), + + matchExpr = { + "ID": new RegExp( "^#(" + identifier + ")" ), + "CLASS": new RegExp( "^\\.(" + identifier + ")" ), + "TAG": new RegExp( "^(" + identifier + "|[*])" ), + "ATTR": new RegExp( "^" + attributes ), + "PSEUDO": new RegExp( "^" + pseudos ), + "CHILD": new RegExp( "^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\(" + + whitespace + "*(even|odd|(([+-]|)(\\d*)n|)" + whitespace + "*(?:([+-]|)" + + whitespace + "*(\\d+)|))" + whitespace + "*\\)|)", "i" ), + "bool": new RegExp( "^(?:" + booleans + ")$", "i" ), + + // For use in libraries implementing .is() + // We use this for POS matching in `select` + "needsContext": new RegExp( "^" + whitespace + + "*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\(" + whitespace + + "*((?:-\\d)?\\d*)" + whitespace + "*\\)|)(?=[^-]|$)", "i" ) + }, + + rhtml = /HTML$/i, + rinputs = /^(?:input|select|textarea|button)$/i, + rheader = /^h\d$/i, + + rnative = /^[^{]+\{\s*\[native \w/, + + // Easily-parseable/retrievable ID or TAG or CLASS selectors + rquickExpr = /^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/, + + rsibling = /[+~]/, + + // CSS escapes + // http://www.w3.org/TR/CSS21/syndata.html#escaped-characters + runescape = new RegExp( "\\\\[\\da-fA-F]{1,6}" + whitespace + "?|\\\\([^\\r\\n\\f])", "g" ), + funescape = function( escape, nonHex ) { + var high = "0x" + escape.slice( 1 ) - 0x10000; + + return nonHex ? + + // Strip the backslash prefix from a non-hex escape sequence + nonHex : + + // Replace a hexadecimal escape sequence with the encoded Unicode code point + // Support: IE <=11+ + // For values outside the Basic Multilingual Plane (BMP), manually construct a + // surrogate pair + high < 0 ? 
+ String.fromCharCode( high + 0x10000 ) : + String.fromCharCode( high >> 10 | 0xD800, high & 0x3FF | 0xDC00 ); + }, + + // CSS string/identifier serialization + // https://drafts.csswg.org/cssom/#common-serializing-idioms + rcssescape = /([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g, + fcssescape = function( ch, asCodePoint ) { + if ( asCodePoint ) { + + // U+0000 NULL becomes U+FFFD REPLACEMENT CHARACTER + if ( ch === "\0" ) { + return "\uFFFD"; + } + + // Control characters and (dependent upon position) numbers get escaped as code points + return ch.slice( 0, -1 ) + "\\" + + ch.charCodeAt( ch.length - 1 ).toString( 16 ) + " "; + } + + // Other potentially-special ASCII characters get backslash-escaped + return "\\" + ch; + }, + + // Used for iframes + // See setDocument() + // Removing the function wrapper causes a "Permission Denied" + // error in IE + unloadHandler = function() { + setDocument(); + }, + + inDisabledFieldset = addCombinator( + function( elem ) { + return elem.disabled === true && elem.nodeName.toLowerCase() === "fieldset"; + }, + { dir: "parentNode", next: "legend" } + ); + +// Optimize for push.apply( _, NodeList ) +try { + push.apply( + ( arr = slice.call( preferredDoc.childNodes ) ), + preferredDoc.childNodes + ); + + // Support: Android<4.0 + // Detect silently failing push.apply + // eslint-disable-next-line no-unused-expressions + arr[ preferredDoc.childNodes.length ].nodeType; +} catch ( e ) { + push = { apply: arr.length ? + + // Leverage slice if possible + function( target, els ) { + pushNative.apply( target, slice.call( els ) ); + } : + + // Support: IE<9 + // Otherwise append directly + function( target, els ) { + var j = target.length, + i = 0; + + // Can't trust NodeList.length + while ( ( target[ j++ ] = els[ i++ ] ) ) {} + target.length = j - 1; + } + }; +} + +function Sizzle( selector, context, results, seed ) { + var m, i, elem, nid, match, groups, newSelector, + newContext = context && context.ownerDocument, + + // nodeType defaults to 9, since context defaults to document + nodeType = context ? 
context.nodeType : 9; + + results = results || []; + + // Return early from calls with invalid selector or context + if ( typeof selector !== "string" || !selector || + nodeType !== 1 && nodeType !== 9 && nodeType !== 11 ) { + + return results; + } + + // Try to shortcut find operations (as opposed to filters) in HTML documents + if ( !seed ) { + setDocument( context ); + context = context || document; + + if ( documentIsHTML ) { + + // If the selector is sufficiently simple, try using a "get*By*" DOM method + // (excepting DocumentFragment context, where the methods don't exist) + if ( nodeType !== 11 && ( match = rquickExpr.exec( selector ) ) ) { + + // ID selector + if ( ( m = match[ 1 ] ) ) { + + // Document context + if ( nodeType === 9 ) { + if ( ( elem = context.getElementById( m ) ) ) { + + // Support: IE, Opera, Webkit + // TODO: identify versions + // getElementById can match elements by name instead of ID + if ( elem.id === m ) { + results.push( elem ); + return results; + } + } else { + return results; + } + + // Element context + } else { + + // Support: IE, Opera, Webkit + // TODO: identify versions + // getElementById can match elements by name instead of ID + if ( newContext && ( elem = newContext.getElementById( m ) ) && + contains( context, elem ) && + elem.id === m ) { + + results.push( elem ); + return results; + } + } + + // Type selector + } else if ( match[ 2 ] ) { + push.apply( results, context.getElementsByTagName( selector ) ); + return results; + + // Class selector + } else if ( ( m = match[ 3 ] ) && support.getElementsByClassName && + context.getElementsByClassName ) { + + push.apply( results, context.getElementsByClassName( m ) ); + return results; + } + } + + // Take advantage of querySelectorAll + if ( support.qsa && + !nonnativeSelectorCache[ selector + " " ] && + ( !rbuggyQSA || !rbuggyQSA.test( selector ) ) && + + // Support: IE 8 only + // Exclude object elements + ( nodeType !== 1 || context.nodeName.toLowerCase() !== "object" ) ) { + + newSelector = selector; + newContext = context; + + // qSA considers elements outside a scoping root when evaluating child or + // descendant combinators, which is not what we want. + // In such cases, we work around the behavior by prefixing every selector in the + // list with an ID selector referencing the scope context. + // The technique has to be used as well when a leading combinator is used + // as such selectors are not recognized by querySelectorAll. + // Thanks to Andrew Dupont for this technique. + if ( nodeType === 1 && + ( rdescend.test( selector ) || rcombinators.test( selector ) ) ) { + + // Expand context for sibling selectors + newContext = rsibling.test( selector ) && testContext( context.parentNode ) || + context; + + // We can use :scope instead of the ID hack if the browser + // supports it & if we're not changing the context. + if ( newContext !== context || !support.scope ) { + + // Capture the context ID, setting it first if necessary + if ( ( nid = context.getAttribute( "id" ) ) ) { + nid = nid.replace( rcssescape, fcssescape ); + } else { + context.setAttribute( "id", ( nid = expando ) ); + } + } + + // Prefix every selector in the list + groups = tokenize( selector ); + i = groups.length; + while ( i-- ) { + groups[ i ] = ( nid ? 
"#" + nid : ":scope" ) + " " + + toSelector( groups[ i ] ); + } + newSelector = groups.join( "," ); + } + + try { + push.apply( results, + newContext.querySelectorAll( newSelector ) + ); + return results; + } catch ( qsaError ) { + nonnativeSelectorCache( selector, true ); + } finally { + if ( nid === expando ) { + context.removeAttribute( "id" ); + } + } + } + } + } + + // All others + return select( selector.replace( rtrim, "$1" ), context, results, seed ); +} + +/** + * Create key-value caches of limited size + * @returns {function(string, object)} Returns the Object data after storing it on itself with + * property name the (space-suffixed) string and (if the cache is larger than Expr.cacheLength) + * deleting the oldest entry + */ +function createCache() { + var keys = []; + + function cache( key, value ) { + + // Use (key + " ") to avoid collision with native prototype properties (see Issue #157) + if ( keys.push( key + " " ) > Expr.cacheLength ) { + + // Only keep the most recent entries + delete cache[ keys.shift() ]; + } + return ( cache[ key + " " ] = value ); + } + return cache; +} + +/** + * Mark a function for special use by Sizzle + * @param {Function} fn The function to mark + */ +function markFunction( fn ) { + fn[ expando ] = true; + return fn; +} + +/** + * Support testing using an element + * @param {Function} fn Passed the created element and returns a boolean result + */ +function assert( fn ) { + var el = document.createElement( "fieldset" ); + + try { + return !!fn( el ); + } catch ( e ) { + return false; + } finally { + + // Remove from its parent by default + if ( el.parentNode ) { + el.parentNode.removeChild( el ); + } + + // release memory in IE + el = null; + } +} + +/** + * Adds the same handler for all of the specified attrs + * @param {String} attrs Pipe-separated list of attributes + * @param {Function} handler The method that will be applied + */ +function addHandle( attrs, handler ) { + var arr = attrs.split( "|" ), + i = arr.length; + + while ( i-- ) { + Expr.attrHandle[ arr[ i ] ] = handler; + } +} + +/** + * Checks document order of two siblings + * @param {Element} a + * @param {Element} b + * @returns {Number} Returns less than 0 if a precedes b, greater than 0 if a follows b + */ +function siblingCheck( a, b ) { + var cur = b && a, + diff = cur && a.nodeType === 1 && b.nodeType === 1 && + a.sourceIndex - b.sourceIndex; + + // Use IE sourceIndex if available on both nodes + if ( diff ) { + return diff; + } + + // Check if b follows a + if ( cur ) { + while ( ( cur = cur.nextSibling ) ) { + if ( cur === b ) { + return -1; + } + } + } + + return a ? 
1 : -1; +} + +/** + * Returns a function to use in pseudos for input types + * @param {String} type + */ +function createInputPseudo( type ) { + return function( elem ) { + var name = elem.nodeName.toLowerCase(); + return name === "input" && elem.type === type; + }; +} + +/** + * Returns a function to use in pseudos for buttons + * @param {String} type + */ +function createButtonPseudo( type ) { + return function( elem ) { + var name = elem.nodeName.toLowerCase(); + return ( name === "input" || name === "button" ) && elem.type === type; + }; +} + +/** + * Returns a function to use in pseudos for :enabled/:disabled + * @param {Boolean} disabled true for :disabled; false for :enabled + */ +function createDisabledPseudo( disabled ) { + + // Known :disabled false positives: fieldset[disabled] > legend:nth-of-type(n+2) :can-disable + return function( elem ) { + + // Only certain elements can match :enabled or :disabled + // https://html.spec.whatwg.org/multipage/scripting.html#selector-enabled + // https://html.spec.whatwg.org/multipage/scripting.html#selector-disabled + if ( "form" in elem ) { + + // Check for inherited disabledness on relevant non-disabled elements: + // * listed form-associated elements in a disabled fieldset + // https://html.spec.whatwg.org/multipage/forms.html#category-listed + // https://html.spec.whatwg.org/multipage/forms.html#concept-fe-disabled + // * option elements in a disabled optgroup + // https://html.spec.whatwg.org/multipage/forms.html#concept-option-disabled + // All such elements have a "form" property. + if ( elem.parentNode && elem.disabled === false ) { + + // Option elements defer to a parent optgroup if present + if ( "label" in elem ) { + if ( "label" in elem.parentNode ) { + return elem.parentNode.disabled === disabled; + } else { + return elem.disabled === disabled; + } + } + + // Support: IE 6 - 11 + // Use the isDisabled shortcut property to check for disabled fieldset ancestors + return elem.isDisabled === disabled || + + // Where there is no isDisabled, check manually + /* jshint -W018 */ + elem.isDisabled !== !disabled && + inDisabledFieldset( elem ) === disabled; + } + + return elem.disabled === disabled; + + // Try to winnow out elements that can't be disabled before trusting the disabled property. + // Some victims get caught in our net (label, legend, menu, track), but it shouldn't + // even exist on them, let alone have a boolean value. 
+ } else if ( "label" in elem ) { + return elem.disabled === disabled; + } + + // Remaining elements are neither :enabled nor :disabled + return false; + }; +} + +/** + * Returns a function to use in pseudos for positionals + * @param {Function} fn + */ +function createPositionalPseudo( fn ) { + return markFunction( function( argument ) { + argument = +argument; + return markFunction( function( seed, matches ) { + var j, + matchIndexes = fn( [], seed.length, argument ), + i = matchIndexes.length; + + // Match elements found at the specified indexes + while ( i-- ) { + if ( seed[ ( j = matchIndexes[ i ] ) ] ) { + seed[ j ] = !( matches[ j ] = seed[ j ] ); + } + } + } ); + } ); +} + +/** + * Checks a node for validity as a Sizzle context + * @param {Element|Object=} context + * @returns {Element|Object|Boolean} The input node if acceptable, otherwise a falsy value + */ +function testContext( context ) { + return context && typeof context.getElementsByTagName !== "undefined" && context; +} + +// Expose support vars for convenience +support = Sizzle.support = {}; + +/** + * Detects XML nodes + * @param {Element|Object} elem An element or a document + * @returns {Boolean} True iff elem is a non-HTML XML node + */ +isXML = Sizzle.isXML = function( elem ) { + var namespace = elem.namespaceURI, + docElem = ( elem.ownerDocument || elem ).documentElement; + + // Support: IE <=8 + // Assume HTML when documentElement doesn't yet exist, such as inside loading iframes + // https://bugs.jquery.com/ticket/4833 + return !rhtml.test( namespace || docElem && docElem.nodeName || "HTML" ); +}; + +/** + * Sets document-related variables once based on the current document + * @param {Element|Object} [doc] An element or document object to use to set the document + * @returns {Object} Returns the current document + */ +setDocument = Sizzle.setDocument = function( node ) { + var hasCompare, subWindow, + doc = node ? node.ownerDocument || node : preferredDoc; + + // Return early if doc is invalid or already selected + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + if ( doc == document || doc.nodeType !== 9 || !doc.documentElement ) { + return document; + } + + // Update global variables + document = doc; + docElem = document.documentElement; + documentIsHTML = !isXML( document ); + + // Support: IE 9 - 11+, Edge 12 - 18+ + // Accessing iframe documents after unload throws "permission denied" errors (jQuery #13936) + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + if ( preferredDoc != document && + ( subWindow = document.defaultView ) && subWindow.top !== subWindow ) { + + // Support: IE 11, Edge + if ( subWindow.addEventListener ) { + subWindow.addEventListener( "unload", unloadHandler, false ); + + // Support: IE 9 - 10 only + } else if ( subWindow.attachEvent ) { + subWindow.attachEvent( "onunload", unloadHandler ); + } + } + + // Support: IE 8 - 11+, Edge 12 - 18+, Chrome <=16 - 25 only, Firefox <=3.6 - 31 only, + // Safari 4 - 5 only, Opera <=11.6 - 12.x only + // IE/Edge & older browsers don't support the :scope pseudo-class. + // Support: Safari 6.0 only + // Safari 6.0 supports :scope but it's an alias of :root there. 
+ support.scope = assert( function( el ) { + docElem.appendChild( el ).appendChild( document.createElement( "div" ) ); + return typeof el.querySelectorAll !== "undefined" && + !el.querySelectorAll( ":scope fieldset div" ).length; + } ); + + /* Attributes + ---------------------------------------------------------------------- */ + + // Support: IE<8 + // Verify that getAttribute really returns attributes and not properties + // (excepting IE8 booleans) + support.attributes = assert( function( el ) { + el.className = "i"; + return !el.getAttribute( "className" ); + } ); + + /* getElement(s)By* + ---------------------------------------------------------------------- */ + + // Check if getElementsByTagName("*") returns only elements + support.getElementsByTagName = assert( function( el ) { + el.appendChild( document.createComment( "" ) ); + return !el.getElementsByTagName( "*" ).length; + } ); + + // Support: IE<9 + support.getElementsByClassName = rnative.test( document.getElementsByClassName ); + + // Support: IE<10 + // Check if getElementById returns elements by name + // The broken getElementById methods don't pick up programmatically-set names, + // so use a roundabout getElementsByName test + support.getById = assert( function( el ) { + docElem.appendChild( el ).id = expando; + return !document.getElementsByName || !document.getElementsByName( expando ).length; + } ); + + // ID filter and find + if ( support.getById ) { + Expr.filter[ "ID" ] = function( id ) { + var attrId = id.replace( runescape, funescape ); + return function( elem ) { + return elem.getAttribute( "id" ) === attrId; + }; + }; + Expr.find[ "ID" ] = function( id, context ) { + if ( typeof context.getElementById !== "undefined" && documentIsHTML ) { + var elem = context.getElementById( id ); + return elem ? [ elem ] : []; + } + }; + } else { + Expr.filter[ "ID" ] = function( id ) { + var attrId = id.replace( runescape, funescape ); + return function( elem ) { + var node = typeof elem.getAttributeNode !== "undefined" && + elem.getAttributeNode( "id" ); + return node && node.value === attrId; + }; + }; + + // Support: IE 6 - 7 only + // getElementById is not reliable as a find shortcut + Expr.find[ "ID" ] = function( id, context ) { + if ( typeof context.getElementById !== "undefined" && documentIsHTML ) { + var node, i, elems, + elem = context.getElementById( id ); + + if ( elem ) { + + // Verify the id attribute + node = elem.getAttributeNode( "id" ); + if ( node && node.value === id ) { + return [ elem ]; + } + + // Fall back on getElementsByName + elems = context.getElementsByName( id ); + i = 0; + while ( ( elem = elems[ i++ ] ) ) { + node = elem.getAttributeNode( "id" ); + if ( node && node.value === id ) { + return [ elem ]; + } + } + } + + return []; + } + }; + } + + // Tag + Expr.find[ "TAG" ] = support.getElementsByTagName ? 
+ function( tag, context ) {
+ if ( typeof context.getElementsByTagName !== "undefined" ) {
+ return context.getElementsByTagName( tag );
+
+ // DocumentFragment nodes don't have gEBTN
+ } else if ( support.qsa ) {
+ return context.querySelectorAll( tag );
+ }
+ } :
+
+ function( tag, context ) {
+ var elem,
+ tmp = [],
+ i = 0,
+
+ // By happy coincidence, a (broken) gEBTN appears on DocumentFragment nodes too
+ results = context.getElementsByTagName( tag );
+
+ // Filter out possible comments
+ if ( tag === "*" ) {
+ while ( ( elem = results[ i++ ] ) ) {
+ if ( elem.nodeType === 1 ) {
+ tmp.push( elem );
+ }
+ }
+
+ return tmp;
+ }
+ return results;
+ };
+
+ // Class
+ Expr.find[ "CLASS" ] = support.getElementsByClassName && function( className, context ) {
+ if ( typeof context.getElementsByClassName !== "undefined" && documentIsHTML ) {
+ return context.getElementsByClassName( className );
+ }
+ };
+
+ /* QSA/matchesSelector
+ ---------------------------------------------------------------------- */
+
+ // QSA and matchesSelector support
+
+ // matchesSelector(:active) reports false when true (IE9/Opera 11.5)
+ rbuggyMatches = [];
+
+ // qSa(:focus) reports false when true (Chrome 21)
+ // We allow this because of a bug in IE8/9 that throws an error
+ // whenever `document.activeElement` is accessed on an iframe
+ // So, we allow :focus to pass through QSA all the time to avoid the IE error
+ // See https://bugs.jquery.com/ticket/13378
+ rbuggyQSA = [];
+
+ if ( ( support.qsa = rnative.test( document.querySelectorAll ) ) ) {
+
+ // Build QSA regex
+ // Regex strategy adopted from Diego Perini
+ assert( function( el ) {
+
+ var input;
+
+ // Select is set to empty string on purpose
+ // This is to test IE's treatment of not explicitly
+ // setting a boolean content attribute,
+ // since its presence should be enough
+ // https://bugs.jquery.com/ticket/12359
+ docElem.appendChild( el ).innerHTML = "<a id='" + expando + "'></a>" +
+ "<select id='" + expando + "-\r\\' msallowcapture=''>" +
+ "<option selected=''></option></select>";
+
+ // Support: IE8, Opera 11-12.16
+ // Nothing should be selected when empty strings follow ^= or $= or *=
+ // The test attribute must be unknown in Opera but "safe" for WinRT
+ // https://msdn.microsoft.com/en-us/library/ie/hh465388.aspx#attribute_section
+ if ( el.querySelectorAll( "[msallowcapture^='']" ).length ) {
+ rbuggyQSA.push( "[*^$]=" + whitespace + "*(?:''|\"\")" );
+ }
+
+ // Support: IE8
+ // Boolean attributes and "value" are not treated correctly
+ if ( !el.querySelectorAll( "[selected]" ).length ) {
+ rbuggyQSA.push( "\\[" + whitespace + "*(?:value|" + booleans + ")" );
+ }
+
+ // Support: Chrome<29, Android<4.4, Safari<7.0+, iOS<7.0+, PhantomJS<1.9.8+
+ if ( !el.querySelectorAll( "[id~=" + expando + "-]" ).length ) {
+ rbuggyQSA.push( "~=" );
+ }
+
+ // Support: IE 11+, Edge 15 - 18+
+ // IE 11/Edge don't find elements on a `[name='']` query in some cases.
+ // Adding a temporary attribute to the document before the selection works
+ // around the issue.
+ // Interestingly, IE 10 & older don't seem to have the issue.
+ input = document.createElement( "input" );
+ input.setAttribute( "name", "" );
+ el.appendChild( input );
+ if ( !el.querySelectorAll( "[name='']" ).length ) {
+ rbuggyQSA.push( "\\[" + whitespace + "*name" + whitespace + "*=" +
+ whitespace + "*(?:''|\"\")" );
+ }
+
+ // Webkit/Opera - :checked should return selected option elements
+ // http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked
+ // IE8 throws error here and will not see later tests
+ if ( !el.querySelectorAll( ":checked" ).length ) {
+ rbuggyQSA.push( ":checked" );
+ }
+
+ // Support: Safari 8+, iOS 8+
+ // https://bugs.webkit.org/show_bug.cgi?id=136851
+ // In-page `selector#id sibling-combinator selector` fails
+ if ( !el.querySelectorAll( "a#" + expando + "+*" ).length ) {
+ rbuggyQSA.push( ".#.+[+~]" );
+ }
+
+ // Support: Firefox <=3.6 - 5 only
+ // Old Firefox doesn't throw on a badly-escaped identifier.
+ el.querySelectorAll( "\\\f" );
+ rbuggyQSA.push( "[\\r\\n\\f]" );
+ } );
+
+ assert( function( el ) {
+ el.innerHTML = "<a href='' disabled='disabled'></a>" +
+ "<select disabled='disabled'><option/></select>";
+
+ // Support: Windows 8 Native Apps
+ // The type and name attributes are restricted during .innerHTML assignment
+ var input = document.createElement( "input" );
+ input.setAttribute( "type", "hidden" );
+ el.appendChild( input ).setAttribute( "name", "D" );
+
+ // Support: IE8
+ // Enforce case-sensitivity of name attribute
+ if ( el.querySelectorAll( "[name=d]" ).length ) {
+ rbuggyQSA.push( "name" + whitespace + "*[*^$|!~]?=" );
+ }
+
+ // FF 3.5 - :enabled/:disabled and hidden elements (hidden elements are still enabled)
+ // IE8 throws error here and will not see later tests
+ if ( el.querySelectorAll( ":enabled" ).length !== 2 ) {
+ rbuggyQSA.push( ":enabled", ":disabled" );
+ }
+
+ // Support: IE9-11+
+ // IE's :disabled selector does not pick up the children of disabled fieldsets
+ docElem.appendChild( el ).disabled = true;
+ if ( el.querySelectorAll( ":disabled" ).length !== 2 ) {
+ rbuggyQSA.push( ":enabled", ":disabled" );
+ }
+
+ // Support: Opera 10 - 11 only
+ // Opera 10-11 does not throw on post-comma invalid pseudos
+ el.querySelectorAll( "*,:x" );
+ rbuggyQSA.push( ",.*:" );
+ } );
+ }
+
+ if ( ( support.matchesSelector = rnative.test( ( matches = docElem.matches ||
+ docElem.webkitMatchesSelector ||
+ docElem.mozMatchesSelector ||
+ docElem.oMatchesSelector ||
+ docElem.msMatchesSelector ) ) ) ) {
+
+ assert( function( el ) {
+
+ // Check to see if it's possible to do matchesSelector
+ // on a disconnected node (IE 9)
+ support.disconnectedMatch = matches.call( el, "*" );
+
+ // This should fail with an exception
+ // Gecko does not error, returns false instead
+ matches.call( el, "[s!='']:x" );
+ rbuggyMatches.push( "!=", pseudos );
+ } );
+ }
+
+ rbuggyQSA = rbuggyQSA.length && new RegExp( rbuggyQSA.join( "|" ) );
+ rbuggyMatches = rbuggyMatches.length && new RegExp( rbuggyMatches.join( "|" ) );
+
+ /* Contains
+ ---------------------------------------------------------------------- */
+ hasCompare = rnative.test( docElem.compareDocumentPosition );
+
+ // Element contains another
+ // Purposefully self-exclusive
+ // As in, an element does not contain itself
+ contains = hasCompare || rnative.test( docElem.contains ) ?
+ function( a, b ) {
+ var adown = a.nodeType === 9 ? a.documentElement : a,
+ bup = b && b.parentNode;
+ return a === bup || !!( bup && bup.nodeType === 1 && (
+ adown.contains ? 
+ adown.contains( bup ) : + a.compareDocumentPosition && a.compareDocumentPosition( bup ) & 16 + ) ); + } : + function( a, b ) { + if ( b ) { + while ( ( b = b.parentNode ) ) { + if ( b === a ) { + return true; + } + } + } + return false; + }; + + /* Sorting + ---------------------------------------------------------------------- */ + + // Document order sorting + sortOrder = hasCompare ? + function( a, b ) { + + // Flag for duplicate removal + if ( a === b ) { + hasDuplicate = true; + return 0; + } + + // Sort on method existence if only one input has compareDocumentPosition + var compare = !a.compareDocumentPosition - !b.compareDocumentPosition; + if ( compare ) { + return compare; + } + + // Calculate position if both inputs belong to the same document + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + compare = ( a.ownerDocument || a ) == ( b.ownerDocument || b ) ? + a.compareDocumentPosition( b ) : + + // Otherwise we know they are disconnected + 1; + + // Disconnected nodes + if ( compare & 1 || + ( !support.sortDetached && b.compareDocumentPosition( a ) === compare ) ) { + + // Choose the first element that is related to our preferred document + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + if ( a == document || a.ownerDocument == preferredDoc && + contains( preferredDoc, a ) ) { + return -1; + } + + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + if ( b == document || b.ownerDocument == preferredDoc && + contains( preferredDoc, b ) ) { + return 1; + } + + // Maintain original order + return sortInput ? + ( indexOf( sortInput, a ) - indexOf( sortInput, b ) ) : + 0; + } + + return compare & 4 ? -1 : 1; + } : + function( a, b ) { + + // Exit early if the nodes are identical + if ( a === b ) { + hasDuplicate = true; + return 0; + } + + var cur, + i = 0, + aup = a.parentNode, + bup = b.parentNode, + ap = [ a ], + bp = [ b ]; + + // Parentless nodes are either documents or disconnected + if ( !aup || !bup ) { + + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + /* eslint-disable eqeqeq */ + return a == document ? -1 : + b == document ? 1 : + /* eslint-enable eqeqeq */ + aup ? -1 : + bup ? 1 : + sortInput ? + ( indexOf( sortInput, a ) - indexOf( sortInput, b ) ) : + 0; + + // If the nodes are siblings, we can do a quick check + } else if ( aup === bup ) { + return siblingCheck( a, b ); + } + + // Otherwise we need full lists of their ancestors for comparison + cur = a; + while ( ( cur = cur.parentNode ) ) { + ap.unshift( cur ); + } + cur = b; + while ( ( cur = cur.parentNode ) ) { + bp.unshift( cur ); + } + + // Walk down the tree looking for a discrepancy + while ( ap[ i ] === bp[ i ] ) { + i++; + } + + return i ? + + // Do a sibling check if the nodes have a common ancestor + siblingCheck( ap[ i ], bp[ i ] ) : + + // Otherwise nodes in our document sort first + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. 
+ /* eslint-disable eqeqeq */ + ap[ i ] == preferredDoc ? -1 : + bp[ i ] == preferredDoc ? 1 : + /* eslint-enable eqeqeq */ + 0; + }; + + return document; +}; + +Sizzle.matches = function( expr, elements ) { + return Sizzle( expr, null, null, elements ); +}; + +Sizzle.matchesSelector = function( elem, expr ) { + setDocument( elem ); + + if ( support.matchesSelector && documentIsHTML && + !nonnativeSelectorCache[ expr + " " ] && + ( !rbuggyMatches || !rbuggyMatches.test( expr ) ) && + ( !rbuggyQSA || !rbuggyQSA.test( expr ) ) ) { + + try { + var ret = matches.call( elem, expr ); + + // IE 9's matchesSelector returns false on disconnected nodes + if ( ret || support.disconnectedMatch || + + // As well, disconnected nodes are said to be in a document + // fragment in IE 9 + elem.document && elem.document.nodeType !== 11 ) { + return ret; + } + } catch ( e ) { + nonnativeSelectorCache( expr, true ); + } + } + + return Sizzle( expr, document, null, [ elem ] ).length > 0; +}; + +Sizzle.contains = function( context, elem ) { + + // Set document vars if needed + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + if ( ( context.ownerDocument || context ) != document ) { + setDocument( context ); + } + return contains( context, elem ); +}; + +Sizzle.attr = function( elem, name ) { + + // Set document vars if needed + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + if ( ( elem.ownerDocument || elem ) != document ) { + setDocument( elem ); + } + + var fn = Expr.attrHandle[ name.toLowerCase() ], + + // Don't get fooled by Object.prototype properties (jQuery #13807) + val = fn && hasOwn.call( Expr.attrHandle, name.toLowerCase() ) ? + fn( elem, name, !documentIsHTML ) : + undefined; + + return val !== undefined ? + val : + support.attributes || !documentIsHTML ? + elem.getAttribute( name ) : + ( val = elem.getAttributeNode( name ) ) && val.specified ? 
+ val.value : + null; +}; + +Sizzle.escape = function( sel ) { + return ( sel + "" ).replace( rcssescape, fcssescape ); +}; + +Sizzle.error = function( msg ) { + throw new Error( "Syntax error, unrecognized expression: " + msg ); +}; + +/** + * Document sorting and removing duplicates + * @param {ArrayLike} results + */ +Sizzle.uniqueSort = function( results ) { + var elem, + duplicates = [], + j = 0, + i = 0; + + // Unless we *know* we can detect duplicates, assume their presence + hasDuplicate = !support.detectDuplicates; + sortInput = !support.sortStable && results.slice( 0 ); + results.sort( sortOrder ); + + if ( hasDuplicate ) { + while ( ( elem = results[ i++ ] ) ) { + if ( elem === results[ i ] ) { + j = duplicates.push( i ); + } + } + while ( j-- ) { + results.splice( duplicates[ j ], 1 ); + } + } + + // Clear input after sorting to release objects + // See https://github.com/jquery/sizzle/pull/225 + sortInput = null; + + return results; +}; + +/** + * Utility function for retrieving the text value of an array of DOM nodes + * @param {Array|Element} elem + */ +getText = Sizzle.getText = function( elem ) { + var node, + ret = "", + i = 0, + nodeType = elem.nodeType; + + if ( !nodeType ) { + + // If no nodeType, this is expected to be an array + while ( ( node = elem[ i++ ] ) ) { + + // Do not traverse comment nodes + ret += getText( node ); + } + } else if ( nodeType === 1 || nodeType === 9 || nodeType === 11 ) { + + // Use textContent for elements + // innerText usage removed for consistency of new lines (jQuery #11153) + if ( typeof elem.textContent === "string" ) { + return elem.textContent; + } else { + + // Traverse its children + for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) { + ret += getText( elem ); + } + } + } else if ( nodeType === 3 || nodeType === 4 ) { + return elem.nodeValue; + } + + // Do not include comment or processing instruction nodes + + return ret; +}; + +Expr = Sizzle.selectors = { + + // Can be adjusted by the user + cacheLength: 50, + + createPseudo: markFunction, + + match: matchExpr, + + attrHandle: {}, + + find: {}, + + relative: { + ">": { dir: "parentNode", first: true }, + " ": { dir: "parentNode" }, + "+": { dir: "previousSibling", first: true }, + "~": { dir: "previousSibling" } + }, + + preFilter: { + "ATTR": function( match ) { + match[ 1 ] = match[ 1 ].replace( runescape, funescape ); + + // Move the given value to match[3] whether quoted or unquoted + match[ 3 ] = ( match[ 3 ] || match[ 4 ] || + match[ 5 ] || "" ).replace( runescape, funescape ); + + if ( match[ 2 ] === "~=" ) { + match[ 3 ] = " " + match[ 3 ] + " "; + } + + return match.slice( 0, 4 ); + }, + + "CHILD": function( match ) { + + /* matches from matchExpr["CHILD"] + 1 type (only|nth|...) + 2 what (child|of-type) + 3 argument (even|odd|\d*|\d*n([+-]\d+)?|...) + 4 xn-component of xn+y argument ([+-]?\d*n|) + 5 sign of xn-component + 6 x of xn-component + 7 sign of y-component + 8 y of y-component + */ + match[ 1 ] = match[ 1 ].toLowerCase(); + + if ( match[ 1 ].slice( 0, 3 ) === "nth" ) { + + // nth-* requires argument + if ( !match[ 3 ] ) { + Sizzle.error( match[ 0 ] ); + } + + // numeric x and y parameters for Expr.filter.CHILD + // remember that false/true cast respectively to 0/1 + match[ 4 ] = +( match[ 4 ] ? 
+ match[ 5 ] + ( match[ 6 ] || 1 ) : + 2 * ( match[ 3 ] === "even" || match[ 3 ] === "odd" ) ); + match[ 5 ] = +( ( match[ 7 ] + match[ 8 ] ) || match[ 3 ] === "odd" ); + + // other types prohibit arguments + } else if ( match[ 3 ] ) { + Sizzle.error( match[ 0 ] ); + } + + return match; + }, + + "PSEUDO": function( match ) { + var excess, + unquoted = !match[ 6 ] && match[ 2 ]; + + if ( matchExpr[ "CHILD" ].test( match[ 0 ] ) ) { + return null; + } + + // Accept quoted arguments as-is + if ( match[ 3 ] ) { + match[ 2 ] = match[ 4 ] || match[ 5 ] || ""; + + // Strip excess characters from unquoted arguments + } else if ( unquoted && rpseudo.test( unquoted ) && + + // Get excess from tokenize (recursively) + ( excess = tokenize( unquoted, true ) ) && + + // advance to the next closing parenthesis + ( excess = unquoted.indexOf( ")", unquoted.length - excess ) - unquoted.length ) ) { + + // excess is a negative index + match[ 0 ] = match[ 0 ].slice( 0, excess ); + match[ 2 ] = unquoted.slice( 0, excess ); + } + + // Return only captures needed by the pseudo filter method (type and argument) + return match.slice( 0, 3 ); + } + }, + + filter: { + + "TAG": function( nodeNameSelector ) { + var nodeName = nodeNameSelector.replace( runescape, funescape ).toLowerCase(); + return nodeNameSelector === "*" ? + function() { + return true; + } : + function( elem ) { + return elem.nodeName && elem.nodeName.toLowerCase() === nodeName; + }; + }, + + "CLASS": function( className ) { + var pattern = classCache[ className + " " ]; + + return pattern || + ( pattern = new RegExp( "(^|" + whitespace + + ")" + className + "(" + whitespace + "|$)" ) ) && classCache( + className, function( elem ) { + return pattern.test( + typeof elem.className === "string" && elem.className || + typeof elem.getAttribute !== "undefined" && + elem.getAttribute( "class" ) || + "" + ); + } ); + }, + + "ATTR": function( name, operator, check ) { + return function( elem ) { + var result = Sizzle.attr( elem, name ); + + if ( result == null ) { + return operator === "!="; + } + if ( !operator ) { + return true; + } + + result += ""; + + /* eslint-disable max-len */ + + return operator === "=" ? result === check : + operator === "!=" ? result !== check : + operator === "^=" ? check && result.indexOf( check ) === 0 : + operator === "*=" ? check && result.indexOf( check ) > -1 : + operator === "$=" ? check && result.slice( -check.length ) === check : + operator === "~=" ? ( " " + result.replace( rwhitespace, " " ) + " " ).indexOf( check ) > -1 : + operator === "|=" ? result === check || result.slice( 0, check.length + 1 ) === check + "-" : + false; + /* eslint-enable max-len */ + + }; + }, + + "CHILD": function( type, what, _argument, first, last ) { + var simple = type.slice( 0, 3 ) !== "nth", + forward = type.slice( -4 ) !== "last", + ofType = what === "of-type"; + + return first === 1 && last === 0 ? + + // Shortcut for :nth-*(n) + function( elem ) { + return !!elem.parentNode; + } : + + function( elem, _context, xml ) { + var cache, uniqueCache, outerCache, node, nodeIndex, start, + dir = simple !== forward ? "nextSibling" : "previousSibling", + parent = elem.parentNode, + name = ofType && elem.nodeName.toLowerCase(), + useCache = !xml && !ofType, + diff = false; + + if ( parent ) { + + // :(first|last|only)-(child|of-type) + if ( simple ) { + while ( dir ) { + node = elem; + while ( ( node = node[ dir ] ) ) { + if ( ofType ? 
+ node.nodeName.toLowerCase() === name :
+ node.nodeType === 1 ) {
+
+ return false;
+ }
+ }
+
+ // Reverse direction for :only-* (if we haven't yet done so)
+ start = dir = type === "only" && !start && "nextSibling";
+ }
+ return true;
+ }
+
+ start = [ forward ? parent.firstChild : parent.lastChild ];
+
+ // non-xml :nth-child(...) stores cache data on `parent`
+ if ( forward && useCache ) {
+
+ // Seek `elem` from a previously-cached index
+
+ // ...in a gzip-friendly way
+ node = parent;
+ outerCache = node[ expando ] || ( node[ expando ] = {} );
+
+ // Support: IE <9 only
+ // Defend against cloned attroperties (jQuery gh-1709)
+ uniqueCache = outerCache[ node.uniqueID ] ||
+ ( outerCache[ node.uniqueID ] = {} );
+
+ cache = uniqueCache[ type ] || [];
+ nodeIndex = cache[ 0 ] === dirruns && cache[ 1 ];
+ diff = nodeIndex && cache[ 2 ];
+ node = nodeIndex && parent.childNodes[ nodeIndex ];
+
+ while ( ( node = ++nodeIndex && node && node[ dir ] ||
+
+ // Fallback to seeking `elem` from the start
+ ( diff = nodeIndex = 0 ) || start.pop() ) ) {
+
+ // When found, cache indexes on `parent` and break
+ if ( node.nodeType === 1 && ++diff && node === elem ) {
+ uniqueCache[ type ] = [ dirruns, nodeIndex, diff ];
+ break;
+ }
+ }
+
+ } else {
+
+ // Use previously-cached element index if available
+ if ( useCache ) {
+
+ // ...in a gzip-friendly way
+ node = elem;
+ outerCache = node[ expando ] || ( node[ expando ] = {} );
+
+ // Support: IE <9 only
+ // Defend against cloned attroperties (jQuery gh-1709)
+ uniqueCache = outerCache[ node.uniqueID ] ||
+ ( outerCache[ node.uniqueID ] = {} );
+
+ cache = uniqueCache[ type ] || [];
+ nodeIndex = cache[ 0 ] === dirruns && cache[ 1 ];
+ diff = nodeIndex;
+ }
+
+ // xml :nth-child(...)
+ // or :nth-last-child(...) or :nth(-last)?-of-type(...)
+ if ( diff === false ) {
+
+ // Use the same loop as above to seek `elem` from the start
+ while ( ( node = ++nodeIndex && node && node[ dir ] ||
+ ( diff = nodeIndex = 0 ) || start.pop() ) ) {
+
+ if ( ( ofType ?
+ node.nodeName.toLowerCase() === name :
+ node.nodeType === 1 ) &&
+ ++diff ) {
+
+ // Cache the index of each encountered element
+ if ( useCache ) {
+ outerCache = node[ expando ] ||
+ ( node[ expando ] = {} );
+
+ // Support: IE <9 only
+ // Defend against cloned attroperties (jQuery gh-1709)
+ uniqueCache = outerCache[ node.uniqueID ] ||
+ ( outerCache[ node.uniqueID ] = {} );
+
+ uniqueCache[ type ] = [ dirruns, diff ];
+ }
+
+ if ( node === elem ) {
+ break;
+ }
+ }
+ }
+ }
+ }
+
+ // Incorporate the offset, then check against cycle size
+ diff -= last;
+ return diff === first || ( diff % first === 0 && diff / first >= 0 );
+ }
+ };
+ },
+
+ "PSEUDO": function( pseudo, argument ) {
+
+ // pseudo-class names are case-insensitive
+ // http://www.w3.org/TR/selectors/#pseudo-classes
+ // Prioritize by case sensitivity in case custom pseudos are added with uppercase letters
+ // Remember that setFilters inherits from pseudos
+ var args,
+ fn = Expr.pseudos[ pseudo ] || Expr.setFilters[ pseudo.toLowerCase() ] ||
+ Sizzle.error( "unsupported pseudo: " + pseudo );
+
+ // The user may use createPseudo to indicate that
+ // arguments are needed to create the filter function
+ // just as Sizzle does
+ if ( fn[ expando ] ) {
+ return fn( argument );
+ }
+
+ // But maintain support for old signatures
+ if ( fn.length > 1 ) {
+ args = [ pseudo, pseudo, "", argument ];
+ return Expr.setFilters.hasOwnProperty( pseudo.toLowerCase() ) ? 
+ markFunction( function( seed, matches ) { + var idx, + matched = fn( seed, argument ), + i = matched.length; + while ( i-- ) { + idx = indexOf( seed, matched[ i ] ); + seed[ idx ] = !( matches[ idx ] = matched[ i ] ); + } + } ) : + function( elem ) { + return fn( elem, 0, args ); + }; + } + + return fn; + } + }, + + pseudos: { + + // Potentially complex pseudos + "not": markFunction( function( selector ) { + + // Trim the selector passed to compile + // to avoid treating leading and trailing + // spaces as combinators + var input = [], + results = [], + matcher = compile( selector.replace( rtrim, "$1" ) ); + + return matcher[ expando ] ? + markFunction( function( seed, matches, _context, xml ) { + var elem, + unmatched = matcher( seed, null, xml, [] ), + i = seed.length; + + // Match elements unmatched by `matcher` + while ( i-- ) { + if ( ( elem = unmatched[ i ] ) ) { + seed[ i ] = !( matches[ i ] = elem ); + } + } + } ) : + function( elem, _context, xml ) { + input[ 0 ] = elem; + matcher( input, null, xml, results ); + + // Don't keep the element (issue #299) + input[ 0 ] = null; + return !results.pop(); + }; + } ), + + "has": markFunction( function( selector ) { + return function( elem ) { + return Sizzle( selector, elem ).length > 0; + }; + } ), + + "contains": markFunction( function( text ) { + text = text.replace( runescape, funescape ); + return function( elem ) { + return ( elem.textContent || getText( elem ) ).indexOf( text ) > -1; + }; + } ), + + // "Whether an element is represented by a :lang() selector + // is based solely on the element's language value + // being equal to the identifier C, + // or beginning with the identifier C immediately followed by "-". + // The matching of C against the element's language value is performed case-insensitively. + // The identifier C does not have to be a valid language name." + // http://www.w3.org/TR/selectors/#lang-pseudo + "lang": markFunction( function( lang ) { + + // lang value must be a valid identifier + if ( !ridentifier.test( lang || "" ) ) { + Sizzle.error( "unsupported lang: " + lang ); + } + lang = lang.replace( runescape, funescape ).toLowerCase(); + return function( elem ) { + var elemLang; + do { + if ( ( elemLang = documentIsHTML ? 
+ elem.lang : + elem.getAttribute( "xml:lang" ) || elem.getAttribute( "lang" ) ) ) { + + elemLang = elemLang.toLowerCase(); + return elemLang === lang || elemLang.indexOf( lang + "-" ) === 0; + } + } while ( ( elem = elem.parentNode ) && elem.nodeType === 1 ); + return false; + }; + } ), + + // Miscellaneous + "target": function( elem ) { + var hash = window.location && window.location.hash; + return hash && hash.slice( 1 ) === elem.id; + }, + + "root": function( elem ) { + return elem === docElem; + }, + + "focus": function( elem ) { + return elem === document.activeElement && + ( !document.hasFocus || document.hasFocus() ) && + !!( elem.type || elem.href || ~elem.tabIndex ); + }, + + // Boolean properties + "enabled": createDisabledPseudo( false ), + "disabled": createDisabledPseudo( true ), + + "checked": function( elem ) { + + // In CSS3, :checked should return both checked and selected elements + // http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked + var nodeName = elem.nodeName.toLowerCase(); + return ( nodeName === "input" && !!elem.checked ) || + ( nodeName === "option" && !!elem.selected ); + }, + + "selected": function( elem ) { + + // Accessing this property makes selected-by-default + // options in Safari work properly + if ( elem.parentNode ) { + // eslint-disable-next-line no-unused-expressions + elem.parentNode.selectedIndex; + } + + return elem.selected === true; + }, + + // Contents + "empty": function( elem ) { + + // http://www.w3.org/TR/selectors/#empty-pseudo + // :empty is negated by element (1) or content nodes (text: 3; cdata: 4; entity ref: 5), + // but not by others (comment: 8; processing instruction: 7; etc.) + // nodeType < 6 works because attributes (2) do not appear as children + for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) { + if ( elem.nodeType < 6 ) { + return false; + } + } + return true; + }, + + "parent": function( elem ) { + return !Expr.pseudos[ "empty" ]( elem ); + }, + + // Element/input types + "header": function( elem ) { + return rheader.test( elem.nodeName ); + }, + + "input": function( elem ) { + return rinputs.test( elem.nodeName ); + }, + + "button": function( elem ) { + var name = elem.nodeName.toLowerCase(); + return name === "input" && elem.type === "button" || name === "button"; + }, + + "text": function( elem ) { + var attr; + return elem.nodeName.toLowerCase() === "input" && + elem.type === "text" && + + // Support: IE<8 + // New HTML5 attribute values (e.g., "search") appear with elem.type === "text" + ( ( attr = elem.getAttribute( "type" ) ) == null || + attr.toLowerCase() === "text" ); + }, + + // Position-in-collection + "first": createPositionalPseudo( function() { + return [ 0 ]; + } ), + + "last": createPositionalPseudo( function( _matchIndexes, length ) { + return [ length - 1 ]; + } ), + + "eq": createPositionalPseudo( function( _matchIndexes, length, argument ) { + return [ argument < 0 ? argument + length : argument ]; + } ), + + "even": createPositionalPseudo( function( matchIndexes, length ) { + var i = 0; + for ( ; i < length; i += 2 ) { + matchIndexes.push( i ); + } + return matchIndexes; + } ), + + "odd": createPositionalPseudo( function( matchIndexes, length ) { + var i = 1; + for ( ; i < length; i += 2 ) { + matchIndexes.push( i ); + } + return matchIndexes; + } ), + + "lt": createPositionalPseudo( function( matchIndexes, length, argument ) { + var i = argument < 0 ? + argument + length : + argument > length ? 
+ length : + argument; + for ( ; --i >= 0; ) { + matchIndexes.push( i ); + } + return matchIndexes; + } ), + + "gt": createPositionalPseudo( function( matchIndexes, length, argument ) { + var i = argument < 0 ? argument + length : argument; + for ( ; ++i < length; ) { + matchIndexes.push( i ); + } + return matchIndexes; + } ) + } +}; + +Expr.pseudos[ "nth" ] = Expr.pseudos[ "eq" ]; + +// Add button/input type pseudos +for ( i in { radio: true, checkbox: true, file: true, password: true, image: true } ) { + Expr.pseudos[ i ] = createInputPseudo( i ); +} +for ( i in { submit: true, reset: true } ) { + Expr.pseudos[ i ] = createButtonPseudo( i ); +} + +// Easy API for creating new setFilters +function setFilters() {} +setFilters.prototype = Expr.filters = Expr.pseudos; +Expr.setFilters = new setFilters(); + +tokenize = Sizzle.tokenize = function( selector, parseOnly ) { + var matched, match, tokens, type, + soFar, groups, preFilters, + cached = tokenCache[ selector + " " ]; + + if ( cached ) { + return parseOnly ? 0 : cached.slice( 0 ); + } + + soFar = selector; + groups = []; + preFilters = Expr.preFilter; + + while ( soFar ) { + + // Comma and first run + if ( !matched || ( match = rcomma.exec( soFar ) ) ) { + if ( match ) { + + // Don't consume trailing commas as valid + soFar = soFar.slice( match[ 0 ].length ) || soFar; + } + groups.push( ( tokens = [] ) ); + } + + matched = false; + + // Combinators + if ( ( match = rcombinators.exec( soFar ) ) ) { + matched = match.shift(); + tokens.push( { + value: matched, + + // Cast descendant combinators to space + type: match[ 0 ].replace( rtrim, " " ) + } ); + soFar = soFar.slice( matched.length ); + } + + // Filters + for ( type in Expr.filter ) { + if ( ( match = matchExpr[ type ].exec( soFar ) ) && ( !preFilters[ type ] || + ( match = preFilters[ type ]( match ) ) ) ) { + matched = match.shift(); + tokens.push( { + value: matched, + type: type, + matches: match + } ); + soFar = soFar.slice( matched.length ); + } + } + + if ( !matched ) { + break; + } + } + + // Return the length of the invalid excess + // if we're just parsing + // Otherwise, throw an error or return tokens + return parseOnly ? + soFar.length : + soFar ? + Sizzle.error( selector ) : + + // Cache the tokens + tokenCache( selector, groups ).slice( 0 ); +}; + +function toSelector( tokens ) { + var i = 0, + len = tokens.length, + selector = ""; + for ( ; i < len; i++ ) { + selector += tokens[ i ].value; + } + return selector; +} + +function addCombinator( matcher, combinator, base ) { + var dir = combinator.dir, + skip = combinator.next, + key = skip || dir, + checkNonElements = base && key === "parentNode", + doneName = done++; + + return combinator.first ? 
+ + // Check against closest ancestor/preceding element + function( elem, context, xml ) { + while ( ( elem = elem[ dir ] ) ) { + if ( elem.nodeType === 1 || checkNonElements ) { + return matcher( elem, context, xml ); + } + } + return false; + } : + + // Check against all ancestor/preceding elements + function( elem, context, xml ) { + var oldCache, uniqueCache, outerCache, + newCache = [ dirruns, doneName ]; + + // We can't set arbitrary data on XML nodes, so they don't benefit from combinator caching + if ( xml ) { + while ( ( elem = elem[ dir ] ) ) { + if ( elem.nodeType === 1 || checkNonElements ) { + if ( matcher( elem, context, xml ) ) { + return true; + } + } + } + } else { + while ( ( elem = elem[ dir ] ) ) { + if ( elem.nodeType === 1 || checkNonElements ) { + outerCache = elem[ expando ] || ( elem[ expando ] = {} ); + + // Support: IE <9 only + // Defend against cloned attroperties (jQuery gh-1709) + uniqueCache = outerCache[ elem.uniqueID ] || + ( outerCache[ elem.uniqueID ] = {} ); + + if ( skip && skip === elem.nodeName.toLowerCase() ) { + elem = elem[ dir ] || elem; + } else if ( ( oldCache = uniqueCache[ key ] ) && + oldCache[ 0 ] === dirruns && oldCache[ 1 ] === doneName ) { + + // Assign to newCache so results back-propagate to previous elements + return ( newCache[ 2 ] = oldCache[ 2 ] ); + } else { + + // Reuse newcache so results back-propagate to previous elements + uniqueCache[ key ] = newCache; + + // A match means we're done; a fail means we have to keep checking + if ( ( newCache[ 2 ] = matcher( elem, context, xml ) ) ) { + return true; + } + } + } + } + } + return false; + }; +} + +function elementMatcher( matchers ) { + return matchers.length > 1 ? + function( elem, context, xml ) { + var i = matchers.length; + while ( i-- ) { + if ( !matchers[ i ]( elem, context, xml ) ) { + return false; + } + } + return true; + } : + matchers[ 0 ]; +} + +function multipleContexts( selector, contexts, results ) { + var i = 0, + len = contexts.length; + for ( ; i < len; i++ ) { + Sizzle( selector, contexts[ i ], results ); + } + return results; +} + +function condense( unmatched, map, filter, context, xml ) { + var elem, + newUnmatched = [], + i = 0, + len = unmatched.length, + mapped = map != null; + + for ( ; i < len; i++ ) { + if ( ( elem = unmatched[ i ] ) ) { + if ( !filter || filter( elem, context, xml ) ) { + newUnmatched.push( elem ); + if ( mapped ) { + map.push( i ); + } + } + } + } + + return newUnmatched; +} + +function setMatcher( preFilter, selector, matcher, postFilter, postFinder, postSelector ) { + if ( postFilter && !postFilter[ expando ] ) { + postFilter = setMatcher( postFilter ); + } + if ( postFinder && !postFinder[ expando ] ) { + postFinder = setMatcher( postFinder, postSelector ); + } + return markFunction( function( seed, results, context, xml ) { + var temp, i, elem, + preMap = [], + postMap = [], + preexisting = results.length, + + // Get initial elements from seed or context + elems = seed || multipleContexts( + selector || "*", + context.nodeType ? [ context ] : context, + [] + ), + + // Prefilter to get matcher input, preserving a map for seed-results synchronization + matcherIn = preFilter && ( seed || !selector ) ? + condense( elems, preMap, preFilter, context, xml ) : + elems, + + matcherOut = matcher ? + + // If we have a postFinder, or filtered seed, or non-seed postFilter or preexisting results, + postFinder || ( seed ? preFilter : preexisting || postFilter ) ? 
+ + // ...intermediate processing is necessary + [] : + + // ...otherwise use results directly + results : + matcherIn; + + // Find primary matches + if ( matcher ) { + matcher( matcherIn, matcherOut, context, xml ); + } + + // Apply postFilter + if ( postFilter ) { + temp = condense( matcherOut, postMap ); + postFilter( temp, [], context, xml ); + + // Un-match failing elements by moving them back to matcherIn + i = temp.length; + while ( i-- ) { + if ( ( elem = temp[ i ] ) ) { + matcherOut[ postMap[ i ] ] = !( matcherIn[ postMap[ i ] ] = elem ); + } + } + } + + if ( seed ) { + if ( postFinder || preFilter ) { + if ( postFinder ) { + + // Get the final matcherOut by condensing this intermediate into postFinder contexts + temp = []; + i = matcherOut.length; + while ( i-- ) { + if ( ( elem = matcherOut[ i ] ) ) { + + // Restore matcherIn since elem is not yet a final match + temp.push( ( matcherIn[ i ] = elem ) ); + } + } + postFinder( null, ( matcherOut = [] ), temp, xml ); + } + + // Move matched elements from seed to results to keep them synchronized + i = matcherOut.length; + while ( i-- ) { + if ( ( elem = matcherOut[ i ] ) && + ( temp = postFinder ? indexOf( seed, elem ) : preMap[ i ] ) > -1 ) { + + seed[ temp ] = !( results[ temp ] = elem ); + } + } + } + + // Add elements to results, through postFinder if defined + } else { + matcherOut = condense( + matcherOut === results ? + matcherOut.splice( preexisting, matcherOut.length ) : + matcherOut + ); + if ( postFinder ) { + postFinder( null, results, matcherOut, xml ); + } else { + push.apply( results, matcherOut ); + } + } + } ); +} + +function matcherFromTokens( tokens ) { + var checkContext, matcher, j, + len = tokens.length, + leadingRelative = Expr.relative[ tokens[ 0 ].type ], + implicitRelative = leadingRelative || Expr.relative[ " " ], + i = leadingRelative ? 1 : 0, + + // The foundational matcher ensures that elements are reachable from top-level context(s) + matchContext = addCombinator( function( elem ) { + return elem === checkContext; + }, implicitRelative, true ), + matchAnyContext = addCombinator( function( elem ) { + return indexOf( checkContext, elem ) > -1; + }, implicitRelative, true ), + matchers = [ function( elem, context, xml ) { + var ret = ( !leadingRelative && ( xml || context !== outermostContext ) ) || ( + ( checkContext = context ).nodeType ? + matchContext( elem, context, xml ) : + matchAnyContext( elem, context, xml ) ); + + // Avoid hanging onto element (issue #299) + checkContext = null; + return ret; + } ]; + + for ( ; i < len; i++ ) { + if ( ( matcher = Expr.relative[ tokens[ i ].type ] ) ) { + matchers = [ addCombinator( elementMatcher( matchers ), matcher ) ]; + } else { + matcher = Expr.filter[ tokens[ i ].type ].apply( null, tokens[ i ].matches ); + + // Return special upon seeing a positional matcher + if ( matcher[ expando ] ) { + + // Find the next relative operator (if any) for proper handling + j = ++i; + for ( ; j < len; j++ ) { + if ( Expr.relative[ tokens[ j ].type ] ) { + break; + } + } + return setMatcher( + i > 1 && elementMatcher( matchers ), + i > 1 && toSelector( + + // If the preceding token was a descendant combinator, insert an implicit any-element `*` + tokens + .slice( 0, i - 1 ) + .concat( { value: tokens[ i - 2 ].type === " " ? 
"*" : "" } ) + ).replace( rtrim, "$1" ), + matcher, + i < j && matcherFromTokens( tokens.slice( i, j ) ), + j < len && matcherFromTokens( ( tokens = tokens.slice( j ) ) ), + j < len && toSelector( tokens ) + ); + } + matchers.push( matcher ); + } + } + + return elementMatcher( matchers ); +} + +function matcherFromGroupMatchers( elementMatchers, setMatchers ) { + var bySet = setMatchers.length > 0, + byElement = elementMatchers.length > 0, + superMatcher = function( seed, context, xml, results, outermost ) { + var elem, j, matcher, + matchedCount = 0, + i = "0", + unmatched = seed && [], + setMatched = [], + contextBackup = outermostContext, + + // We must always have either seed elements or outermost context + elems = seed || byElement && Expr.find[ "TAG" ]( "*", outermost ), + + // Use integer dirruns iff this is the outermost matcher + dirrunsUnique = ( dirruns += contextBackup == null ? 1 : Math.random() || 0.1 ), + len = elems.length; + + if ( outermost ) { + + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + outermostContext = context == document || context || outermost; + } + + // Add elements passing elementMatchers directly to results + // Support: IE<9, Safari + // Tolerate NodeList properties (IE: "length"; Safari: ) matching elements by id + for ( ; i !== len && ( elem = elems[ i ] ) != null; i++ ) { + if ( byElement && elem ) { + j = 0; + + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + if ( !context && elem.ownerDocument != document ) { + setDocument( elem ); + xml = !documentIsHTML; + } + while ( ( matcher = elementMatchers[ j++ ] ) ) { + if ( matcher( elem, context || document, xml ) ) { + results.push( elem ); + break; + } + } + if ( outermost ) { + dirruns = dirrunsUnique; + } + } + + // Track unmatched elements for set filters + if ( bySet ) { + + // They will have gone through all possible matchers + if ( ( elem = !matcher && elem ) ) { + matchedCount--; + } + + // Lengthen the array for every element, matched or not + if ( seed ) { + unmatched.push( elem ); + } + } + } + + // `i` is now the count of elements visited above, and adding it to `matchedCount` + // makes the latter nonnegative. + matchedCount += i; + + // Apply set filters to unmatched elements + // NOTE: This can be skipped if there are no unmatched elements (i.e., `matchedCount` + // equals `i`), unless we didn't visit _any_ elements in the above loop because we have + // no element matchers and no seed. + // Incrementing an initially-string "0" `i` allows `i` to remain a string only in that + // case, which will result in a "00" `matchedCount` that differs from `i` but is also + // numerically zero. 
+ if ( bySet && i !== matchedCount ) { + j = 0; + while ( ( matcher = setMatchers[ j++ ] ) ) { + matcher( unmatched, setMatched, context, xml ); + } + + if ( seed ) { + + // Reintegrate element matches to eliminate the need for sorting + if ( matchedCount > 0 ) { + while ( i-- ) { + if ( !( unmatched[ i ] || setMatched[ i ] ) ) { + setMatched[ i ] = pop.call( results ); + } + } + } + + // Discard index placeholder values to get only actual matches + setMatched = condense( setMatched ); + } + + // Add matches to results + push.apply( results, setMatched ); + + // Seedless set matches succeeding multiple successful matchers stipulate sorting + if ( outermost && !seed && setMatched.length > 0 && + ( matchedCount + setMatchers.length ) > 1 ) { + + Sizzle.uniqueSort( results ); + } + } + + // Override manipulation of globals by nested matchers + if ( outermost ) { + dirruns = dirrunsUnique; + outermostContext = contextBackup; + } + + return unmatched; + }; + + return bySet ? + markFunction( superMatcher ) : + superMatcher; +} + +compile = Sizzle.compile = function( selector, match /* Internal Use Only */ ) { + var i, + setMatchers = [], + elementMatchers = [], + cached = compilerCache[ selector + " " ]; + + if ( !cached ) { + + // Generate a function of recursive functions that can be used to check each element + if ( !match ) { + match = tokenize( selector ); + } + i = match.length; + while ( i-- ) { + cached = matcherFromTokens( match[ i ] ); + if ( cached[ expando ] ) { + setMatchers.push( cached ); + } else { + elementMatchers.push( cached ); + } + } + + // Cache the compiled function + cached = compilerCache( + selector, + matcherFromGroupMatchers( elementMatchers, setMatchers ) + ); + + // Save selector and tokenization + cached.selector = selector; + } + return cached; +}; + +/** + * A low-level selection function that works with Sizzle's compiled + * selector functions + * @param {String|Function} selector A selector or a pre-compiled + * selector function built with Sizzle.compile + * @param {Element} context + * @param {Array} [results] + * @param {Array} [seed] A set of elements to match against + */ +select = Sizzle.select = function( selector, context, results, seed ) { + var i, tokens, token, type, find, + compiled = typeof selector === "function" && selector, + match = !seed && tokenize( ( selector = compiled.selector || selector ) ); + + results = results || []; + + // Try to minimize operations if there is only one selector in the list and no seed + // (the latter of which guarantees us context) + if ( match.length === 1 ) { + + // Reduce context if the leading compound selector is an ID + tokens = match[ 0 ] = match[ 0 ].slice( 0 ); + if ( tokens.length > 2 && ( token = tokens[ 0 ] ).type === "ID" && + context.nodeType === 9 && documentIsHTML && Expr.relative[ tokens[ 1 ].type ] ) { + + context = ( Expr.find[ "ID" ]( token.matches[ 0 ] + .replace( runescape, funescape ), context ) || [] )[ 0 ]; + if ( !context ) { + return results; + + // Precompiled matchers will still verify ancestry, so step up a level + } else if ( compiled ) { + context = context.parentNode; + } + + selector = selector.slice( tokens.shift().value.length ); + } + + // Fetch a seed set for right-to-left matching + i = matchExpr[ "needsContext" ].test( selector ) ? 
0 : tokens.length;
+ while ( i-- ) {
+ token = tokens[ i ];
+
+ // Abort if we hit a combinator
+ if ( Expr.relative[ ( type = token.type ) ] ) {
+ break;
+ }
+ if ( ( find = Expr.find[ type ] ) ) {
+
+ // Search, expanding context for leading sibling combinators
+ if ( ( seed = find(
+ token.matches[ 0 ].replace( runescape, funescape ),
+ rsibling.test( tokens[ 0 ].type ) && testContext( context.parentNode ) ||
+ context
+ ) ) ) {
+
+ // If seed is empty or no tokens remain, we can return early
+ tokens.splice( i, 1 );
+ selector = seed.length && toSelector( tokens );
+ if ( !selector ) {
+ push.apply( results, seed );
+ return results;
+ }
+
+ break;
+ }
+ }
+ }
+ }
+
+ // Compile and execute a filtering function if one is not provided
+ // Provide `match` to avoid retokenization if we modified the selector above
+ ( compiled || compile( selector, match ) )(
+ seed,
+ context,
+ !documentIsHTML,
+ results,
+ !context || rsibling.test( selector ) && testContext( context.parentNode ) || context
+ );
+ return results;
+};
+
+// One-time assignments
+
+// Sort stability
+support.sortStable = expando.split( "" ).sort( sortOrder ).join( "" ) === expando;
+
+// Support: Chrome 14-35+
+// Always assume duplicates if they aren't passed to the comparison function
+support.detectDuplicates = !!hasDuplicate;
+
+// Initialize against the default document
+setDocument();
+
+// Support: Webkit<537.32 - Safari 6.0.3/Chrome 25 (fixed in Chrome 27)
+// Detached nodes confoundingly follow *each other*
+support.sortDetached = assert( function( el ) {
+
+ // Should return 1, but returns 4 (following)
+ return el.compareDocumentPosition( document.createElement( "fieldset" ) ) & 1;
+} );
+
+// Support: IE<8
+// Prevent attribute/property "interpolation"
+// https://msdn.microsoft.com/en-us/library/ms536429%28VS.85%29.aspx
+if ( !assert( function( el ) {
+ el.innerHTML = "<a href='#'></a>";
+ return el.firstChild.getAttribute( "href" ) === "#";
+} ) ) {
+ addHandle( "type|href|height|width", function( elem, name, isXML ) {
+ if ( !isXML ) {
+ return elem.getAttribute( name, name.toLowerCase() === "type" ? 1 : 2 );
+ }
+ } );
+}
+
+// Support: IE<9
+// Use defaultValue in place of getAttribute("value")
+if ( !support.attributes || !assert( function( el ) {
+ el.innerHTML = "<input/>";
+ el.firstChild.setAttribute( "value", "" );
+ return el.firstChild.getAttribute( "value" ) === "";
+} ) ) {
+ addHandle( "value", function( elem, _name, isXML ) {
+ if ( !isXML && elem.nodeName.toLowerCase() === "input" ) {
+ return elem.defaultValue;
+ }
+ } );
+}
+
+// Support: IE<9
+// Use getAttributeNode to fetch booleans when getAttribute lies
+if ( !assert( function( el ) {
+ return el.getAttribute( "disabled" ) == null;
+} ) ) {
+ addHandle( booleans, function( elem, name, isXML ) {
+ var val;
+ if ( !isXML ) {
+ return elem[ name ] === true ? name.toLowerCase() :
+ ( val = elem.getAttributeNode( name ) ) && val.specified ? 
+ val.value : + null; + } + } ); +} + +return Sizzle; + +} )( window ); + + + +jQuery.find = Sizzle; +jQuery.expr = Sizzle.selectors; + +// Deprecated +jQuery.expr[ ":" ] = jQuery.expr.pseudos; +jQuery.uniqueSort = jQuery.unique = Sizzle.uniqueSort; +jQuery.text = Sizzle.getText; +jQuery.isXMLDoc = Sizzle.isXML; +jQuery.contains = Sizzle.contains; +jQuery.escapeSelector = Sizzle.escape; + + + + +var dir = function( elem, dir, until ) { + var matched = [], + truncate = until !== undefined; + + while ( ( elem = elem[ dir ] ) && elem.nodeType !== 9 ) { + if ( elem.nodeType === 1 ) { + if ( truncate && jQuery( elem ).is( until ) ) { + break; + } + matched.push( elem ); + } + } + return matched; +}; + + +var siblings = function( n, elem ) { + var matched = []; + + for ( ; n; n = n.nextSibling ) { + if ( n.nodeType === 1 && n !== elem ) { + matched.push( n ); + } + } + + return matched; +}; + + +var rneedsContext = jQuery.expr.match.needsContext; + + + +function nodeName( elem, name ) { + + return elem.nodeName && elem.nodeName.toLowerCase() === name.toLowerCase(); + +}; +var rsingleTag = ( /^<([a-z][^\/\0>:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i ); + + + +// Implement the identical functionality for filter and not +function winnow( elements, qualifier, not ) { + if ( isFunction( qualifier ) ) { + return jQuery.grep( elements, function( elem, i ) { + return !!qualifier.call( elem, i, elem ) !== not; + } ); + } + + // Single element + if ( qualifier.nodeType ) { + return jQuery.grep( elements, function( elem ) { + return ( elem === qualifier ) !== not; + } ); + } + + // Arraylike of elements (jQuery, arguments, Array) + if ( typeof qualifier !== "string" ) { + return jQuery.grep( elements, function( elem ) { + return ( indexOf.call( qualifier, elem ) > -1 ) !== not; + } ); + } + + // Filtered directly for both simple and complex selectors + return jQuery.filter( qualifier, elements, not ); +} + +jQuery.filter = function( expr, elems, not ) { + var elem = elems[ 0 ]; + + if ( not ) { + expr = ":not(" + expr + ")"; + } + + if ( elems.length === 1 && elem.nodeType === 1 ) { + return jQuery.find.matchesSelector( elem, expr ) ? [ elem ] : []; + } + + return jQuery.find.matches( expr, jQuery.grep( elems, function( elem ) { + return elem.nodeType === 1; + } ) ); +}; + +jQuery.fn.extend( { + find: function( selector ) { + var i, ret, + len = this.length, + self = this; + + if ( typeof selector !== "string" ) { + return this.pushStack( jQuery( selector ).filter( function() { + for ( i = 0; i < len; i++ ) { + if ( jQuery.contains( self[ i ], this ) ) { + return true; + } + } + } ) ); + } + + ret = this.pushStack( [] ); + + for ( i = 0; i < len; i++ ) { + jQuery.find( selector, self[ i ], ret ); + } + + return len > 1 ? jQuery.uniqueSort( ret ) : ret; + }, + filter: function( selector ) { + return this.pushStack( winnow( this, selector || [], false ) ); + }, + not: function( selector ) { + return this.pushStack( winnow( this, selector || [], true ) ); + }, + is: function( selector ) { + return !!winnow( + this, + + // If this is a positional/relative selector, check membership in the returned set + // so $("p:first").is("p:last") won't return true for a doc with two "p". + typeof selector === "string" && rneedsContext.test( selector ) ? 
+ jQuery( selector ) : + selector || [], + false + ).length; + } +} ); + + +// Initialize a jQuery object + + +// A central reference to the root jQuery(document) +var rootjQuery, + + // A simple way to check for HTML strings + // Prioritize #id over to avoid XSS via location.hash (#9521) + // Strict HTML recognition (#11290: must start with <) + // Shortcut simple #id case for speed + rquickExpr = /^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]+))$/, + + init = jQuery.fn.init = function( selector, context, root ) { + var match, elem; + + // HANDLE: $(""), $(null), $(undefined), $(false) + if ( !selector ) { + return this; + } + + // Method init() accepts an alternate rootjQuery + // so migrate can support jQuery.sub (gh-2101) + root = root || rootjQuery; + + // Handle HTML strings + if ( typeof selector === "string" ) { + if ( selector[ 0 ] === "<" && + selector[ selector.length - 1 ] === ">" && + selector.length >= 3 ) { + + // Assume that strings that start and end with <> are HTML and skip the regex check + match = [ null, selector, null ]; + + } else { + match = rquickExpr.exec( selector ); + } + + // Match html or make sure no context is specified for #id + if ( match && ( match[ 1 ] || !context ) ) { + + // HANDLE: $(html) -> $(array) + if ( match[ 1 ] ) { + context = context instanceof jQuery ? context[ 0 ] : context; + + // Option to run scripts is true for back-compat + // Intentionally let the error be thrown if parseHTML is not present + jQuery.merge( this, jQuery.parseHTML( + match[ 1 ], + context && context.nodeType ? context.ownerDocument || context : document, + true + ) ); + + // HANDLE: $(html, props) + if ( rsingleTag.test( match[ 1 ] ) && jQuery.isPlainObject( context ) ) { + for ( match in context ) { + + // Properties of context are called as methods if possible + if ( isFunction( this[ match ] ) ) { + this[ match ]( context[ match ] ); + + // ...and otherwise set as attributes + } else { + this.attr( match, context[ match ] ); + } + } + } + + return this; + + // HANDLE: $(#id) + } else { + elem = document.getElementById( match[ 2 ] ); + + if ( elem ) { + + // Inject the element directly into the jQuery object + this[ 0 ] = elem; + this.length = 1; + } + return this; + } + + // HANDLE: $(expr, $(...)) + } else if ( !context || context.jquery ) { + return ( context || root ).find( selector ); + + // HANDLE: $(expr, context) + // (which is just equivalent to: $(context).find(expr) + } else { + return this.constructor( context ).find( selector ); + } + + // HANDLE: $(DOMElement) + } else if ( selector.nodeType ) { + this[ 0 ] = selector; + this.length = 1; + return this; + + // HANDLE: $(function) + // Shortcut for document ready + } else if ( isFunction( selector ) ) { + return root.ready !== undefined ? 
+ root.ready( selector ) : + + // Execute immediately if ready is not present + selector( jQuery ); + } + + return jQuery.makeArray( selector, this ); + }; + +// Give the init function the jQuery prototype for later instantiation +init.prototype = jQuery.fn; + +// Initialize central reference +rootjQuery = jQuery( document ); + + +var rparentsprev = /^(?:parents|prev(?:Until|All))/, + + // Methods guaranteed to produce a unique set when starting from a unique set + guaranteedUnique = { + children: true, + contents: true, + next: true, + prev: true + }; + +jQuery.fn.extend( { + has: function( target ) { + var targets = jQuery( target, this ), + l = targets.length; + + return this.filter( function() { + var i = 0; + for ( ; i < l; i++ ) { + if ( jQuery.contains( this, targets[ i ] ) ) { + return true; + } + } + } ); + }, + + closest: function( selectors, context ) { + var cur, + i = 0, + l = this.length, + matched = [], + targets = typeof selectors !== "string" && jQuery( selectors ); + + // Positional selectors never match, since there's no _selection_ context + if ( !rneedsContext.test( selectors ) ) { + for ( ; i < l; i++ ) { + for ( cur = this[ i ]; cur && cur !== context; cur = cur.parentNode ) { + + // Always skip document fragments + if ( cur.nodeType < 11 && ( targets ? + targets.index( cur ) > -1 : + + // Don't pass non-elements to Sizzle + cur.nodeType === 1 && + jQuery.find.matchesSelector( cur, selectors ) ) ) { + + matched.push( cur ); + break; + } + } + } + } + + return this.pushStack( matched.length > 1 ? jQuery.uniqueSort( matched ) : matched ); + }, + + // Determine the position of an element within the set + index: function( elem ) { + + // No argument, return index in parent + if ( !elem ) { + return ( this[ 0 ] && this[ 0 ].parentNode ) ? this.first().prevAll().length : -1; + } + + // Index in selector + if ( typeof elem === "string" ) { + return indexOf.call( jQuery( elem ), this[ 0 ] ); + } + + // Locate the position of the desired element + return indexOf.call( this, + + // If it receives a jQuery object, the first element is used + elem.jquery ? elem[ 0 ] : elem + ); + }, + + add: function( selector, context ) { + return this.pushStack( + jQuery.uniqueSort( + jQuery.merge( this.get(), jQuery( selector, context ) ) + ) + ); + }, + + addBack: function( selector ) { + return this.add( selector == null ? + this.prevObject : this.prevObject.filter( selector ) + ); + } +} ); + +function sibling( cur, dir ) { + while ( ( cur = cur[ dir ] ) && cur.nodeType !== 1 ) {} + return cur; +} + +jQuery.each( { + parent: function( elem ) { + var parent = elem.parentNode; + return parent && parent.nodeType !== 11 ? 
parent : null; + }, + parents: function( elem ) { + return dir( elem, "parentNode" ); + }, + parentsUntil: function( elem, _i, until ) { + return dir( elem, "parentNode", until ); + }, + next: function( elem ) { + return sibling( elem, "nextSibling" ); + }, + prev: function( elem ) { + return sibling( elem, "previousSibling" ); + }, + nextAll: function( elem ) { + return dir( elem, "nextSibling" ); + }, + prevAll: function( elem ) { + return dir( elem, "previousSibling" ); + }, + nextUntil: function( elem, _i, until ) { + return dir( elem, "nextSibling", until ); + }, + prevUntil: function( elem, _i, until ) { + return dir( elem, "previousSibling", until ); + }, + siblings: function( elem ) { + return siblings( ( elem.parentNode || {} ).firstChild, elem ); + }, + children: function( elem ) { + return siblings( elem.firstChild ); + }, + contents: function( elem ) { + if ( elem.contentDocument != null && + + // Support: IE 11+ + // elements with no `data` attribute has an object + // `contentDocument` with a `null` prototype. + getProto( elem.contentDocument ) ) { + + return elem.contentDocument; + } + + // Support: IE 9 - 11 only, iOS 7 only, Android Browser <=4.3 only + // Treat the template element as a regular one in browsers that + // don't support it. + if ( nodeName( elem, "template" ) ) { + elem = elem.content || elem; + } + + return jQuery.merge( [], elem.childNodes ); + } +}, function( name, fn ) { + jQuery.fn[ name ] = function( until, selector ) { + var matched = jQuery.map( this, fn, until ); + + if ( name.slice( -5 ) !== "Until" ) { + selector = until; + } + + if ( selector && typeof selector === "string" ) { + matched = jQuery.filter( selector, matched ); + } + + if ( this.length > 1 ) { + + // Remove duplicates + if ( !guaranteedUnique[ name ] ) { + jQuery.uniqueSort( matched ); + } + + // Reverse order for parents* and prev-derivatives + if ( rparentsprev.test( name ) ) { + matched.reverse(); + } + } + + return this.pushStack( matched ); + }; +} ); +var rnothtmlwhite = ( /[^\x20\t\r\n\f]+/g ); + + + +// Convert String-formatted options into Object-formatted ones +function createOptions( options ) { + var object = {}; + jQuery.each( options.match( rnothtmlwhite ) || [], function( _, flag ) { + object[ flag ] = true; + } ); + return object; +} + +/* + * Create a callback list using the following parameters: + * + * options: an optional list of space-separated options that will change how + * the callback list behaves or a more traditional option object + * + * By default a callback list will act like an event callback list and can be + * "fired" multiple times. + * + * Possible options: + * + * once: will ensure the callback list can only be fired once (like a Deferred) + * + * memory: will keep track of previous values and will call any callback added + * after the list has been fired right away with the latest "memorized" + * values (like a Deferred) + * + * unique: will ensure a callback can only be added once (no duplicate in the list) + * + * stopOnFalse: interrupt callings when a callback returns false + * + */ +jQuery.Callbacks = function( options ) { + + // Convert options from String-formatted to Object-formatted if needed + // (we check in cache first) + options = typeof options === "string" ? 
+ createOptions( options ) : + jQuery.extend( {}, options ); + + var // Flag to know if list is currently firing + firing, + + // Last fire value for non-forgettable lists + memory, + + // Flag to know if list was already fired + fired, + + // Flag to prevent firing + locked, + + // Actual callback list + list = [], + + // Queue of execution data for repeatable lists + queue = [], + + // Index of currently firing callback (modified by add/remove as needed) + firingIndex = -1, + + // Fire callbacks + fire = function() { + + // Enforce single-firing + locked = locked || options.once; + + // Execute callbacks for all pending executions, + // respecting firingIndex overrides and runtime changes + fired = firing = true; + for ( ; queue.length; firingIndex = -1 ) { + memory = queue.shift(); + while ( ++firingIndex < list.length ) { + + // Run callback and check for early termination + if ( list[ firingIndex ].apply( memory[ 0 ], memory[ 1 ] ) === false && + options.stopOnFalse ) { + + // Jump to end and forget the data so .add doesn't re-fire + firingIndex = list.length; + memory = false; + } + } + } + + // Forget the data if we're done with it + if ( !options.memory ) { + memory = false; + } + + firing = false; + + // Clean up if we're done firing for good + if ( locked ) { + + // Keep an empty list if we have data for future add calls + if ( memory ) { + list = []; + + // Otherwise, this object is spent + } else { + list = ""; + } + } + }, + + // Actual Callbacks object + self = { + + // Add a callback or a collection of callbacks to the list + add: function() { + if ( list ) { + + // If we have memory from a past run, we should fire after adding + if ( memory && !firing ) { + firingIndex = list.length - 1; + queue.push( memory ); + } + + ( function add( args ) { + jQuery.each( args, function( _, arg ) { + if ( isFunction( arg ) ) { + if ( !options.unique || !self.has( arg ) ) { + list.push( arg ); + } + } else if ( arg && arg.length && toType( arg ) !== "string" ) { + + // Inspect recursively + add( arg ); + } + } ); + } )( arguments ); + + if ( memory && !firing ) { + fire(); + } + } + return this; + }, + + // Remove a callback from the list + remove: function() { + jQuery.each( arguments, function( _, arg ) { + var index; + while ( ( index = jQuery.inArray( arg, list, index ) ) > -1 ) { + list.splice( index, 1 ); + + // Handle firing indexes + if ( index <= firingIndex ) { + firingIndex--; + } + } + } ); + return this; + }, + + // Check if a given callback is in the list. + // If no argument is given, return whether or not list has callbacks attached. + has: function( fn ) { + return fn ? + jQuery.inArray( fn, list ) > -1 : + list.length > 0; + }, + + // Remove all callbacks from the list + empty: function() { + if ( list ) { + list = []; + } + return this; + }, + + // Disable .fire and .add + // Abort any current/pending executions + // Clear all callbacks and values + disable: function() { + locked = queue = []; + list = memory = ""; + return this; + }, + disabled: function() { + return !list; + }, + + // Disable .fire + // Also disable .add unless we have memory (since it would have no effect) + // Abort any pending executions + lock: function() { + locked = queue = []; + if ( !memory && !firing ) { + list = memory = ""; + } + return this; + }, + locked: function() { + return !!locked; + }, + + // Call all callbacks with the given context and arguments + fireWith: function( context, args ) { + if ( !locked ) { + args = args || []; + args = [ context, args.slice ? 
args.slice() : args ]; + queue.push( args ); + if ( !firing ) { + fire(); + } + } + return this; + }, + + // Call all the callbacks with the given arguments + fire: function() { + self.fireWith( this, arguments ); + return this; + }, + + // To know if the callbacks have already been called at least once + fired: function() { + return !!fired; + } + }; + + return self; +}; + + +function Identity( v ) { + return v; +} +function Thrower( ex ) { + throw ex; +} + +function adoptValue( value, resolve, reject, noValue ) { + var method; + + try { + + // Check for promise aspect first to privilege synchronous behavior + if ( value && isFunction( ( method = value.promise ) ) ) { + method.call( value ).done( resolve ).fail( reject ); + + // Other thenables + } else if ( value && isFunction( ( method = value.then ) ) ) { + method.call( value, resolve, reject ); + + // Other non-thenables + } else { + + // Control `resolve` arguments by letting Array#slice cast boolean `noValue` to integer: + // * false: [ value ].slice( 0 ) => resolve( value ) + // * true: [ value ].slice( 1 ) => resolve() + resolve.apply( undefined, [ value ].slice( noValue ) ); + } + + // For Promises/A+, convert exceptions into rejections + // Since jQuery.when doesn't unwrap thenables, we can skip the extra checks appearing in + // Deferred#then to conditionally suppress rejection. + } catch ( value ) { + + // Support: Android 4.0 only + // Strict mode functions invoked without .call/.apply get global-object context + reject.apply( undefined, [ value ] ); + } +} + +jQuery.extend( { + + Deferred: function( func ) { + var tuples = [ + + // action, add listener, callbacks, + // ... .then handlers, argument index, [final state] + [ "notify", "progress", jQuery.Callbacks( "memory" ), + jQuery.Callbacks( "memory" ), 2 ], + [ "resolve", "done", jQuery.Callbacks( "once memory" ), + jQuery.Callbacks( "once memory" ), 0, "resolved" ], + [ "reject", "fail", jQuery.Callbacks( "once memory" ), + jQuery.Callbacks( "once memory" ), 1, "rejected" ] + ], + state = "pending", + promise = { + state: function() { + return state; + }, + always: function() { + deferred.done( arguments ).fail( arguments ); + return this; + }, + "catch": function( fn ) { + return promise.then( null, fn ); + }, + + // Keep pipe for back-compat + pipe: function( /* fnDone, fnFail, fnProgress */ ) { + var fns = arguments; + + return jQuery.Deferred( function( newDefer ) { + jQuery.each( tuples, function( _i, tuple ) { + + // Map tuples (progress, done, fail) to arguments (done, fail, progress) + var fn = isFunction( fns[ tuple[ 4 ] ] ) && fns[ tuple[ 4 ] ]; + + // deferred.progress(function() { bind to newDefer or newDefer.notify }) + // deferred.done(function() { bind to newDefer or newDefer.resolve }) + // deferred.fail(function() { bind to newDefer or newDefer.reject }) + deferred[ tuple[ 1 ] ]( function() { + var returned = fn && fn.apply( this, arguments ); + if ( returned && isFunction( returned.promise ) ) { + returned.promise() + .progress( newDefer.notify ) + .done( newDefer.resolve ) + .fail( newDefer.reject ); + } else { + newDefer[ tuple[ 0 ] + "With" ]( + this, + fn ? 
[ returned ] : arguments + ); + } + } ); + } ); + fns = null; + } ).promise(); + }, + then: function( onFulfilled, onRejected, onProgress ) { + var maxDepth = 0; + function resolve( depth, deferred, handler, special ) { + return function() { + var that = this, + args = arguments, + mightThrow = function() { + var returned, then; + + // Support: Promises/A+ section 2.3.3.3.3 + // https://promisesaplus.com/#point-59 + // Ignore double-resolution attempts + if ( depth < maxDepth ) { + return; + } + + returned = handler.apply( that, args ); + + // Support: Promises/A+ section 2.3.1 + // https://promisesaplus.com/#point-48 + if ( returned === deferred.promise() ) { + throw new TypeError( "Thenable self-resolution" ); + } + + // Support: Promises/A+ sections 2.3.3.1, 3.5 + // https://promisesaplus.com/#point-54 + // https://promisesaplus.com/#point-75 + // Retrieve `then` only once + then = returned && + + // Support: Promises/A+ section 2.3.4 + // https://promisesaplus.com/#point-64 + // Only check objects and functions for thenability + ( typeof returned === "object" || + typeof returned === "function" ) && + returned.then; + + // Handle a returned thenable + if ( isFunction( then ) ) { + + // Special processors (notify) just wait for resolution + if ( special ) { + then.call( + returned, + resolve( maxDepth, deferred, Identity, special ), + resolve( maxDepth, deferred, Thrower, special ) + ); + + // Normal processors (resolve) also hook into progress + } else { + + // ...and disregard older resolution values + maxDepth++; + + then.call( + returned, + resolve( maxDepth, deferred, Identity, special ), + resolve( maxDepth, deferred, Thrower, special ), + resolve( maxDepth, deferred, Identity, + deferred.notifyWith ) + ); + } + + // Handle all other returned values + } else { + + // Only substitute handlers pass on context + // and multiple values (non-spec behavior) + if ( handler !== Identity ) { + that = undefined; + args = [ returned ]; + } + + // Process the value(s) + // Default process is resolve + ( special || deferred.resolveWith )( that, args ); + } + }, + + // Only normal processors (resolve) catch and reject exceptions + process = special ? + mightThrow : + function() { + try { + mightThrow(); + } catch ( e ) { + + if ( jQuery.Deferred.exceptionHook ) { + jQuery.Deferred.exceptionHook( e, + process.stackTrace ); + } + + // Support: Promises/A+ section 2.3.3.3.4.1 + // https://promisesaplus.com/#point-61 + // Ignore post-resolution exceptions + if ( depth + 1 >= maxDepth ) { + + // Only substitute handlers pass on context + // and multiple values (non-spec behavior) + if ( handler !== Thrower ) { + that = undefined; + args = [ e ]; + } + + deferred.rejectWith( that, args ); + } + } + }; + + // Support: Promises/A+ section 2.3.3.3.1 + // https://promisesaplus.com/#point-57 + // Re-resolve promises immediately to dodge false rejection from + // subsequent errors + if ( depth ) { + process(); + } else { + + // Call an optional hook to record the stack, in case of exception + // since it's otherwise lost when execution goes async + if ( jQuery.Deferred.getStackHook ) { + process.stackTrace = jQuery.Deferred.getStackHook(); + } + window.setTimeout( process ); + } + }; + } + + return jQuery.Deferred( function( newDefer ) { + + // progress_handlers.add( ... ) + tuples[ 0 ][ 3 ].add( + resolve( + 0, + newDefer, + isFunction( onProgress ) ? + onProgress : + Identity, + newDefer.notifyWith + ) + ); + + // fulfilled_handlers.add( ... 
) + tuples[ 1 ][ 3 ].add( + resolve( + 0, + newDefer, + isFunction( onFulfilled ) ? + onFulfilled : + Identity + ) + ); + + // rejected_handlers.add( ... ) + tuples[ 2 ][ 3 ].add( + resolve( + 0, + newDefer, + isFunction( onRejected ) ? + onRejected : + Thrower + ) + ); + } ).promise(); + }, + + // Get a promise for this deferred + // If obj is provided, the promise aspect is added to the object + promise: function( obj ) { + return obj != null ? jQuery.extend( obj, promise ) : promise; + } + }, + deferred = {}; + + // Add list-specific methods + jQuery.each( tuples, function( i, tuple ) { + var list = tuple[ 2 ], + stateString = tuple[ 5 ]; + + // promise.progress = list.add + // promise.done = list.add + // promise.fail = list.add + promise[ tuple[ 1 ] ] = list.add; + + // Handle state + if ( stateString ) { + list.add( + function() { + + // state = "resolved" (i.e., fulfilled) + // state = "rejected" + state = stateString; + }, + + // rejected_callbacks.disable + // fulfilled_callbacks.disable + tuples[ 3 - i ][ 2 ].disable, + + // rejected_handlers.disable + // fulfilled_handlers.disable + tuples[ 3 - i ][ 3 ].disable, + + // progress_callbacks.lock + tuples[ 0 ][ 2 ].lock, + + // progress_handlers.lock + tuples[ 0 ][ 3 ].lock + ); + } + + // progress_handlers.fire + // fulfilled_handlers.fire + // rejected_handlers.fire + list.add( tuple[ 3 ].fire ); + + // deferred.notify = function() { deferred.notifyWith(...) } + // deferred.resolve = function() { deferred.resolveWith(...) } + // deferred.reject = function() { deferred.rejectWith(...) } + deferred[ tuple[ 0 ] ] = function() { + deferred[ tuple[ 0 ] + "With" ]( this === deferred ? undefined : this, arguments ); + return this; + }; + + // deferred.notifyWith = list.fireWith + // deferred.resolveWith = list.fireWith + // deferred.rejectWith = list.fireWith + deferred[ tuple[ 0 ] + "With" ] = list.fireWith; + } ); + + // Make the deferred a promise + promise.promise( deferred ); + + // Call given func if any + if ( func ) { + func.call( deferred, deferred ); + } + + // All done! + return deferred; + }, + + // Deferred helper + when: function( singleValue ) { + var + + // count of uncompleted subordinates + remaining = arguments.length, + + // count of unprocessed arguments + i = remaining, + + // subordinate fulfillment data + resolveContexts = Array( i ), + resolveValues = slice.call( arguments ), + + // the master Deferred + master = jQuery.Deferred(), + + // subordinate callback factory + updateFunc = function( i ) { + return function( value ) { + resolveContexts[ i ] = this; + resolveValues[ i ] = arguments.length > 1 ? slice.call( arguments ) : value; + if ( !( --remaining ) ) { + master.resolveWith( resolveContexts, resolveValues ); + } + }; + }; + + // Single- and empty arguments are adopted like Promise.resolve + if ( remaining <= 1 ) { + adoptValue( singleValue, master.done( updateFunc( i ) ).resolve, master.reject, + !remaining ); + + // Use .then() to unwrap secondary thenables (cf. gh-3000) + if ( master.state() === "pending" || + isFunction( resolveValues[ i ] && resolveValues[ i ].then ) ) { + + return master.then(); + } + } + + // Multiple arguments are aggregated like Promise.all array elements + while ( i-- ) { + adoptValue( resolveValues[ i ], updateFunc( i ), master.reject ); + } + + return master.promise(); + } +} ); + + +// These usually indicate a programmer mistake during development, +// warn about them ASAP rather than swallowing them by default. 
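+// For example (an illustrative sketch; "undefinedFn" is a hypothetical missing function):
+// a ReferenceError thrown inside a .then() handler rejects the promise returned by
+// .then() and is also passed to jQuery.Deferred.exceptionHook below:
+//
+//   jQuery.Deferred().resolve().then( function() {
+//       undefinedFn();
+//   } );   // logs a "jQuery.Deferred exception: ..." warning when a console is present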
+var rerrorNames = /^(Eval|Internal|Range|Reference|Syntax|Type|URI)Error$/; + +jQuery.Deferred.exceptionHook = function( error, stack ) { + + // Support: IE 8 - 9 only + // Console exists when dev tools are open, which can happen at any time + if ( window.console && window.console.warn && error && rerrorNames.test( error.name ) ) { + window.console.warn( "jQuery.Deferred exception: " + error.message, error.stack, stack ); + } +}; + + + + +jQuery.readyException = function( error ) { + window.setTimeout( function() { + throw error; + } ); +}; + + + + +// The deferred used on DOM ready +var readyList = jQuery.Deferred(); + +jQuery.fn.ready = function( fn ) { + + readyList + .then( fn ) + + // Wrap jQuery.readyException in a function so that the lookup + // happens at the time of error handling instead of callback + // registration. + .catch( function( error ) { + jQuery.readyException( error ); + } ); + + return this; +}; + +jQuery.extend( { + + // Is the DOM ready to be used? Set to true once it occurs. + isReady: false, + + // A counter to track how many items to wait for before + // the ready event fires. See #6781 + readyWait: 1, + + // Handle when the DOM is ready + ready: function( wait ) { + + // Abort if there are pending holds or we're already ready + if ( wait === true ? --jQuery.readyWait : jQuery.isReady ) { + return; + } + + // Remember that the DOM is ready + jQuery.isReady = true; + + // If a normal DOM Ready event fired, decrement, and wait if need be + if ( wait !== true && --jQuery.readyWait > 0 ) { + return; + } + + // If there are functions bound, to execute + readyList.resolveWith( document, [ jQuery ] ); + } +} ); + +jQuery.ready.then = readyList.then; + +// The ready event handler and self cleanup method +function completed() { + document.removeEventListener( "DOMContentLoaded", completed ); + window.removeEventListener( "load", completed ); + jQuery.ready(); +} + +// Catch cases where $(document).ready() is called +// after the browser event has already occurred. +// Support: IE <=9 - 10 only +// Older IE sometimes signals "interactive" too soon +if ( document.readyState === "complete" || + ( document.readyState !== "loading" && !document.documentElement.doScroll ) ) { + + // Handle it asynchronously to allow scripts the opportunity to delay ready + window.setTimeout( jQuery.ready ); + +} else { + + // Use the handy event callback + document.addEventListener( "DOMContentLoaded", completed ); + + // A fallback to window.onload, that will always work + window.addEventListener( "load", completed ); +} + + + + +// Multifunctional method to get and set values of a collection +// The value/s can optionally be executed if it's a function +var access = function( elems, fn, key, value, chainable, emptyGet, raw ) { + var i = 0, + len = elems.length, + bulk = key == null; + + // Sets many values + if ( toType( key ) === "object" ) { + chainable = true; + for ( i in key ) { + access( elems, fn, i, key[ i ], true, emptyGet, raw ); + } + + // Sets one value + } else if ( value !== undefined ) { + chainable = true; + + if ( !isFunction( value ) ) { + raw = true; + } + + if ( bulk ) { + + // Bulk operations run against the entire set + if ( raw ) { + fn.call( elems, value ); + fn = null; + + // ...except when executing function values + } else { + bulk = fn; + fn = function( elem, _key, value ) { + return bulk.call( jQuery( elem ), value ); + }; + } + } + + if ( fn ) { + for ( ; i < len; i++ ) { + fn( + elems[ i ], key, raw ? 
+ value : + value.call( elems[ i ], i, fn( elems[ i ], key ) ) + ); + } + } + } + + if ( chainable ) { + return elems; + } + + // Gets + if ( bulk ) { + return fn.call( elems ); + } + + return len ? fn( elems[ 0 ], key ) : emptyGet; +}; + + +// Matches dashed string for camelizing +var rmsPrefix = /^-ms-/, + rdashAlpha = /-([a-z])/g; + +// Used by camelCase as callback to replace() +function fcamelCase( _all, letter ) { + return letter.toUpperCase(); +} + +// Convert dashed to camelCase; used by the css and data modules +// Support: IE <=9 - 11, Edge 12 - 15 +// Microsoft forgot to hump their vendor prefix (#9572) +function camelCase( string ) { + return string.replace( rmsPrefix, "ms-" ).replace( rdashAlpha, fcamelCase ); +} +var acceptData = function( owner ) { + + // Accepts only: + // - Node + // - Node.ELEMENT_NODE + // - Node.DOCUMENT_NODE + // - Object + // - Any + return owner.nodeType === 1 || owner.nodeType === 9 || !( +owner.nodeType ); +}; + + + + +function Data() { + this.expando = jQuery.expando + Data.uid++; +} + +Data.uid = 1; + +Data.prototype = { + + cache: function( owner ) { + + // Check if the owner object already has a cache + var value = owner[ this.expando ]; + + // If not, create one + if ( !value ) { + value = {}; + + // We can accept data for non-element nodes in modern browsers, + // but we should not, see #8335. + // Always return an empty object. + if ( acceptData( owner ) ) { + + // If it is a node unlikely to be stringify-ed or looped over + // use plain assignment + if ( owner.nodeType ) { + owner[ this.expando ] = value; + + // Otherwise secure it in a non-enumerable property + // configurable must be true to allow the property to be + // deleted when data is removed + } else { + Object.defineProperty( owner, this.expando, { + value: value, + configurable: true + } ); + } + } + } + + return value; + }, + set: function( owner, data, value ) { + var prop, + cache = this.cache( owner ); + + // Handle: [ owner, key, value ] args + // Always use camelCase key (gh-2257) + if ( typeof data === "string" ) { + cache[ camelCase( data ) ] = value; + + // Handle: [ owner, { properties } ] args + } else { + + // Copy the properties one-by-one to the cache object + for ( prop in data ) { + cache[ camelCase( prop ) ] = data[ prop ]; + } + } + return cache; + }, + get: function( owner, key ) { + return key === undefined ? + this.cache( owner ) : + + // Always use camelCase key (gh-2257) + owner[ this.expando ] && owner[ this.expando ][ camelCase( key ) ]; + }, + access: function( owner, key, value ) { + + // In cases where either: + // + // 1. No key was specified + // 2. A string key was specified, but no value provided + // + // Take the "read" path and allow the get method to determine + // which value to return, respectively either: + // + // 1. The entire cache object + // 2. The data stored at the key + // + if ( key === undefined || + ( ( key && typeof key === "string" ) && value === undefined ) ) { + + return this.get( owner, key ); + } + + // When the key is not a string, or both a key and value + // are specified, set or extend (existing objects) with either: + // + // 1. An object of properties + // 2. A key and value + // + this.set( owner, key, value ); + + // Since the "set" path can have two possible entry points + // return the expected data based on which path was taken[*] + return value !== undefined ? 
value : key; + }, + remove: function( owner, key ) { + var i, + cache = owner[ this.expando ]; + + if ( cache === undefined ) { + return; + } + + if ( key !== undefined ) { + + // Support array or space separated string of keys + if ( Array.isArray( key ) ) { + + // If key is an array of keys... + // We always set camelCase keys, so remove that. + key = key.map( camelCase ); + } else { + key = camelCase( key ); + + // If a key with the spaces exists, use it. + // Otherwise, create an array by matching non-whitespace + key = key in cache ? + [ key ] : + ( key.match( rnothtmlwhite ) || [] ); + } + + i = key.length; + + while ( i-- ) { + delete cache[ key[ i ] ]; + } + } + + // Remove the expando if there's no more data + if ( key === undefined || jQuery.isEmptyObject( cache ) ) { + + // Support: Chrome <=35 - 45 + // Webkit & Blink performance suffers when deleting properties + // from DOM nodes, so set to undefined instead + // https://bugs.chromium.org/p/chromium/issues/detail?id=378607 (bug restricted) + if ( owner.nodeType ) { + owner[ this.expando ] = undefined; + } else { + delete owner[ this.expando ]; + } + } + }, + hasData: function( owner ) { + var cache = owner[ this.expando ]; + return cache !== undefined && !jQuery.isEmptyObject( cache ); + } +}; +var dataPriv = new Data(); + +var dataUser = new Data(); + + + +// Implementation Summary +// +// 1. Enforce API surface and semantic compatibility with 1.9.x branch +// 2. Improve the module's maintainability by reducing the storage +// paths to a single mechanism. +// 3. Use the same single mechanism to support "private" and "user" data. +// 4. _Never_ expose "private" data to user code (TODO: Drop _data, _removeData) +// 5. Avoid exposing implementation details on user objects (eg. expando properties) +// 6. Provide a clear path for implementation upgrade to WeakMap in 2014 + +var rbrace = /^(?:\{[\w\W]*\}|\[[\w\W]*\])$/, + rmultiDash = /[A-Z]/g; + +function getData( data ) { + if ( data === "true" ) { + return true; + } + + if ( data === "false" ) { + return false; + } + + if ( data === "null" ) { + return null; + } + + // Only convert to a number if it doesn't change the string + if ( data === +data + "" ) { + return +data; + } + + if ( rbrace.test( data ) ) { + return JSON.parse( data ); + } + + return data; +} + +function dataAttr( elem, key, data ) { + var name; + + // If nothing was found internally, try to fetch any + // data from the HTML5 data-* attribute + if ( data === undefined && elem.nodeType === 1 ) { + name = "data-" + key.replace( rmultiDash, "-$&" ).toLowerCase(); + data = elem.getAttribute( name ); + + if ( typeof data === "string" ) { + try { + data = getData( data ); + } catch ( e ) {} + + // Make sure we set the data so it isn't changed later + dataUser.set( elem, key, data ); + } else { + data = undefined; + } + } + return data; +} + +jQuery.extend( { + hasData: function( elem ) { + return dataUser.hasData( elem ) || dataPriv.hasData( elem ); + }, + + data: function( elem, name, data ) { + return dataUser.access( elem, name, data ); + }, + + removeData: function( elem, name ) { + dataUser.remove( elem, name ); + }, + + // TODO: Now that all calls to _data and _removeData have been replaced + // with direct calls to dataPriv methods, these can be deprecated. 
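+	// Illustrative sketch (comment only; "div" and the key names are hypothetical):
+	//
+	//   jQuery.data( div, "role", "header" );   // user data, backed by dataUser
+	//   jQuery._data( div, "events" );          // private data, backed by dataPriv
+	//
+	// The two stores use separate expandos, so user data can never collide with
+	// jQuery's internal bookkeeping (events, queues, display state).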
+ _data: function( elem, name, data ) { + return dataPriv.access( elem, name, data ); + }, + + _removeData: function( elem, name ) { + dataPriv.remove( elem, name ); + } +} ); + +jQuery.fn.extend( { + data: function( key, value ) { + var i, name, data, + elem = this[ 0 ], + attrs = elem && elem.attributes; + + // Gets all values + if ( key === undefined ) { + if ( this.length ) { + data = dataUser.get( elem ); + + if ( elem.nodeType === 1 && !dataPriv.get( elem, "hasDataAttrs" ) ) { + i = attrs.length; + while ( i-- ) { + + // Support: IE 11 only + // The attrs elements can be null (#14894) + if ( attrs[ i ] ) { + name = attrs[ i ].name; + if ( name.indexOf( "data-" ) === 0 ) { + name = camelCase( name.slice( 5 ) ); + dataAttr( elem, name, data[ name ] ); + } + } + } + dataPriv.set( elem, "hasDataAttrs", true ); + } + } + + return data; + } + + // Sets multiple values + if ( typeof key === "object" ) { + return this.each( function() { + dataUser.set( this, key ); + } ); + } + + return access( this, function( value ) { + var data; + + // The calling jQuery object (element matches) is not empty + // (and therefore has an element appears at this[ 0 ]) and the + // `value` parameter was not undefined. An empty jQuery object + // will result in `undefined` for elem = this[ 0 ] which will + // throw an exception if an attempt to read a data cache is made. + if ( elem && value === undefined ) { + + // Attempt to get data from the cache + // The key will always be camelCased in Data + data = dataUser.get( elem, key ); + if ( data !== undefined ) { + return data; + } + + // Attempt to "discover" the data in + // HTML5 custom data-* attrs + data = dataAttr( elem, key ); + if ( data !== undefined ) { + return data; + } + + // We tried really hard, but the data doesn't exist. + return; + } + + // Set the data... 
+ this.each( function() { + + // We always store the camelCased key + dataUser.set( this, key, value ); + } ); + }, null, value, arguments.length > 1, null, true ); + }, + + removeData: function( key ) { + return this.each( function() { + dataUser.remove( this, key ); + } ); + } +} ); + + +jQuery.extend( { + queue: function( elem, type, data ) { + var queue; + + if ( elem ) { + type = ( type || "fx" ) + "queue"; + queue = dataPriv.get( elem, type ); + + // Speed up dequeue by getting out quickly if this is just a lookup + if ( data ) { + if ( !queue || Array.isArray( data ) ) { + queue = dataPriv.access( elem, type, jQuery.makeArray( data ) ); + } else { + queue.push( data ); + } + } + return queue || []; + } + }, + + dequeue: function( elem, type ) { + type = type || "fx"; + + var queue = jQuery.queue( elem, type ), + startLength = queue.length, + fn = queue.shift(), + hooks = jQuery._queueHooks( elem, type ), + next = function() { + jQuery.dequeue( elem, type ); + }; + + // If the fx queue is dequeued, always remove the progress sentinel + if ( fn === "inprogress" ) { + fn = queue.shift(); + startLength--; + } + + if ( fn ) { + + // Add a progress sentinel to prevent the fx queue from being + // automatically dequeued + if ( type === "fx" ) { + queue.unshift( "inprogress" ); + } + + // Clear up the last queue stop function + delete hooks.stop; + fn.call( elem, next, hooks ); + } + + if ( !startLength && hooks ) { + hooks.empty.fire(); + } + }, + + // Not public - generate a queueHooks object, or return the current one + _queueHooks: function( elem, type ) { + var key = type + "queueHooks"; + return dataPriv.get( elem, key ) || dataPriv.access( elem, key, { + empty: jQuery.Callbacks( "once memory" ).add( function() { + dataPriv.remove( elem, [ type + "queue", key ] ); + } ) + } ); + } +} ); + +jQuery.fn.extend( { + queue: function( type, data ) { + var setter = 2; + + if ( typeof type !== "string" ) { + data = type; + type = "fx"; + setter--; + } + + if ( arguments.length < setter ) { + return jQuery.queue( this[ 0 ], type ); + } + + return data === undefined ? 
+ this : + this.each( function() { + var queue = jQuery.queue( this, type, data ); + + // Ensure a hooks for this queue + jQuery._queueHooks( this, type ); + + if ( type === "fx" && queue[ 0 ] !== "inprogress" ) { + jQuery.dequeue( this, type ); + } + } ); + }, + dequeue: function( type ) { + return this.each( function() { + jQuery.dequeue( this, type ); + } ); + }, + clearQueue: function( type ) { + return this.queue( type || "fx", [] ); + }, + + // Get a promise resolved when queues of a certain type + // are emptied (fx is the type by default) + promise: function( type, obj ) { + var tmp, + count = 1, + defer = jQuery.Deferred(), + elements = this, + i = this.length, + resolve = function() { + if ( !( --count ) ) { + defer.resolveWith( elements, [ elements ] ); + } + }; + + if ( typeof type !== "string" ) { + obj = type; + type = undefined; + } + type = type || "fx"; + + while ( i-- ) { + tmp = dataPriv.get( elements[ i ], type + "queueHooks" ); + if ( tmp && tmp.empty ) { + count++; + tmp.empty.add( resolve ); + } + } + resolve(); + return defer.promise( obj ); + } +} ); +var pnum = ( /[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/ ).source; + +var rcssNum = new RegExp( "^(?:([+-])=|)(" + pnum + ")([a-z%]*)$", "i" ); + + +var cssExpand = [ "Top", "Right", "Bottom", "Left" ]; + +var documentElement = document.documentElement; + + + + var isAttached = function( elem ) { + return jQuery.contains( elem.ownerDocument, elem ); + }, + composed = { composed: true }; + + // Support: IE 9 - 11+, Edge 12 - 18+, iOS 10.0 - 10.2 only + // Check attachment across shadow DOM boundaries when possible (gh-3504) + // Support: iOS 10.0-10.2 only + // Early iOS 10 versions support `attachShadow` but not `getRootNode`, + // leading to errors. We need to check for `getRootNode`. + if ( documentElement.getRootNode ) { + isAttached = function( elem ) { + return jQuery.contains( elem.ownerDocument, elem ) || + elem.getRootNode( composed ) === elem.ownerDocument; + }; + } +var isHiddenWithinTree = function( elem, el ) { + + // isHiddenWithinTree might be called from jQuery#filter function; + // in that case, element will be second argument + elem = el || elem; + + // Inline style trumps all + return elem.style.display === "none" || + elem.style.display === "" && + + // Otherwise, check computed style + // Support: Firefox <=43 - 45 + // Disconnected elements can have computed display: none, so first confirm that elem is + // in the document. + isAttached( elem ) && + + jQuery.css( elem, "display" ) === "none"; + }; + + + +function adjustCSS( elem, prop, valueParts, tween ) { + var adjusted, scale, + maxIterations = 20, + currentValue = tween ? + function() { + return tween.cur(); + } : + function() { + return jQuery.css( elem, prop, "" ); + }, + initial = currentValue(), + unit = valueParts && valueParts[ 3 ] || ( jQuery.cssNumber[ prop ] ? 
"" : "px" ), + + // Starting value computation is required for potential unit mismatches + initialInUnit = elem.nodeType && + ( jQuery.cssNumber[ prop ] || unit !== "px" && +initial ) && + rcssNum.exec( jQuery.css( elem, prop ) ); + + if ( initialInUnit && initialInUnit[ 3 ] !== unit ) { + + // Support: Firefox <=54 + // Halve the iteration target value to prevent interference from CSS upper bounds (gh-2144) + initial = initial / 2; + + // Trust units reported by jQuery.css + unit = unit || initialInUnit[ 3 ]; + + // Iteratively approximate from a nonzero starting point + initialInUnit = +initial || 1; + + while ( maxIterations-- ) { + + // Evaluate and update our best guess (doubling guesses that zero out). + // Finish if the scale equals or crosses 1 (making the old*new product non-positive). + jQuery.style( elem, prop, initialInUnit + unit ); + if ( ( 1 - scale ) * ( 1 - ( scale = currentValue() / initial || 0.5 ) ) <= 0 ) { + maxIterations = 0; + } + initialInUnit = initialInUnit / scale; + + } + + initialInUnit = initialInUnit * 2; + jQuery.style( elem, prop, initialInUnit + unit ); + + // Make sure we update the tween properties later on + valueParts = valueParts || []; + } + + if ( valueParts ) { + initialInUnit = +initialInUnit || +initial || 0; + + // Apply relative offset (+=/-=) if specified + adjusted = valueParts[ 1 ] ? + initialInUnit + ( valueParts[ 1 ] + 1 ) * valueParts[ 2 ] : + +valueParts[ 2 ]; + if ( tween ) { + tween.unit = unit; + tween.start = initialInUnit; + tween.end = adjusted; + } + } + return adjusted; +} + + +var defaultDisplayMap = {}; + +function getDefaultDisplay( elem ) { + var temp, + doc = elem.ownerDocument, + nodeName = elem.nodeName, + display = defaultDisplayMap[ nodeName ]; + + if ( display ) { + return display; + } + + temp = doc.body.appendChild( doc.createElement( nodeName ) ); + display = jQuery.css( temp, "display" ); + + temp.parentNode.removeChild( temp ); + + if ( display === "none" ) { + display = "block"; + } + defaultDisplayMap[ nodeName ] = display; + + return display; +} + +function showHide( elements, show ) { + var display, elem, + values = [], + index = 0, + length = elements.length; + + // Determine new display value for elements that need to change + for ( ; index < length; index++ ) { + elem = elements[ index ]; + if ( !elem.style ) { + continue; + } + + display = elem.style.display; + if ( show ) { + + // Since we force visibility upon cascade-hidden elements, an immediate (and slow) + // check is required in this first loop unless we have a nonempty display value (either + // inline or about-to-be-restored) + if ( display === "none" ) { + values[ index ] = dataPriv.get( elem, "display" ) || null; + if ( !values[ index ] ) { + elem.style.display = ""; + } + } + if ( elem.style.display === "" && isHiddenWithinTree( elem ) ) { + values[ index ] = getDefaultDisplay( elem ); + } + } else { + if ( display !== "none" ) { + values[ index ] = "none"; + + // Remember what we're overwriting + dataPriv.set( elem, "display", display ); + } + } + } + + // Set the display of the elements in a second loop to avoid constant reflow + for ( index = 0; index < length; index++ ) { + if ( values[ index ] != null ) { + elements[ index ].style.display = values[ index ]; + } + } + + return elements; +} + +jQuery.fn.extend( { + show: function() { + return showHide( this, true ); + }, + hide: function() { + return showHide( this ); + }, + toggle: function( state ) { + if ( typeof state === "boolean" ) { + return state ? 
this.show() : this.hide(); + } + + return this.each( function() { + if ( isHiddenWithinTree( this ) ) { + jQuery( this ).show(); + } else { + jQuery( this ).hide(); + } + } ); + } +} ); +var rcheckableType = ( /^(?:checkbox|radio)$/i ); + +var rtagName = ( /<([a-z][^\/\0>\x20\t\r\n\f]*)/i ); + +var rscriptType = ( /^$|^module$|\/(?:java|ecma)script/i ); + + + +( function() { + var fragment = document.createDocumentFragment(), + div = fragment.appendChild( document.createElement( "div" ) ), + input = document.createElement( "input" ); + + // Support: Android 4.0 - 4.3 only + // Check state lost if the name is set (#11217) + // Support: Windows Web Apps (WWA) + // `name` and `type` must use .setAttribute for WWA (#14901) + input.setAttribute( "type", "radio" ); + input.setAttribute( "checked", "checked" ); + input.setAttribute( "name", "t" ); + + div.appendChild( input ); + + // Support: Android <=4.1 only + // Older WebKit doesn't clone checked state correctly in fragments + support.checkClone = div.cloneNode( true ).cloneNode( true ).lastChild.checked; + + // Support: IE <=11 only + // Make sure textarea (and checkbox) defaultValue is properly cloned + div.innerHTML = ""; + support.noCloneChecked = !!div.cloneNode( true ).lastChild.defaultValue; + + // Support: IE <=9 only + // IE <=9 replaces "; + support.option = !!div.lastChild; +} )(); + + +// We have to close these tags to support XHTML (#13200) +var wrapMap = { + + // XHTML parsers do not magically insert elements in the + // same way that tag soup parsers do. So we cannot shorten + // this by omitting or other required elements. + thead: [ 1, "", "
" ], + col: [ 2, "", "
" ], + tr: [ 2, "", "
" ], + td: [ 3, "", "
" ], + + _default: [ 0, "", "" ] +}; + +wrapMap.tbody = wrapMap.tfoot = wrapMap.colgroup = wrapMap.caption = wrapMap.thead; +wrapMap.th = wrapMap.td; + +// Support: IE <=9 only +if ( !support.option ) { + wrapMap.optgroup = wrapMap.option = [ 1, "" ]; +} + + +function getAll( context, tag ) { + + // Support: IE <=9 - 11 only + // Use typeof to avoid zero-argument method invocation on host objects (#15151) + var ret; + + if ( typeof context.getElementsByTagName !== "undefined" ) { + ret = context.getElementsByTagName( tag || "*" ); + + } else if ( typeof context.querySelectorAll !== "undefined" ) { + ret = context.querySelectorAll( tag || "*" ); + + } else { + ret = []; + } + + if ( tag === undefined || tag && nodeName( context, tag ) ) { + return jQuery.merge( [ context ], ret ); + } + + return ret; +} + + +// Mark scripts as having already been evaluated +function setGlobalEval( elems, refElements ) { + var i = 0, + l = elems.length; + + for ( ; i < l; i++ ) { + dataPriv.set( + elems[ i ], + "globalEval", + !refElements || dataPriv.get( refElements[ i ], "globalEval" ) + ); + } +} + + +var rhtml = /<|&#?\w+;/; + +function buildFragment( elems, context, scripts, selection, ignored ) { + var elem, tmp, tag, wrap, attached, j, + fragment = context.createDocumentFragment(), + nodes = [], + i = 0, + l = elems.length; + + for ( ; i < l; i++ ) { + elem = elems[ i ]; + + if ( elem || elem === 0 ) { + + // Add nodes directly + if ( toType( elem ) === "object" ) { + + // Support: Android <=4.0 only, PhantomJS 1 only + // push.apply(_, arraylike) throws on ancient WebKit + jQuery.merge( nodes, elem.nodeType ? [ elem ] : elem ); + + // Convert non-html into a text node + } else if ( !rhtml.test( elem ) ) { + nodes.push( context.createTextNode( elem ) ); + + // Convert html into DOM nodes + } else { + tmp = tmp || fragment.appendChild( context.createElement( "div" ) ); + + // Deserialize a standard representation + tag = ( rtagName.exec( elem ) || [ "", "" ] )[ 1 ].toLowerCase(); + wrap = wrapMap[ tag ] || wrapMap._default; + tmp.innerHTML = wrap[ 1 ] + jQuery.htmlPrefilter( elem ) + wrap[ 2 ]; + + // Descend through wrappers to the right content + j = wrap[ 0 ]; + while ( j-- ) { + tmp = tmp.lastChild; + } + + // Support: Android <=4.0 only, PhantomJS 1 only + // push.apply(_, arraylike) throws on ancient WebKit + jQuery.merge( nodes, tmp.childNodes ); + + // Remember the top-level container + tmp = fragment.firstChild; + + // Ensure the created nodes are orphaned (#12392) + tmp.textContent = ""; + } + } + } + + // Remove wrapper from fragment + fragment.textContent = ""; + + i = 0; + while ( ( elem = nodes[ i++ ] ) ) { + + // Skip elements already in the context collection (trac-4087) + if ( selection && jQuery.inArray( elem, selection ) > -1 ) { + if ( ignored ) { + ignored.push( elem ); + } + continue; + } + + attached = isAttached( elem ); + + // Append to fragment + tmp = getAll( fragment.appendChild( elem ), "script" ); + + // Preserve script evaluation history + if ( attached ) { + setGlobalEval( tmp ); + } + + // Capture executables + if ( scripts ) { + j = 0; + while ( ( elem = tmp[ j++ ] ) ) { + if ( rscriptType.test( elem.type || "" ) ) { + scripts.push( elem ); + } + } + } + } + + return fragment; +} + + +var + rkeyEvent = /^key/, + rmouseEvent = /^(?:mouse|pointer|contextmenu|drag|drop)|click/, + rtypenamespace = /^([^.]*)(?:\.(.+)|)/; + +function returnTrue() { + return true; +} + +function returnFalse() { + return false; +} + +// Support: IE <=9 - 11+ +// focus() and blur() are 
asynchronous, except when they are no-op. +// So expect focus to be synchronous when the element is already active, +// and blur to be synchronous when the element is not already active. +// (focus and blur are always synchronous in other supported browsers, +// this just defines when we can count on it). +function expectSync( elem, type ) { + return ( elem === safeActiveElement() ) === ( type === "focus" ); +} + +// Support: IE <=9 only +// Accessing document.activeElement can throw unexpectedly +// https://bugs.jquery.com/ticket/13393 +function safeActiveElement() { + try { + return document.activeElement; + } catch ( err ) { } +} + +function on( elem, types, selector, data, fn, one ) { + var origFn, type; + + // Types can be a map of types/handlers + if ( typeof types === "object" ) { + + // ( types-Object, selector, data ) + if ( typeof selector !== "string" ) { + + // ( types-Object, data ) + data = data || selector; + selector = undefined; + } + for ( type in types ) { + on( elem, type, selector, data, types[ type ], one ); + } + return elem; + } + + if ( data == null && fn == null ) { + + // ( types, fn ) + fn = selector; + data = selector = undefined; + } else if ( fn == null ) { + if ( typeof selector === "string" ) { + + // ( types, selector, fn ) + fn = data; + data = undefined; + } else { + + // ( types, data, fn ) + fn = data; + data = selector; + selector = undefined; + } + } + if ( fn === false ) { + fn = returnFalse; + } else if ( !fn ) { + return elem; + } + + if ( one === 1 ) { + origFn = fn; + fn = function( event ) { + + // Can use an empty set, since event contains the info + jQuery().off( event ); + return origFn.apply( this, arguments ); + }; + + // Use same guid so caller can remove using origFn + fn.guid = origFn.guid || ( origFn.guid = jQuery.guid++ ); + } + return elem.each( function() { + jQuery.event.add( this, types, fn, data, selector ); + } ); +} + +/* + * Helper functions for managing events -- not part of the public interface. + * Props to Dean Edwards' addEvent library for many of the ideas. + */ +jQuery.event = { + + global: {}, + + add: function( elem, types, handler, data, selector ) { + + var handleObjIn, eventHandle, tmp, + events, t, handleObj, + special, handlers, type, namespaces, origType, + elemData = dataPriv.get( elem ); + + // Only attach events to objects that accept data + if ( !acceptData( elem ) ) { + return; + } + + // Caller can pass in an object of custom data in lieu of the handler + if ( handler.handler ) { + handleObjIn = handler; + handler = handleObjIn.handler; + selector = handleObjIn.selector; + } + + // Ensure that invalid selectors throw exceptions at attach time + // Evaluate against documentElement in case elem is a non-element node (e.g., document) + if ( selector ) { + jQuery.find.matchesSelector( documentElement, selector ); + } + + // Make sure that the handler has a unique ID, used to find/remove it later + if ( !handler.guid ) { + handler.guid = jQuery.guid++; + } + + // Init the element's event structure and main handler, if this is the first + if ( !( events = elemData.events ) ) { + events = elemData.events = Object.create( null ); + } + if ( !( eventHandle = elemData.handle ) ) { + eventHandle = elemData.handle = function( e ) { + + // Discard the second event of a jQuery.event.trigger() and + // when an event is called after a page has unloaded + return typeof jQuery !== "undefined" && jQuery.event.triggered !== e.type ? 
+ jQuery.event.dispatch.apply( elem, arguments ) : undefined; + }; + } + + // Handle multiple events separated by a space + types = ( types || "" ).match( rnothtmlwhite ) || [ "" ]; + t = types.length; + while ( t-- ) { + tmp = rtypenamespace.exec( types[ t ] ) || []; + type = origType = tmp[ 1 ]; + namespaces = ( tmp[ 2 ] || "" ).split( "." ).sort(); + + // There *must* be a type, no attaching namespace-only handlers + if ( !type ) { + continue; + } + + // If event changes its type, use the special event handlers for the changed type + special = jQuery.event.special[ type ] || {}; + + // If selector defined, determine special event api type, otherwise given type + type = ( selector ? special.delegateType : special.bindType ) || type; + + // Update special based on newly reset type + special = jQuery.event.special[ type ] || {}; + + // handleObj is passed to all event handlers + handleObj = jQuery.extend( { + type: type, + origType: origType, + data: data, + handler: handler, + guid: handler.guid, + selector: selector, + needsContext: selector && jQuery.expr.match.needsContext.test( selector ), + namespace: namespaces.join( "." ) + }, handleObjIn ); + + // Init the event handler queue if we're the first + if ( !( handlers = events[ type ] ) ) { + handlers = events[ type ] = []; + handlers.delegateCount = 0; + + // Only use addEventListener if the special events handler returns false + if ( !special.setup || + special.setup.call( elem, data, namespaces, eventHandle ) === false ) { + + if ( elem.addEventListener ) { + elem.addEventListener( type, eventHandle ); + } + } + } + + if ( special.add ) { + special.add.call( elem, handleObj ); + + if ( !handleObj.handler.guid ) { + handleObj.handler.guid = handler.guid; + } + } + + // Add to the element's handler list, delegates in front + if ( selector ) { + handlers.splice( handlers.delegateCount++, 0, handleObj ); + } else { + handlers.push( handleObj ); + } + + // Keep track of which events have ever been used, for event optimization + jQuery.event.global[ type ] = true; + } + + }, + + // Detach an event or set of events from an element + remove: function( elem, types, handler, selector, mappedTypes ) { + + var j, origCount, tmp, + events, t, handleObj, + special, handlers, type, namespaces, origType, + elemData = dataPriv.hasData( elem ) && dataPriv.get( elem ); + + if ( !elemData || !( events = elemData.events ) ) { + return; + } + + // Once for each type.namespace in types; type may be omitted + types = ( types || "" ).match( rnothtmlwhite ) || [ "" ]; + t = types.length; + while ( t-- ) { + tmp = rtypenamespace.exec( types[ t ] ) || []; + type = origType = tmp[ 1 ]; + namespaces = ( tmp[ 2 ] || "" ).split( "." ).sort(); + + // Unbind all events (on this namespace, if provided) for the element + if ( !type ) { + for ( type in events ) { + jQuery.event.remove( elem, type + types[ t ], handler, selector, true ); + } + continue; + } + + special = jQuery.event.special[ type ] || {}; + type = ( selector ? 
special.delegateType : special.bindType ) || type; + handlers = events[ type ] || []; + tmp = tmp[ 2 ] && + new RegExp( "(^|\\.)" + namespaces.join( "\\.(?:.*\\.|)" ) + "(\\.|$)" ); + + // Remove matching events + origCount = j = handlers.length; + while ( j-- ) { + handleObj = handlers[ j ]; + + if ( ( mappedTypes || origType === handleObj.origType ) && + ( !handler || handler.guid === handleObj.guid ) && + ( !tmp || tmp.test( handleObj.namespace ) ) && + ( !selector || selector === handleObj.selector || + selector === "**" && handleObj.selector ) ) { + handlers.splice( j, 1 ); + + if ( handleObj.selector ) { + handlers.delegateCount--; + } + if ( special.remove ) { + special.remove.call( elem, handleObj ); + } + } + } + + // Remove generic event handler if we removed something and no more handlers exist + // (avoids potential for endless recursion during removal of special event handlers) + if ( origCount && !handlers.length ) { + if ( !special.teardown || + special.teardown.call( elem, namespaces, elemData.handle ) === false ) { + + jQuery.removeEvent( elem, type, elemData.handle ); + } + + delete events[ type ]; + } + } + + // Remove data and the expando if it's no longer used + if ( jQuery.isEmptyObject( events ) ) { + dataPriv.remove( elem, "handle events" ); + } + }, + + dispatch: function( nativeEvent ) { + + var i, j, ret, matched, handleObj, handlerQueue, + args = new Array( arguments.length ), + + // Make a writable jQuery.Event from the native event object + event = jQuery.event.fix( nativeEvent ), + + handlers = ( + dataPriv.get( this, "events" ) || Object.create( null ) + )[ event.type ] || [], + special = jQuery.event.special[ event.type ] || {}; + + // Use the fix-ed jQuery.Event rather than the (read-only) native event + args[ 0 ] = event; + + for ( i = 1; i < arguments.length; i++ ) { + args[ i ] = arguments[ i ]; + } + + event.delegateTarget = this; + + // Call the preDispatch hook for the mapped type, and let it bail if desired + if ( special.preDispatch && special.preDispatch.call( this, event ) === false ) { + return; + } + + // Determine handlers + handlerQueue = jQuery.event.handlers.call( this, event, handlers ); + + // Run delegates first; they may want to stop propagation beneath us + i = 0; + while ( ( matched = handlerQueue[ i++ ] ) && !event.isPropagationStopped() ) { + event.currentTarget = matched.elem; + + j = 0; + while ( ( handleObj = matched.handlers[ j++ ] ) && + !event.isImmediatePropagationStopped() ) { + + // If the event is namespaced, then each handler is only invoked if it is + // specially universal or its namespaces are a superset of the event's. 
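+ // A minimal usage sketch (illustrative only; the element id and handler function are hypothetical):
+ // a handler bound as "click.menu.ui" satisfies this superset rule for .trigger( "click.menu" )
+ // and for a plain .trigger( "click" ), but not for .trigger( "click.other" ):
+ //
+ //   jQuery( "#btn" ).on( "click.menu.ui", handler );
+ //   jQuery( "#btn" ).trigger( "click.menu" );   // handler runs
+ //   jQuery( "#btn" ).trigger( "click.other" );  // handler skipped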
+ if ( !event.rnamespace || handleObj.namespace === false || + event.rnamespace.test( handleObj.namespace ) ) { + + event.handleObj = handleObj; + event.data = handleObj.data; + + ret = ( ( jQuery.event.special[ handleObj.origType ] || {} ).handle || + handleObj.handler ).apply( matched.elem, args ); + + if ( ret !== undefined ) { + if ( ( event.result = ret ) === false ) { + event.preventDefault(); + event.stopPropagation(); + } + } + } + } + } + + // Call the postDispatch hook for the mapped type + if ( special.postDispatch ) { + special.postDispatch.call( this, event ); + } + + return event.result; + }, + + handlers: function( event, handlers ) { + var i, handleObj, sel, matchedHandlers, matchedSelectors, + handlerQueue = [], + delegateCount = handlers.delegateCount, + cur = event.target; + + // Find delegate handlers + if ( delegateCount && + + // Support: IE <=9 + // Black-hole SVG instance trees (trac-13180) + cur.nodeType && + + // Support: Firefox <=42 + // Suppress spec-violating clicks indicating a non-primary pointer button (trac-3861) + // https://www.w3.org/TR/DOM-Level-3-Events/#event-type-click + // Support: IE 11 only + // ...but not arrow key "clicks" of radio inputs, which can have `button` -1 (gh-2343) + !( event.type === "click" && event.button >= 1 ) ) { + + for ( ; cur !== this; cur = cur.parentNode || this ) { + + // Don't check non-elements (#13208) + // Don't process clicks on disabled elements (#6911, #8165, #11382, #11764) + if ( cur.nodeType === 1 && !( event.type === "click" && cur.disabled === true ) ) { + matchedHandlers = []; + matchedSelectors = {}; + for ( i = 0; i < delegateCount; i++ ) { + handleObj = handlers[ i ]; + + // Don't conflict with Object.prototype properties (#13203) + sel = handleObj.selector + " "; + + if ( matchedSelectors[ sel ] === undefined ) { + matchedSelectors[ sel ] = handleObj.needsContext ? + jQuery( sel, this ).index( cur ) > -1 : + jQuery.find( sel, this, null, [ cur ] ).length; + } + if ( matchedSelectors[ sel ] ) { + matchedHandlers.push( handleObj ); + } + } + if ( matchedHandlers.length ) { + handlerQueue.push( { elem: cur, handlers: matchedHandlers } ); + } + } + } + } + + // Add the remaining (directly-bound) handlers + cur = this; + if ( delegateCount < handlers.length ) { + handlerQueue.push( { elem: cur, handlers: handlers.slice( delegateCount ) } ); + } + + return handlerQueue; + }, + + addProp: function( name, hook ) { + Object.defineProperty( jQuery.Event.prototype, name, { + enumerable: true, + configurable: true, + + get: isFunction( hook ) ? + function() { + if ( this.originalEvent ) { + return hook( this.originalEvent ); + } + } : + function() { + if ( this.originalEvent ) { + return this.originalEvent[ name ]; + } + }, + + set: function( value ) { + Object.defineProperty( this, name, { + enumerable: true, + configurable: true, + writable: true, + value: value + } ); + } + } ); + }, + + fix: function( originalEvent ) { + return originalEvent[ jQuery.expando ] ? + originalEvent : + new jQuery.Event( originalEvent ); + }, + + special: { + load: { + + // Prevent triggered image.load events from bubbling to window.load + noBubble: true + }, + click: { + + // Utilize native event to ensure correct state for checkable inputs + setup: function( data ) { + + // For mutual compressibility with _default, replace `this` access with a local var. + // `|| data` is dead code meant only to preserve the variable through minification. 
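+ // A minimal usage sketch (illustrative only; the checkbox id is hypothetical): leveraging the
+ // native click is what makes a synthetic .trigger( "click" ) report the already-toggled state
+ // inside handlers, just as a real click would:
+ //
+ //   jQuery( "#opt" ).on( "click", function() {
+ //       console.log( this.checked );  // reflects the new state
+ //   } ).trigger( "click" );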
+ var el = this || data; + + // Claim the first handler + if ( rcheckableType.test( el.type ) && + el.click && nodeName( el, "input" ) ) { + + // dataPriv.set( el, "click", ... ) + leverageNative( el, "click", returnTrue ); + } + + // Return false to allow normal processing in the caller + return false; + }, + trigger: function( data ) { + + // For mutual compressibility with _default, replace `this` access with a local var. + // `|| data` is dead code meant only to preserve the variable through minification. + var el = this || data; + + // Force setup before triggering a click + if ( rcheckableType.test( el.type ) && + el.click && nodeName( el, "input" ) ) { + + leverageNative( el, "click" ); + } + + // Return non-false to allow normal event-path propagation + return true; + }, + + // For cross-browser consistency, suppress native .click() on links + // Also prevent it if we're currently inside a leveraged native-event stack + _default: function( event ) { + var target = event.target; + return rcheckableType.test( target.type ) && + target.click && nodeName( target, "input" ) && + dataPriv.get( target, "click" ) || + nodeName( target, "a" ); + } + }, + + beforeunload: { + postDispatch: function( event ) { + + // Support: Firefox 20+ + // Firefox doesn't alert if the returnValue field is not set. + if ( event.result !== undefined && event.originalEvent ) { + event.originalEvent.returnValue = event.result; + } + } + } + } +}; + +// Ensure the presence of an event listener that handles manually-triggered +// synthetic events by interrupting progress until reinvoked in response to +// *native* events that it fires directly, ensuring that state changes have +// already occurred before other listeners are invoked. +function leverageNative( el, type, expectSync ) { + + // Missing expectSync indicates a trigger call, which must force setup through jQuery.event.add + if ( !expectSync ) { + if ( dataPriv.get( el, type ) === undefined ) { + jQuery.event.add( el, type, returnTrue ); + } + return; + } + + // Register the controller as a special universal handler for all event namespaces + dataPriv.set( el, type, false ); + jQuery.event.add( el, type, { + namespace: false, + handler: function( event ) { + var notAsync, result, + saved = dataPriv.get( this, type ); + + if ( ( event.isTrigger & 1 ) && this[ type ] ) { + + // Interrupt processing of the outer synthetic .trigger()ed event + // Saved data should be false in such cases, but might be a leftover capture object + // from an async native handler (gh-4350) + if ( !saved.length ) { + + // Store arguments for use when handling the inner native event + // There will always be at least one argument (an event object), so this array + // will not be confused with a leftover capture object. 
+ saved = slice.call( arguments ); + dataPriv.set( this, type, saved ); + + // Trigger the native event and capture its result + // Support: IE <=9 - 11+ + // focus() and blur() are asynchronous + notAsync = expectSync( this, type ); + this[ type ](); + result = dataPriv.get( this, type ); + if ( saved !== result || notAsync ) { + dataPriv.set( this, type, false ); + } else { + result = {}; + } + if ( saved !== result ) { + + // Cancel the outer synthetic event + event.stopImmediatePropagation(); + event.preventDefault(); + return result.value; + } + + // If this is an inner synthetic event for an event with a bubbling surrogate + // (focus or blur), assume that the surrogate already propagated from triggering the + // native event and prevent that from happening again here. + // This technically gets the ordering wrong w.r.t. to `.trigger()` (in which the + // bubbling surrogate propagates *after* the non-bubbling base), but that seems + // less bad than duplication. + } else if ( ( jQuery.event.special[ type ] || {} ).delegateType ) { + event.stopPropagation(); + } + + // If this is a native event triggered above, everything is now in order + // Fire an inner synthetic event with the original arguments + } else if ( saved.length ) { + + // ...and capture the result + dataPriv.set( this, type, { + value: jQuery.event.trigger( + + // Support: IE <=9 - 11+ + // Extend with the prototype to reset the above stopImmediatePropagation() + jQuery.extend( saved[ 0 ], jQuery.Event.prototype ), + saved.slice( 1 ), + this + ) + } ); + + // Abort handling of the native event + event.stopImmediatePropagation(); + } + } + } ); +} + +jQuery.removeEvent = function( elem, type, handle ) { + + // This "if" is needed for plain objects + if ( elem.removeEventListener ) { + elem.removeEventListener( type, handle ); + } +}; + +jQuery.Event = function( src, props ) { + + // Allow instantiation without the 'new' keyword + if ( !( this instanceof jQuery.Event ) ) { + return new jQuery.Event( src, props ); + } + + // Event object + if ( src && src.type ) { + this.originalEvent = src; + this.type = src.type; + + // Events bubbling up the document may have been marked as prevented + // by a handler lower down the tree; reflect the correct value. + this.isDefaultPrevented = src.defaultPrevented || + src.defaultPrevented === undefined && + + // Support: Android <=2.3 only + src.returnValue === false ? + returnTrue : + returnFalse; + + // Create target properties + // Support: Safari <=6 - 7 only + // Target should not be a text node (#504, #13143) + this.target = ( src.target && src.target.nodeType === 3 ) ? 
+ src.target.parentNode : + src.target; + + this.currentTarget = src.currentTarget; + this.relatedTarget = src.relatedTarget; + + // Event type + } else { + this.type = src; + } + + // Put explicitly provided properties onto the event object + if ( props ) { + jQuery.extend( this, props ); + } + + // Create a timestamp if incoming event doesn't have one + this.timeStamp = src && src.timeStamp || Date.now(); + + // Mark it as fixed + this[ jQuery.expando ] = true; +}; + +// jQuery.Event is based on DOM3 Events as specified by the ECMAScript Language Binding +// https://www.w3.org/TR/2003/WD-DOM-Level-3-Events-20030331/ecma-script-binding.html +jQuery.Event.prototype = { + constructor: jQuery.Event, + isDefaultPrevented: returnFalse, + isPropagationStopped: returnFalse, + isImmediatePropagationStopped: returnFalse, + isSimulated: false, + + preventDefault: function() { + var e = this.originalEvent; + + this.isDefaultPrevented = returnTrue; + + if ( e && !this.isSimulated ) { + e.preventDefault(); + } + }, + stopPropagation: function() { + var e = this.originalEvent; + + this.isPropagationStopped = returnTrue; + + if ( e && !this.isSimulated ) { + e.stopPropagation(); + } + }, + stopImmediatePropagation: function() { + var e = this.originalEvent; + + this.isImmediatePropagationStopped = returnTrue; + + if ( e && !this.isSimulated ) { + e.stopImmediatePropagation(); + } + + this.stopPropagation(); + } +}; + +// Includes all common event props including KeyEvent and MouseEvent specific props +jQuery.each( { + altKey: true, + bubbles: true, + cancelable: true, + changedTouches: true, + ctrlKey: true, + detail: true, + eventPhase: true, + metaKey: true, + pageX: true, + pageY: true, + shiftKey: true, + view: true, + "char": true, + code: true, + charCode: true, + key: true, + keyCode: true, + button: true, + buttons: true, + clientX: true, + clientY: true, + offsetX: true, + offsetY: true, + pointerId: true, + pointerType: true, + screenX: true, + screenY: true, + targetTouches: true, + toElement: true, + touches: true, + + which: function( event ) { + var button = event.button; + + // Add which for key events + if ( event.which == null && rkeyEvent.test( event.type ) ) { + return event.charCode != null ? event.charCode : event.keyCode; + } + + // Add which for click: 1 === left; 2 === middle; 3 === right + if ( !event.which && button !== undefined && rmouseEvent.test( event.type ) ) { + if ( button & 1 ) { + return 1; + } + + if ( button & 2 ) { + return 3; + } + + if ( button & 4 ) { + return 2; + } + + return 0; + } + + return event.which; + } +}, jQuery.event.addProp ); + +jQuery.each( { focus: "focusin", blur: "focusout" }, function( type, delegateType ) { + jQuery.event.special[ type ] = { + + // Utilize native event if possible so blur/focus sequence is correct + setup: function() { + + // Claim the first handler + // dataPriv.set( this, "focus", ... ) + // dataPriv.set( this, "blur", ... ) + leverageNative( this, type, expectSync ); + + // Return false to allow normal processing in the caller + return false; + }, + trigger: function() { + + // Force setup before trigger + leverageNative( this, type ); + + // Return non-false to allow normal event-path propagation + return true; + }, + + delegateType: delegateType + }; +} ); + +// Create mouseenter/leave events using mouseover/out and event-time checks +// so that event delegation works in jQuery. 
+// Do the same for pointerenter/pointerleave and pointerover/pointerout +// +// Support: Safari 7 only +// Safari sends mouseenter too often; see: +// https://bugs.chromium.org/p/chromium/issues/detail?id=470258 +// for the description of the bug (it existed in older Chrome versions as well). +jQuery.each( { + mouseenter: "mouseover", + mouseleave: "mouseout", + pointerenter: "pointerover", + pointerleave: "pointerout" +}, function( orig, fix ) { + jQuery.event.special[ orig ] = { + delegateType: fix, + bindType: fix, + + handle: function( event ) { + var ret, + target = this, + related = event.relatedTarget, + handleObj = event.handleObj; + + // For mouseenter/leave call the handler if related is outside the target. + // NB: No relatedTarget if the mouse left/entered the browser window + if ( !related || ( related !== target && !jQuery.contains( target, related ) ) ) { + event.type = handleObj.origType; + ret = handleObj.handler.apply( this, arguments ); + event.type = fix; + } + return ret; + } + }; +} ); + +jQuery.fn.extend( { + + on: function( types, selector, data, fn ) { + return on( this, types, selector, data, fn ); + }, + one: function( types, selector, data, fn ) { + return on( this, types, selector, data, fn, 1 ); + }, + off: function( types, selector, fn ) { + var handleObj, type; + if ( types && types.preventDefault && types.handleObj ) { + + // ( event ) dispatched jQuery.Event + handleObj = types.handleObj; + jQuery( types.delegateTarget ).off( + handleObj.namespace ? + handleObj.origType + "." + handleObj.namespace : + handleObj.origType, + handleObj.selector, + handleObj.handler + ); + return this; + } + if ( typeof types === "object" ) { + + // ( types-object [, selector] ) + for ( type in types ) { + this.off( type, selector, types[ type ] ); + } + return this; + } + if ( selector === false || typeof selector === "function" ) { + + // ( types [, fn] ) + fn = selector; + selector = undefined; + } + if ( fn === false ) { + fn = returnFalse; + } + return this.each( function() { + jQuery.event.remove( this, types, fn, selector ); + } ); + } +} ); + + +var + + // Support: IE <=10 - 11, Edge 12 - 13 only + // In IE/Edge using regex groups here causes severe slowdowns. + // See https://connect.microsoft.com/IE/feedback/details/1736512/ + rnoInnerhtml = /<script|<style|<link/i, + + // checked="checked" or checked + rchecked = /checked\s*(?:[^=]|=\s*.checked.)/i, + + rcleanScript = /^\s*<!(?:\[CDATA\[|--)|(?:\]\]|--)>\s*$/g; + +// Prefer a tbody over its parent table for containing new rows +function manipulationTarget( elem, content ) { + if ( nodeName( elem, "table" ) && + nodeName( content.nodeType !== 11 ? content : content.firstChild, "tr" ) ) { + + return jQuery( elem ).children( "tbody" )[ 0 ] || elem; + } + + return elem; +} + +// Replace/restore the type attribute of script elements for safe DOM manipulation +function disableScript( elem ) { + elem.type = ( elem.getAttribute( "type" ) !== null ) + "/" + elem.type; + return elem; +} +function restoreScript( elem ) { + if ( ( elem.type || "" ).slice( 0, 5 ) === "true/" ) { + elem.type = elem.type.slice( 5 ); + } else { + elem.removeAttribute( "type" ); + } + + return elem; +} + +function cloneCopyEvent( src, dest ) { + var i, l, type, pdataOld, udataOld, udataCur, events; + + if ( dest.nodeType !== 1 ) { + return; + } + + // 1. Copy private data: events, handlers, etc.
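+ // A minimal usage sketch (illustrative only; #tpl is hypothetical): this private-data copy is
+ // what lets .clone( true ) carry bound handlers over to the copy:
+ //
+ //   var copy = jQuery( "#tpl" ).clone( true );  // handlers are re-added on the clone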
+ if ( dataPriv.hasData( src ) ) { + pdataOld = dataPriv.get( src ); + events = pdataOld.events; + + if ( events ) { + dataPriv.remove( dest, "handle events" ); + + for ( type in events ) { + for ( i = 0, l = events[ type ].length; i < l; i++ ) { + jQuery.event.add( dest, type, events[ type ][ i ] ); + } + } + } + } + + // 2. Copy user data + if ( dataUser.hasData( src ) ) { + udataOld = dataUser.access( src ); + udataCur = jQuery.extend( {}, udataOld ); + + dataUser.set( dest, udataCur ); + } +} + +// Fix IE bugs, see support tests +function fixInput( src, dest ) { + var nodeName = dest.nodeName.toLowerCase(); + + // Fails to persist the checked state of a cloned checkbox or radio button. + if ( nodeName === "input" && rcheckableType.test( src.type ) ) { + dest.checked = src.checked; + + // Fails to return the selected option to the default selected state when cloning options + } else if ( nodeName === "input" || nodeName === "textarea" ) { + dest.defaultValue = src.defaultValue; + } +} + +function domManip( collection, args, callback, ignored ) { + + // Flatten any nested arrays + args = flat( args ); + + var fragment, first, scripts, hasScripts, node, doc, + i = 0, + l = collection.length, + iNoClone = l - 1, + value = args[ 0 ], + valueIsFunction = isFunction( value ); + + // We can't cloneNode fragments that contain checked, in WebKit + if ( valueIsFunction || + ( l > 1 && typeof value === "string" && + !support.checkClone && rchecked.test( value ) ) ) { + return collection.each( function( index ) { + var self = collection.eq( index ); + if ( valueIsFunction ) { + args[ 0 ] = value.call( this, index, self.html() ); + } + domManip( self, args, callback, ignored ); + } ); + } + + if ( l ) { + fragment = buildFragment( args, collection[ 0 ].ownerDocument, false, collection, ignored ); + first = fragment.firstChild; + + if ( fragment.childNodes.length === 1 ) { + fragment = first; + } + + // Require either new content or an interest in ignored elements to invoke the callback + if ( first || ignored ) { + scripts = jQuery.map( getAll( fragment, "script" ), disableScript ); + hasScripts = scripts.length; + + // Use the original fragment for the last item + // instead of the first because it can end up + // being emptied incorrectly in certain situations (#8070). 
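+ // A minimal usage sketch (illustrative only; the .box selector is hypothetical): when one node
+ // is inserted into a multi-element set, every target but the last receives a clone and the last
+ // receives the original:
+ //
+ //   var badge = jQuery( "<b>new</b>" )[ 0 ];
+ //   jQuery( ".box" ).append( badge );  // cloned for all .box elements but the last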
+ for ( ; i < l; i++ ) { + node = fragment; + + if ( i !== iNoClone ) { + node = jQuery.clone( node, true, true ); + + // Keep references to cloned scripts for later restoration + if ( hasScripts ) { + + // Support: Android <=4.0 only, PhantomJS 1 only + // push.apply(_, arraylike) throws on ancient WebKit + jQuery.merge( scripts, getAll( node, "script" ) ); + } + } + + callback.call( collection[ i ], node, i ); + } + + if ( hasScripts ) { + doc = scripts[ scripts.length - 1 ].ownerDocument; + + // Reenable scripts + jQuery.map( scripts, restoreScript ); + + // Evaluate executable scripts on first document insertion + for ( i = 0; i < hasScripts; i++ ) { + node = scripts[ i ]; + if ( rscriptType.test( node.type || "" ) && + !dataPriv.access( node, "globalEval" ) && + jQuery.contains( doc, node ) ) { + + if ( node.src && ( node.type || "" ).toLowerCase() !== "module" ) { + + // Optional AJAX dependency, but won't run scripts if not present + if ( jQuery._evalUrl && !node.noModule ) { + jQuery._evalUrl( node.src, { + nonce: node.nonce || node.getAttribute( "nonce" ) + }, doc ); + } + } else { + DOMEval( node.textContent.replace( rcleanScript, "" ), node, doc ); + } + } + } + } + } + } + + return collection; +} + +function remove( elem, selector, keepData ) { + var node, + nodes = selector ? jQuery.filter( selector, elem ) : elem, + i = 0; + + for ( ; ( node = nodes[ i ] ) != null; i++ ) { + if ( !keepData && node.nodeType === 1 ) { + jQuery.cleanData( getAll( node ) ); + } + + if ( node.parentNode ) { + if ( keepData && isAttached( node ) ) { + setGlobalEval( getAll( node, "script" ) ); + } + node.parentNode.removeChild( node ); + } + } + + return elem; +} + +jQuery.extend( { + htmlPrefilter: function( html ) { + return html; + }, + + clone: function( elem, dataAndEvents, deepDataAndEvents ) { + var i, l, srcElements, destElements, + clone = elem.cloneNode( true ), + inPage = isAttached( elem ); + + // Fix IE cloning issues + if ( !support.noCloneChecked && ( elem.nodeType === 1 || elem.nodeType === 11 ) && + !jQuery.isXMLDoc( elem ) ) { + + // We eschew Sizzle here for performance reasons: https://jsperf.com/getall-vs-sizzle/2 + destElements = getAll( clone ); + srcElements = getAll( elem ); + + for ( i = 0, l = srcElements.length; i < l; i++ ) { + fixInput( srcElements[ i ], destElements[ i ] ); + } + } + + // Copy the events from the original to the clone + if ( dataAndEvents ) { + if ( deepDataAndEvents ) { + srcElements = srcElements || getAll( elem ); + destElements = destElements || getAll( clone ); + + for ( i = 0, l = srcElements.length; i < l; i++ ) { + cloneCopyEvent( srcElements[ i ], destElements[ i ] ); + } + } else { + cloneCopyEvent( elem, clone ); + } + } + + // Preserve script evaluation history + destElements = getAll( clone, "script" ); + if ( destElements.length > 0 ) { + setGlobalEval( destElements, !inPage && getAll( elem, "script" ) ); + } + + // Return the cloned set + return clone; + }, + + cleanData: function( elems ) { + var data, elem, type, + special = jQuery.event.special, + i = 0; + + for ( ; ( elem = elems[ i ] ) !== undefined; i++ ) { + if ( acceptData( elem ) ) { + if ( ( data = elem[ dataPriv.expando ] ) ) { + if ( data.events ) { + for ( type in data.events ) { + if ( special[ type ] ) { + jQuery.event.remove( elem, type ); + + // This is a shortcut to avoid jQuery.event.remove's overhead + } else { + jQuery.removeEvent( elem, type, data.handle ); + } + } + } + + // Support: Chrome <=35 - 45+ + // Assign undefined instead of using delete, see Data#remove 
+ elem[ dataPriv.expando ] = undefined; + } + if ( elem[ dataUser.expando ] ) { + + // Support: Chrome <=35 - 45+ + // Assign undefined instead of using delete, see Data#remove + elem[ dataUser.expando ] = undefined; + } + } + } + } +} ); + +jQuery.fn.extend( { + detach: function( selector ) { + return remove( this, selector, true ); + }, + + remove: function( selector ) { + return remove( this, selector ); + }, + + text: function( value ) { + return access( this, function( value ) { + return value === undefined ? + jQuery.text( this ) : + this.empty().each( function() { + if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { + this.textContent = value; + } + } ); + }, null, value, arguments.length ); + }, + + append: function() { + return domManip( this, arguments, function( elem ) { + if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { + var target = manipulationTarget( this, elem ); + target.appendChild( elem ); + } + } ); + }, + + prepend: function() { + return domManip( this, arguments, function( elem ) { + if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { + var target = manipulationTarget( this, elem ); + target.insertBefore( elem, target.firstChild ); + } + } ); + }, + + before: function() { + return domManip( this, arguments, function( elem ) { + if ( this.parentNode ) { + this.parentNode.insertBefore( elem, this ); + } + } ); + }, + + after: function() { + return domManip( this, arguments, function( elem ) { + if ( this.parentNode ) { + this.parentNode.insertBefore( elem, this.nextSibling ); + } + } ); + }, + + empty: function() { + var elem, + i = 0; + + for ( ; ( elem = this[ i ] ) != null; i++ ) { + if ( elem.nodeType === 1 ) { + + // Prevent memory leaks + jQuery.cleanData( getAll( elem, false ) ); + + // Remove any remaining nodes + elem.textContent = ""; + } + } + + return this; + }, + + clone: function( dataAndEvents, deepDataAndEvents ) { + dataAndEvents = dataAndEvents == null ? false : dataAndEvents; + deepDataAndEvents = deepDataAndEvents == null ? 
dataAndEvents : deepDataAndEvents; + + return this.map( function() { + return jQuery.clone( this, dataAndEvents, deepDataAndEvents ); + } ); + }, + + html: function( value ) { + return access( this, function( value ) { + var elem = this[ 0 ] || {}, + i = 0, + l = this.length; + + if ( value === undefined && elem.nodeType === 1 ) { + return elem.innerHTML; + } + + // See if we can take a shortcut and just use innerHTML + if ( typeof value === "string" && !rnoInnerhtml.test( value ) && + !wrapMap[ ( rtagName.exec( value ) || [ "", "" ] )[ 1 ].toLowerCase() ] ) { + + value = jQuery.htmlPrefilter( value ); + + try { + for ( ; i < l; i++ ) { + elem = this[ i ] || {}; + + // Remove element nodes and prevent memory leaks + if ( elem.nodeType === 1 ) { + jQuery.cleanData( getAll( elem, false ) ); + elem.innerHTML = value; + } + } + + elem = 0; + + // If using innerHTML throws an exception, use the fallback method + } catch ( e ) {} + } + + if ( elem ) { + this.empty().append( value ); + } + }, null, value, arguments.length ); + }, + + replaceWith: function() { + var ignored = []; + + // Make the changes, replacing each non-ignored context element with the new content + return domManip( this, arguments, function( elem ) { + var parent = this.parentNode; + + if ( jQuery.inArray( this, ignored ) < 0 ) { + jQuery.cleanData( getAll( this ) ); + if ( parent ) { + parent.replaceChild( elem, this ); + } + } + + // Force callback invocation + }, ignored ); + } +} ); + +jQuery.each( { + appendTo: "append", + prependTo: "prepend", + insertBefore: "before", + insertAfter: "after", + replaceAll: "replaceWith" +}, function( name, original ) { + jQuery.fn[ name ] = function( selector ) { + var elems, + ret = [], + insert = jQuery( selector ), + last = insert.length - 1, + i = 0; + + for ( ; i <= last; i++ ) { + elems = i === last ? this : this.clone( true ); + jQuery( insert[ i ] )[ original ]( elems ); + + // Support: Android <=4.0 only, PhantomJS 1 only + // .get() because push.apply(_, arraylike) throws on ancient WebKit + push.apply( ret, elems.get() ); + } + + return this.pushStack( ret ); + }; +} ); +var rnumnonpx = new RegExp( "^(" + pnum + ")(?!px)[a-z%]+$", "i" ); + +var getStyles = function( elem ) { + + // Support: IE <=11 only, Firefox <=30 (#15098, #14150) + // IE throws on elements created in popups + // FF meanwhile throws on frame elements through "defaultView.getComputedStyle" + var view = elem.ownerDocument.defaultView; + + if ( !view || !view.opener ) { + view = window; + } + + return view.getComputedStyle( elem ); + }; + +var swap = function( elem, options, callback ) { + var ret, name, + old = {}; + + // Remember the old values, and insert the new ones + for ( name in options ) { + old[ name ] = elem.style[ name ]; + elem.style[ name ] = options[ name ]; + } + + ret = callback.call( elem ); + + // Revert the old values + for ( name in options ) { + elem.style[ name ] = old[ name ]; + } + + return ret; +}; + + +var rboxStyle = new RegExp( cssExpand.join( "|" ), "i" ); + + + +( function() { + + // Executing both pixelPosition & boxSizingReliable tests require only one layout + // so they're executed at the same time to save the second computation. 
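+ // A minimal sketch (illustrative only): the support.*() accessors defined below call
+ // computeStyleTests() lazily; the first call performs the single layout pass and nulls `div`,
+ // so later calls return the cached booleans:
+ //
+ //   support.pixelPosition();      // runs the style tests once
+ //   support.boxSizingReliable();  // reuses the cached result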
+ function computeStyleTests() { + + // This is a singleton, we need to execute it only once + if ( !div ) { + return; + } + + container.style.cssText = "position:absolute;left:-11111px;width:60px;" + + "margin-top:1px;padding:0;border:0"; + div.style.cssText = + "position:relative;display:block;box-sizing:border-box;overflow:scroll;" + + "margin:auto;border:1px;padding:1px;" + + "width:60%;top:1%"; + documentElement.appendChild( container ).appendChild( div ); + + var divStyle = window.getComputedStyle( div ); + pixelPositionVal = divStyle.top !== "1%"; + + // Support: Android 4.0 - 4.3 only, Firefox <=3 - 44 + reliableMarginLeftVal = roundPixelMeasures( divStyle.marginLeft ) === 12; + + // Support: Android 4.0 - 4.3 only, Safari <=9.1 - 10.1, iOS <=7.0 - 9.3 + // Some styles come back with percentage values, even though they shouldn't + div.style.right = "60%"; + pixelBoxStylesVal = roundPixelMeasures( divStyle.right ) === 36; + + // Support: IE 9 - 11 only + // Detect misreporting of content dimensions for box-sizing:border-box elements + boxSizingReliableVal = roundPixelMeasures( divStyle.width ) === 36; + + // Support: IE 9 only + // Detect overflow:scroll screwiness (gh-3699) + // Support: Chrome <=64 + // Don't get tricked when zoom affects offsetWidth (gh-4029) + div.style.position = "absolute"; + scrollboxSizeVal = roundPixelMeasures( div.offsetWidth / 3 ) === 12; + + documentElement.removeChild( container ); + + // Nullify the div so it wouldn't be stored in the memory and + // it will also be a sign that checks already performed + div = null; + } + + function roundPixelMeasures( measure ) { + return Math.round( parseFloat( measure ) ); + } + + var pixelPositionVal, boxSizingReliableVal, scrollboxSizeVal, pixelBoxStylesVal, + reliableTrDimensionsVal, reliableMarginLeftVal, + container = document.createElement( "div" ), + div = document.createElement( "div" ); + + // Finish early in limited (non-browser) environments + if ( !div.style ) { + return; + } + + // Support: IE <=9 - 11 only + // Style of cloned element affects source element cloned (#8908) + div.style.backgroundClip = "content-box"; + div.cloneNode( true ).style.backgroundClip = ""; + support.clearCloneStyle = div.style.backgroundClip === "content-box"; + + jQuery.extend( support, { + boxSizingReliable: function() { + computeStyleTests(); + return boxSizingReliableVal; + }, + pixelBoxStyles: function() { + computeStyleTests(); + return pixelBoxStylesVal; + }, + pixelPosition: function() { + computeStyleTests(); + return pixelPositionVal; + }, + reliableMarginLeft: function() { + computeStyleTests(); + return reliableMarginLeftVal; + }, + scrollboxSize: function() { + computeStyleTests(); + return scrollboxSizeVal; + }, + + // Support: IE 9 - 11+, Edge 15 - 18+ + // IE/Edge misreport `getComputedStyle` of table rows with width/height + // set in CSS while `offset*` properties report correct values. + // Behavior in IE 9 is more subtle than in newer versions & it passes + // some versions of this test; make sure not to make it pass there! 
+ reliableTrDimensions: function() { + var table, tr, trChild, trStyle; + if ( reliableTrDimensionsVal == null ) { + table = document.createElement( "table" ); + tr = document.createElement( "tr" ); + trChild = document.createElement( "div" ); + + table.style.cssText = "position:absolute;left:-11111px"; + tr.style.height = "1px"; + trChild.style.height = "9px"; + + documentElement + .appendChild( table ) + .appendChild( tr ) + .appendChild( trChild ); + + trStyle = window.getComputedStyle( tr ); + reliableTrDimensionsVal = parseInt( trStyle.height ) > 3; + + documentElement.removeChild( table ); + } + return reliableTrDimensionsVal; + } + } ); +} )(); + + +function curCSS( elem, name, computed ) { + var width, minWidth, maxWidth, ret, + + // Support: Firefox 51+ + // Retrieving style before computed somehow + // fixes an issue with getting wrong values + // on detached elements + style = elem.style; + + computed = computed || getStyles( elem ); + + // getPropertyValue is needed for: + // .css('filter') (IE 9 only, #12537) + // .css('--customProperty) (#3144) + if ( computed ) { + ret = computed.getPropertyValue( name ) || computed[ name ]; + + if ( ret === "" && !isAttached( elem ) ) { + ret = jQuery.style( elem, name ); + } + + // A tribute to the "awesome hack by Dean Edwards" + // Android Browser returns percentage for some values, + // but width seems to be reliably pixels. + // This is against the CSSOM draft spec: + // https://drafts.csswg.org/cssom/#resolved-values + if ( !support.pixelBoxStyles() && rnumnonpx.test( ret ) && rboxStyle.test( name ) ) { + + // Remember the original values + width = style.width; + minWidth = style.minWidth; + maxWidth = style.maxWidth; + + // Put in the new values to get a computed value out + style.minWidth = style.maxWidth = style.width = ret; + ret = computed.width; + + // Revert the changed values + style.width = width; + style.minWidth = minWidth; + style.maxWidth = maxWidth; + } + } + + return ret !== undefined ? + + // Support: IE <=9 - 11 only + // IE returns zIndex value as an integer. + ret + "" : + ret; +} + + +function addGetHookIf( conditionFn, hookFn ) { + + // Define the hook, we'll check on the first run if it's really needed. + return { + get: function() { + if ( conditionFn() ) { + + // Hook not needed (or it's not possible to use it due + // to missing dependency), remove it. + delete this.get; + return; + } + + // Hook needed; redefine it so that the support test is not executed again. 
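+ // A minimal sketch (illustrative only; someProp, someTest and computeWorkaround are
+ // hypothetical): a hook registered this way deletes itself where the support test passes and
+ // becomes the getter itself everywhere else:
+ //
+ //   jQuery.cssHooks.someProp = addGetHookIf( support.someTest, function( elem ) {
+ //       return computeWorkaround( elem );  // hypothetical fallback computation
+ //   } );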
+ return ( this.get = hookFn ).apply( this, arguments ); + } + }; +} + + +var cssPrefixes = [ "Webkit", "Moz", "ms" ], + emptyStyle = document.createElement( "div" ).style, + vendorProps = {}; + +// Return a vendor-prefixed property or undefined +function vendorPropName( name ) { + + // Check for vendor prefixed names + var capName = name[ 0 ].toUpperCase() + name.slice( 1 ), + i = cssPrefixes.length; + + while ( i-- ) { + name = cssPrefixes[ i ] + capName; + if ( name in emptyStyle ) { + return name; + } + } +} + +// Return a potentially-mapped jQuery.cssProps or vendor prefixed property +function finalPropName( name ) { + var final = jQuery.cssProps[ name ] || vendorProps[ name ]; + + if ( final ) { + return final; + } + if ( name in emptyStyle ) { + return name; + } + return vendorProps[ name ] = vendorPropName( name ) || name; +} + + +var + + // Swappable if display is none or starts with table + // except "table", "table-cell", or "table-caption" + // See here for display values: https://developer.mozilla.org/en-US/docs/CSS/display + rdisplayswap = /^(none|table(?!-c[ea]).+)/, + rcustomProp = /^--/, + cssShow = { position: "absolute", visibility: "hidden", display: "block" }, + cssNormalTransform = { + letterSpacing: "0", + fontWeight: "400" + }; + +function setPositiveNumber( _elem, value, subtract ) { + + // Any relative (+/-) values have already been + // normalized at this point + var matches = rcssNum.exec( value ); + return matches ? + + // Guard against undefined "subtract", e.g., when used as in cssHooks + Math.max( 0, matches[ 2 ] - ( subtract || 0 ) ) + ( matches[ 3 ] || "px" ) : + value; +} + +function boxModelAdjustment( elem, dimension, box, isBorderBox, styles, computedVal ) { + var i = dimension === "width" ? 1 : 0, + extra = 0, + delta = 0; + + // Adjustment may not be necessary + if ( box === ( isBorderBox ? 
"border" : "content" ) ) { + return 0; + } + + for ( ; i < 4; i += 2 ) { + + // Both box models exclude margin + if ( box === "margin" ) { + delta += jQuery.css( elem, box + cssExpand[ i ], true, styles ); + } + + // If we get here with a content-box, we're seeking "padding" or "border" or "margin" + if ( !isBorderBox ) { + + // Add padding + delta += jQuery.css( elem, "padding" + cssExpand[ i ], true, styles ); + + // For "border" or "margin", add border + if ( box !== "padding" ) { + delta += jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles ); + + // But still keep track of it otherwise + } else { + extra += jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles ); + } + + // If we get here with a border-box (content + padding + border), we're seeking "content" or + // "padding" or "margin" + } else { + + // For "content", subtract padding + if ( box === "content" ) { + delta -= jQuery.css( elem, "padding" + cssExpand[ i ], true, styles ); + } + + // For "content" or "padding", subtract border + if ( box !== "margin" ) { + delta -= jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles ); + } + } + } + + // Account for positive content-box scroll gutter when requested by providing computedVal + if ( !isBorderBox && computedVal >= 0 ) { + + // offsetWidth/offsetHeight is a rounded sum of content, padding, scroll gutter, and border + // Assuming integer scroll gutter, subtract the rest and round down + delta += Math.max( 0, Math.ceil( + elem[ "offset" + dimension[ 0 ].toUpperCase() + dimension.slice( 1 ) ] - + computedVal - + delta - + extra - + 0.5 + + // If offsetWidth/offsetHeight is unknown, then we can't determine content-box scroll gutter + // Use an explicit zero to avoid NaN (gh-3964) + ) ) || 0; + } + + return delta; +} + +function getWidthOrHeight( elem, dimension, extra ) { + + // Start with computed style + var styles = getStyles( elem ), + + // To avoid forcing a reflow, only fetch boxSizing if we need it (gh-4322). + // Fake content-box until we know it's needed to know the true value. + boxSizingNeeded = !support.boxSizingReliable() || extra, + isBorderBox = boxSizingNeeded && + jQuery.css( elem, "boxSizing", false, styles ) === "border-box", + valueIsBorderBox = isBorderBox, + + val = curCSS( elem, dimension, styles ), + offsetProp = "offset" + dimension[ 0 ].toUpperCase() + dimension.slice( 1 ); + + // Support: Firefox <=54 + // Return a confounding non-pixel value or feign ignorance, as appropriate. + if ( rnumnonpx.test( val ) ) { + if ( !extra ) { + return val; + } + val = "auto"; + } + + + // Support: IE 9 - 11 only + // Use offsetWidth/offsetHeight for when box sizing is unreliable. + // In those cases, the computed value can be trusted to be border-box. + if ( ( !support.boxSizingReliable() && isBorderBox || + + // Support: IE 10 - 11+, Edge 15 - 18+ + // IE/Edge misreport `getComputedStyle` of table rows with width/height + // set in CSS while `offset*` properties report correct values. + // Interestingly, in some cases IE 9 doesn't suffer from this issue. 
+ !support.reliableTrDimensions() && nodeName( elem, "tr" ) || + + // Fall back to offsetWidth/offsetHeight when value is "auto" + // This happens for inline elements with no explicit setting (gh-3571) + val === "auto" || + + // Support: Android <=4.1 - 4.3 only + // Also use offsetWidth/offsetHeight for misreported inline dimensions (gh-3602) + !parseFloat( val ) && jQuery.css( elem, "display", false, styles ) === "inline" ) && + + // Make sure the element is visible & connected + elem.getClientRects().length ) { + + isBorderBox = jQuery.css( elem, "boxSizing", false, styles ) === "border-box"; + + // Where available, offsetWidth/offsetHeight approximate border box dimensions. + // Where not available (e.g., SVG), assume unreliable box-sizing and interpret the + // retrieved value as a content box dimension. + valueIsBorderBox = offsetProp in elem; + if ( valueIsBorderBox ) { + val = elem[ offsetProp ]; + } + } + + // Normalize "" and auto + val = parseFloat( val ) || 0; + + // Adjust for the element's box model + return ( val + + boxModelAdjustment( + elem, + dimension, + extra || ( isBorderBox ? "border" : "content" ), + valueIsBorderBox, + styles, + + // Provide the current computed size to request scroll gutter calculation (gh-3589) + val + ) + ) + "px"; +} + +jQuery.extend( { + + // Add in style property hooks for overriding the default + // behavior of getting and setting a style property + cssHooks: { + opacity: { + get: function( elem, computed ) { + if ( computed ) { + + // We should always get a number back from opacity + var ret = curCSS( elem, "opacity" ); + return ret === "" ? "1" : ret; + } + } + } + }, + + // Don't automatically add "px" to these possibly-unitless properties + cssNumber: { + "animationIterationCount": true, + "columnCount": true, + "fillOpacity": true, + "flexGrow": true, + "flexShrink": true, + "fontWeight": true, + "gridArea": true, + "gridColumn": true, + "gridColumnEnd": true, + "gridColumnStart": true, + "gridRow": true, + "gridRowEnd": true, + "gridRowStart": true, + "lineHeight": true, + "opacity": true, + "order": true, + "orphans": true, + "widows": true, + "zIndex": true, + "zoom": true + }, + + // Add in properties whose names you wish to fix before + // setting or getting the value + cssProps: {}, + + // Get and set the style property on a DOM Node + style: function( elem, name, value, extra ) { + + // Don't set styles on text and comment nodes + if ( !elem || elem.nodeType === 3 || elem.nodeType === 8 || !elem.style ) { + return; + } + + // Make sure that we're working with the right name + var ret, type, hooks, + origName = camelCase( name ), + isCustomProp = rcustomProp.test( name ), + style = elem.style; + + // Make sure that we're working with the right name. We don't + // want to query the value if it is a CSS custom property + // since they are user-defined. 
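+ // A minimal usage sketch (illustrative only; #panel is hypothetical): custom properties skip
+ // finalPropName() and go straight through setProperty()/getPropertyValue():
+ //
+ //   jQuery( "#panel" ).css( "--accent", "#0af" );  // set a custom property
+ //   jQuery( "#panel" ).css( "--accent" );          // read it back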
+ if ( !isCustomProp ) { + name = finalPropName( origName ); + } + + // Gets hook for the prefixed version, then unprefixed version + hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ]; + + // Check if we're setting a value + if ( value !== undefined ) { + type = typeof value; + + // Convert "+=" or "-=" to relative numbers (#7345) + if ( type === "string" && ( ret = rcssNum.exec( value ) ) && ret[ 1 ] ) { + value = adjustCSS( elem, name, ret ); + + // Fixes bug #9237 + type = "number"; + } + + // Make sure that null and NaN values aren't set (#7116) + if ( value == null || value !== value ) { + return; + } + + // If a number was passed in, add the unit (except for certain CSS properties) + // The isCustomProp check can be removed in jQuery 4.0 when we only auto-append + // "px" to a few hardcoded values. + if ( type === "number" && !isCustomProp ) { + value += ret && ret[ 3 ] || ( jQuery.cssNumber[ origName ] ? "" : "px" ); + } + + // background-* props affect original clone's values + if ( !support.clearCloneStyle && value === "" && name.indexOf( "background" ) === 0 ) { + style[ name ] = "inherit"; + } + + // If a hook was provided, use that value, otherwise just set the specified value + if ( !hooks || !( "set" in hooks ) || + ( value = hooks.set( elem, value, extra ) ) !== undefined ) { + + if ( isCustomProp ) { + style.setProperty( name, value ); + } else { + style[ name ] = value; + } + } + + } else { + + // If a hook was provided get the non-computed value from there + if ( hooks && "get" in hooks && + ( ret = hooks.get( elem, false, extra ) ) !== undefined ) { + + return ret; + } + + // Otherwise just get the value from the style object + return style[ name ]; + } + }, + + css: function( elem, name, extra, styles ) { + var val, num, hooks, + origName = camelCase( name ), + isCustomProp = rcustomProp.test( name ); + + // Make sure that we're working with the right name. We don't + // want to modify the value if it is a CSS custom property + // since they are user-defined. + if ( !isCustomProp ) { + name = finalPropName( origName ); + } + + // Try prefixed name followed by the unprefixed name + hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ]; + + // If a hook was provided get the computed value from there + if ( hooks && "get" in hooks ) { + val = hooks.get( elem, true, extra ); + } + + // Otherwise, if a way to get the computed value exists, use that + if ( val === undefined ) { + val = curCSS( elem, name, styles ); + } + + // Convert "normal" to computed value + if ( val === "normal" && name in cssNormalTransform ) { + val = cssNormalTransform[ name ]; + } + + // Make numeric if forced or a qualifier was provided and val looks numeric + if ( extra === "" || extra ) { + num = parseFloat( val ); + return extra === true || isFinite( num ) ? num || 0 : val; + } + + return val; + } +} ); + +jQuery.each( [ "height", "width" ], function( _i, dimension ) { + jQuery.cssHooks[ dimension ] = { + get: function( elem, computed, extra ) { + if ( computed ) { + + // Certain elements can have dimension info if we invisibly show them + // but it must have a current display style that would benefit + return rdisplayswap.test( jQuery.css( elem, "display" ) ) && + + // Support: Safari 8+ + // Table columns in Safari have non-zero offsetWidth & zero + // getBoundingClientRect().width unless display is changed. + // Support: IE <=11 only + // Running getBoundingClientRect on a disconnected node + // in IE throws an error. 
+ ( !elem.getClientRects().length || !elem.getBoundingClientRect().width ) ? + swap( elem, cssShow, function() { + return getWidthOrHeight( elem, dimension, extra ); + } ) : + getWidthOrHeight( elem, dimension, extra ); + } + }, + + set: function( elem, value, extra ) { + var matches, + styles = getStyles( elem ), + + // Only read styles.position if the test has a chance to fail + // to avoid forcing a reflow. + scrollboxSizeBuggy = !support.scrollboxSize() && + styles.position === "absolute", + + // To avoid forcing a reflow, only fetch boxSizing if we need it (gh-3991) + boxSizingNeeded = scrollboxSizeBuggy || extra, + isBorderBox = boxSizingNeeded && + jQuery.css( elem, "boxSizing", false, styles ) === "border-box", + subtract = extra ? + boxModelAdjustment( + elem, + dimension, + extra, + isBorderBox, + styles + ) : + 0; + + // Account for unreliable border-box dimensions by comparing offset* to computed and + // faking a content-box to get border and padding (gh-3699) + if ( isBorderBox && scrollboxSizeBuggy ) { + subtract -= Math.ceil( + elem[ "offset" + dimension[ 0 ].toUpperCase() + dimension.slice( 1 ) ] - + parseFloat( styles[ dimension ] ) - + boxModelAdjustment( elem, dimension, "border", false, styles ) - + 0.5 + ); + } + + // Convert to pixels if value adjustment is needed + if ( subtract && ( matches = rcssNum.exec( value ) ) && + ( matches[ 3 ] || "px" ) !== "px" ) { + + elem.style[ dimension ] = value; + value = jQuery.css( elem, dimension ); + } + + return setPositiveNumber( elem, value, subtract ); + } + }; +} ); + +jQuery.cssHooks.marginLeft = addGetHookIf( support.reliableMarginLeft, + function( elem, computed ) { + if ( computed ) { + return ( parseFloat( curCSS( elem, "marginLeft" ) ) || + elem.getBoundingClientRect().left - + swap( elem, { marginLeft: 0 }, function() { + return elem.getBoundingClientRect().left; + } ) + ) + "px"; + } + } +); + +// These hooks are used by animate to expand properties +jQuery.each( { + margin: "", + padding: "", + border: "Width" +}, function( prefix, suffix ) { + jQuery.cssHooks[ prefix + suffix ] = { + expand: function( value ) { + var i = 0, + expanded = {}, + + // Assumes a single number if not a string + parts = typeof value === "string" ? value.split( " " ) : [ value ]; + + for ( ; i < 4; i++ ) { + expanded[ prefix + cssExpand[ i ] + suffix ] = + parts[ i ] || parts[ i - 2 ] || parts[ 0 ]; + } + + return expanded; + } + }; + + if ( prefix !== "margin" ) { + jQuery.cssHooks[ prefix + suffix ].set = setPositiveNumber; + } +} ); + +jQuery.fn.extend( { + css: function( name, value ) { + return access( this, function( elem, name, value ) { + var styles, len, + map = {}, + i = 0; + + if ( Array.isArray( name ) ) { + styles = getStyles( elem ); + len = name.length; + + for ( ; i < len; i++ ) { + map[ name[ i ] ] = jQuery.css( elem, name[ i ], false, styles ); + } + + return map; + } + + return value !== undefined ? + jQuery.style( elem, name, value ) : + jQuery.css( elem, name ); + }, name, value, arguments.length > 1 ); + } +} ); + + +function Tween( elem, options, prop, end, easing ) { + return new Tween.prototype.init( elem, options, prop, end, easing ); +} +jQuery.Tween = Tween; + +Tween.prototype = { + constructor: Tween, + init: function( elem, options, prop, end, easing, unit ) { + this.elem = elem; + this.prop = prop; + this.easing = easing || jQuery.easing._default; + this.options = options; + this.start = this.now = this.cur(); + this.end = end; + this.unit = unit || ( jQuery.cssNumber[ prop ] ? 
"" : "px" ); + }, + cur: function() { + var hooks = Tween.propHooks[ this.prop ]; + + return hooks && hooks.get ? + hooks.get( this ) : + Tween.propHooks._default.get( this ); + }, + run: function( percent ) { + var eased, + hooks = Tween.propHooks[ this.prop ]; + + if ( this.options.duration ) { + this.pos = eased = jQuery.easing[ this.easing ]( + percent, this.options.duration * percent, 0, 1, this.options.duration + ); + } else { + this.pos = eased = percent; + } + this.now = ( this.end - this.start ) * eased + this.start; + + if ( this.options.step ) { + this.options.step.call( this.elem, this.now, this ); + } + + if ( hooks && hooks.set ) { + hooks.set( this ); + } else { + Tween.propHooks._default.set( this ); + } + return this; + } +}; + +Tween.prototype.init.prototype = Tween.prototype; + +Tween.propHooks = { + _default: { + get: function( tween ) { + var result; + + // Use a property on the element directly when it is not a DOM element, + // or when there is no matching style property that exists. + if ( tween.elem.nodeType !== 1 || + tween.elem[ tween.prop ] != null && tween.elem.style[ tween.prop ] == null ) { + return tween.elem[ tween.prop ]; + } + + // Passing an empty string as a 3rd parameter to .css will automatically + // attempt a parseFloat and fallback to a string if the parse fails. + // Simple values such as "10px" are parsed to Float; + // complex values such as "rotate(1rad)" are returned as-is. + result = jQuery.css( tween.elem, tween.prop, "" ); + + // Empty strings, null, undefined and "auto" are converted to 0. + return !result || result === "auto" ? 0 : result; + }, + set: function( tween ) { + + // Use step hook for back compat. + // Use cssHook if its there. + // Use .style if available and use plain properties where available. + if ( jQuery.fx.step[ tween.prop ] ) { + jQuery.fx.step[ tween.prop ]( tween ); + } else if ( tween.elem.nodeType === 1 && ( + jQuery.cssHooks[ tween.prop ] || + tween.elem.style[ finalPropName( tween.prop ) ] != null ) ) { + jQuery.style( tween.elem, tween.prop, tween.now + tween.unit ); + } else { + tween.elem[ tween.prop ] = tween.now; + } + } + } +}; + +// Support: IE <=9 only +// Panic based approach to setting things on disconnected nodes +Tween.propHooks.scrollTop = Tween.propHooks.scrollLeft = { + set: function( tween ) { + if ( tween.elem.nodeType && tween.elem.parentNode ) { + tween.elem[ tween.prop ] = tween.now; + } + } +}; + +jQuery.easing = { + linear: function( p ) { + return p; + }, + swing: function( p ) { + return 0.5 - Math.cos( p * Math.PI ) / 2; + }, + _default: "swing" +}; + +jQuery.fx = Tween.prototype.init; + +// Back compat <1.8 extension point +jQuery.fx.step = {}; + + + + +var + fxNow, inProgress, + rfxtypes = /^(?:toggle|show|hide)$/, + rrun = /queueHooks$/; + +function schedule() { + if ( inProgress ) { + if ( document.hidden === false && window.requestAnimationFrame ) { + window.requestAnimationFrame( schedule ); + } else { + window.setTimeout( schedule, jQuery.fx.interval ); + } + + jQuery.fx.tick(); + } +} + +// Animations created synchronously will run synchronously +function createFxNow() { + window.setTimeout( function() { + fxNow = undefined; + } ); + return ( fxNow = Date.now() ); +} + +// Generate parameters to create a standard animation +function genFx( type, includeWidth ) { + var which, + i = 0, + attrs = { height: type }; + + // If we include width, step value is 1 to do all cssExpand values, + // otherwise step value is 2 to skip over Left and Right + includeWidth = includeWidth ? 
1 : 0; + for ( ; i < 4; i += 2 - includeWidth ) { + which = cssExpand[ i ]; + attrs[ "margin" + which ] = attrs[ "padding" + which ] = type; + } + + if ( includeWidth ) { + attrs.opacity = attrs.width = type; + } + + return attrs; +} + +function createTween( value, prop, animation ) { + var tween, + collection = ( Animation.tweeners[ prop ] || [] ).concat( Animation.tweeners[ "*" ] ), + index = 0, + length = collection.length; + for ( ; index < length; index++ ) { + if ( ( tween = collection[ index ].call( animation, prop, value ) ) ) { + + // We're done with this property + return tween; + } + } +} + +function defaultPrefilter( elem, props, opts ) { + var prop, value, toggle, hooks, oldfire, propTween, restoreDisplay, display, + isBox = "width" in props || "height" in props, + anim = this, + orig = {}, + style = elem.style, + hidden = elem.nodeType && isHiddenWithinTree( elem ), + dataShow = dataPriv.get( elem, "fxshow" ); + + // Queue-skipping animations hijack the fx hooks + if ( !opts.queue ) { + hooks = jQuery._queueHooks( elem, "fx" ); + if ( hooks.unqueued == null ) { + hooks.unqueued = 0; + oldfire = hooks.empty.fire; + hooks.empty.fire = function() { + if ( !hooks.unqueued ) { + oldfire(); + } + }; + } + hooks.unqueued++; + + anim.always( function() { + + // Ensure the complete handler is called before this completes + anim.always( function() { + hooks.unqueued--; + if ( !jQuery.queue( elem, "fx" ).length ) { + hooks.empty.fire(); + } + } ); + } ); + } + + // Detect show/hide animations + for ( prop in props ) { + value = props[ prop ]; + if ( rfxtypes.test( value ) ) { + delete props[ prop ]; + toggle = toggle || value === "toggle"; + if ( value === ( hidden ? "hide" : "show" ) ) { + + // Pretend to be hidden if this is a "show" and + // there is still data from a stopped show/hide + if ( value === "show" && dataShow && dataShow[ prop ] !== undefined ) { + hidden = true; + + // Ignore all other no-op show/hide data + } else { + continue; + } + } + orig[ prop ] = dataShow && dataShow[ prop ] || jQuery.style( elem, prop ); + } + } + + // Bail out if this is a no-op like .hide().hide() + propTween = !jQuery.isEmptyObject( props ); + if ( !propTween && jQuery.isEmptyObject( orig ) ) { + return; + } + + // Restrict "overflow" and "display" styles during box animations + if ( isBox && elem.nodeType === 1 ) { + + // Support: IE <=9 - 11, Edge 12 - 15 + // Record all 3 overflow attributes because IE does not infer the shorthand + // from identically-valued overflowX and overflowY and Edge just mirrors + // the overflowX value there. 
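+ // A minimal usage sketch (illustrative only; #drawer is hypothetical): this prefilter is what
+ // turns "toggle"/"show"/"hide" values into tweens and clamps overflow while width/height animate:
+ //
+ //   jQuery( "#drawer" ).animate( { height: "toggle", opacity: "toggle" }, 200 );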
+ opts.overflow = [ style.overflow, style.overflowX, style.overflowY ]; + + // Identify a display type, preferring old show/hide data over the CSS cascade + restoreDisplay = dataShow && dataShow.display; + if ( restoreDisplay == null ) { + restoreDisplay = dataPriv.get( elem, "display" ); + } + display = jQuery.css( elem, "display" ); + if ( display === "none" ) { + if ( restoreDisplay ) { + display = restoreDisplay; + } else { + + // Get nonempty value(s) by temporarily forcing visibility + showHide( [ elem ], true ); + restoreDisplay = elem.style.display || restoreDisplay; + display = jQuery.css( elem, "display" ); + showHide( [ elem ] ); + } + } + + // Animate inline elements as inline-block + if ( display === "inline" || display === "inline-block" && restoreDisplay != null ) { + if ( jQuery.css( elem, "float" ) === "none" ) { + + // Restore the original display value at the end of pure show/hide animations + if ( !propTween ) { + anim.done( function() { + style.display = restoreDisplay; + } ); + if ( restoreDisplay == null ) { + display = style.display; + restoreDisplay = display === "none" ? "" : display; + } + } + style.display = "inline-block"; + } + } + } + + if ( opts.overflow ) { + style.overflow = "hidden"; + anim.always( function() { + style.overflow = opts.overflow[ 0 ]; + style.overflowX = opts.overflow[ 1 ]; + style.overflowY = opts.overflow[ 2 ]; + } ); + } + + // Implement show/hide animations + propTween = false; + for ( prop in orig ) { + + // General show/hide setup for this element animation + if ( !propTween ) { + if ( dataShow ) { + if ( "hidden" in dataShow ) { + hidden = dataShow.hidden; + } + } else { + dataShow = dataPriv.access( elem, "fxshow", { display: restoreDisplay } ); + } + + // Store hidden/visible for toggle so `.stop().toggle()` "reverses" + if ( toggle ) { + dataShow.hidden = !hidden; + } + + // Show elements before animating them + if ( hidden ) { + showHide( [ elem ], true ); + } + + /* eslint-disable no-loop-func */ + + anim.done( function() { + + /* eslint-enable no-loop-func */ + + // The final step of a "hide" animation is actually hiding the element + if ( !hidden ) { + showHide( [ elem ] ); + } + dataPriv.remove( elem, "fxshow" ); + for ( prop in orig ) { + jQuery.style( elem, prop, orig[ prop ] ); + } + } ); + } + + // Per-property setup + propTween = createTween( hidden ? dataShow[ prop ] : 0, prop, anim ); + if ( !( prop in dataShow ) ) { + dataShow[ prop ] = propTween.start; + if ( hidden ) { + propTween.end = propTween.start; + propTween.start = 0; + } + } + } +} + +function propFilter( props, specialEasing ) { + var index, name, easing, value, hooks; + + // camelCase, specialEasing and expand cssHook pass + for ( index in props ) { + name = camelCase( index ); + easing = specialEasing[ name ]; + value = props[ index ]; + if ( Array.isArray( value ) ) { + easing = value[ 1 ]; + value = props[ index ] = value[ 0 ]; + } + + if ( index !== name ) { + props[ name ] = value; + delete props[ index ]; + } + + hooks = jQuery.cssHooks[ name ]; + if ( hooks && "expand" in hooks ) { + value = hooks.expand( value ); + delete props[ name ]; + + // Not quite $.extend, this won't overwrite existing keys. 
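+ // A minimal usage sketch (illustrative only; #card is hypothetical): the expand hooks consulted
+ // just above are why shorthand properties can be animated; "margin" fans out into the four
+ // per-side tweens here:
+ //
+ //   jQuery( "#card" ).animate( { margin: "10px" }, 150 );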
+ // Reusing 'index' because we have the correct "name" + for ( index in value ) { + if ( !( index in props ) ) { + props[ index ] = value[ index ]; + specialEasing[ index ] = easing; + } + } + } else { + specialEasing[ name ] = easing; + } + } +} + +function Animation( elem, properties, options ) { + var result, + stopped, + index = 0, + length = Animation.prefilters.length, + deferred = jQuery.Deferred().always( function() { + + // Don't match elem in the :animated selector + delete tick.elem; + } ), + tick = function() { + if ( stopped ) { + return false; + } + var currentTime = fxNow || createFxNow(), + remaining = Math.max( 0, animation.startTime + animation.duration - currentTime ), + + // Support: Android 2.3 only + // Archaic crash bug won't allow us to use `1 - ( 0.5 || 0 )` (#12497) + temp = remaining / animation.duration || 0, + percent = 1 - temp, + index = 0, + length = animation.tweens.length; + + for ( ; index < length; index++ ) { + animation.tweens[ index ].run( percent ); + } + + deferred.notifyWith( elem, [ animation, percent, remaining ] ); + + // If there's more to do, yield + if ( percent < 1 && length ) { + return remaining; + } + + // If this was an empty animation, synthesize a final progress notification + if ( !length ) { + deferred.notifyWith( elem, [ animation, 1, 0 ] ); + } + + // Resolve the animation and report its conclusion + deferred.resolveWith( elem, [ animation ] ); + return false; + }, + animation = deferred.promise( { + elem: elem, + props: jQuery.extend( {}, properties ), + opts: jQuery.extend( true, { + specialEasing: {}, + easing: jQuery.easing._default + }, options ), + originalProperties: properties, + originalOptions: options, + startTime: fxNow || createFxNow(), + duration: options.duration, + tweens: [], + createTween: function( prop, end ) { + var tween = jQuery.Tween( elem, animation.opts, prop, end, + animation.opts.specialEasing[ prop ] || animation.opts.easing ); + animation.tweens.push( tween ); + return tween; + }, + stop: function( gotoEnd ) { + var index = 0, + + // If we are going to the end, we want to run all the tweens + // otherwise we skip this part + length = gotoEnd ? 
animation.tweens.length : 0; + if ( stopped ) { + return this; + } + stopped = true; + for ( ; index < length; index++ ) { + animation.tweens[ index ].run( 1 ); + } + + // Resolve when we played the last frame; otherwise, reject + if ( gotoEnd ) { + deferred.notifyWith( elem, [ animation, 1, 0 ] ); + deferred.resolveWith( elem, [ animation, gotoEnd ] ); + } else { + deferred.rejectWith( elem, [ animation, gotoEnd ] ); + } + return this; + } + } ), + props = animation.props; + + propFilter( props, animation.opts.specialEasing ); + + for ( ; index < length; index++ ) { + result = Animation.prefilters[ index ].call( animation, elem, props, animation.opts ); + if ( result ) { + if ( isFunction( result.stop ) ) { + jQuery._queueHooks( animation.elem, animation.opts.queue ).stop = + result.stop.bind( result ); + } + return result; + } + } + + jQuery.map( props, createTween, animation ); + + if ( isFunction( animation.opts.start ) ) { + animation.opts.start.call( elem, animation ); + } + + // Attach callbacks from options + animation + .progress( animation.opts.progress ) + .done( animation.opts.done, animation.opts.complete ) + .fail( animation.opts.fail ) + .always( animation.opts.always ); + + jQuery.fx.timer( + jQuery.extend( tick, { + elem: elem, + anim: animation, + queue: animation.opts.queue + } ) + ); + + return animation; +} + +jQuery.Animation = jQuery.extend( Animation, { + + tweeners: { + "*": [ function( prop, value ) { + var tween = this.createTween( prop, value ); + adjustCSS( tween.elem, prop, rcssNum.exec( value ), tween ); + return tween; + } ] + }, + + tweener: function( props, callback ) { + if ( isFunction( props ) ) { + callback = props; + props = [ "*" ]; + } else { + props = props.match( rnothtmlwhite ); + } + + var prop, + index = 0, + length = props.length; + + for ( ; index < length; index++ ) { + prop = props[ index ]; + Animation.tweeners[ prop ] = Animation.tweeners[ prop ] || []; + Animation.tweeners[ prop ].unshift( callback ); + } + }, + + prefilters: [ defaultPrefilter ], + + prefilter: function( callback, prepend ) { + if ( prepend ) { + Animation.prefilters.unshift( callback ); + } else { + Animation.prefilters.push( callback ); + } + } +} ); + +jQuery.speed = function( speed, easing, fn ) { + var opt = speed && typeof speed === "object" ? 
jQuery.extend( {}, speed ) : { + complete: fn || !fn && easing || + isFunction( speed ) && speed, + duration: speed, + easing: fn && easing || easing && !isFunction( easing ) && easing + }; + + // Go to the end state if fx are off + if ( jQuery.fx.off ) { + opt.duration = 0; + + } else { + if ( typeof opt.duration !== "number" ) { + if ( opt.duration in jQuery.fx.speeds ) { + opt.duration = jQuery.fx.speeds[ opt.duration ]; + + } else { + opt.duration = jQuery.fx.speeds._default; + } + } + } + + // Normalize opt.queue - true/undefined/null -> "fx" + if ( opt.queue == null || opt.queue === true ) { + opt.queue = "fx"; + } + + // Queueing + opt.old = opt.complete; + + opt.complete = function() { + if ( isFunction( opt.old ) ) { + opt.old.call( this ); + } + + if ( opt.queue ) { + jQuery.dequeue( this, opt.queue ); + } + }; + + return opt; +}; + +jQuery.fn.extend( { + fadeTo: function( speed, to, easing, callback ) { + + // Show any hidden elements after setting opacity to 0 + return this.filter( isHiddenWithinTree ).css( "opacity", 0 ).show() + + // Animate to the value specified + .end().animate( { opacity: to }, speed, easing, callback ); + }, + animate: function( prop, speed, easing, callback ) { + var empty = jQuery.isEmptyObject( prop ), + optall = jQuery.speed( speed, easing, callback ), + doAnimation = function() { + + // Operate on a copy of prop so per-property easing won't be lost + var anim = Animation( this, jQuery.extend( {}, prop ), optall ); + + // Empty animations, or finishing resolves immediately + if ( empty || dataPriv.get( this, "finish" ) ) { + anim.stop( true ); + } + }; + doAnimation.finish = doAnimation; + + return empty || optall.queue === false ? + this.each( doAnimation ) : + this.queue( optall.queue, doAnimation ); + }, + stop: function( type, clearQueue, gotoEnd ) { + var stopQueue = function( hooks ) { + var stop = hooks.stop; + delete hooks.stop; + stop( gotoEnd ); + }; + + if ( typeof type !== "string" ) { + gotoEnd = clearQueue; + clearQueue = type; + type = undefined; + } + if ( clearQueue ) { + this.queue( type || "fx", [] ); + } + + return this.each( function() { + var dequeue = true, + index = type != null && type + "queueHooks", + timers = jQuery.timers, + data = dataPriv.get( this ); + + if ( index ) { + if ( data[ index ] && data[ index ].stop ) { + stopQueue( data[ index ] ); + } + } else { + for ( index in data ) { + if ( data[ index ] && data[ index ].stop && rrun.test( index ) ) { + stopQueue( data[ index ] ); + } + } + } + + for ( index = timers.length; index--; ) { + if ( timers[ index ].elem === this && + ( type == null || timers[ index ].queue === type ) ) { + + timers[ index ].anim.stop( gotoEnd ); + dequeue = false; + timers.splice( index, 1 ); + } + } + + // Start the next in the queue if the last step wasn't forced. + // Timers currently will call their complete callbacks, which + // will dequeue but only if they were gotoEnd. + if ( dequeue || !gotoEnd ) { + jQuery.dequeue( this, type ); + } + } ); + }, + finish: function( type ) { + if ( type !== false ) { + type = type || "fx"; + } + return this.each( function() { + var index, + data = dataPriv.get( this ), + queue = data[ type + "queue" ], + hooks = data[ type + "queueHooks" ], + timers = jQuery.timers, + length = queue ? 
queue.length : 0; + + // Enable finishing flag on private data + data.finish = true; + + // Empty the queue first + jQuery.queue( this, type, [] ); + + if ( hooks && hooks.stop ) { + hooks.stop.call( this, true ); + } + + // Look for any active animations, and finish them + for ( index = timers.length; index--; ) { + if ( timers[ index ].elem === this && timers[ index ].queue === type ) { + timers[ index ].anim.stop( true ); + timers.splice( index, 1 ); + } + } + + // Look for any animations in the old queue and finish them + for ( index = 0; index < length; index++ ) { + if ( queue[ index ] && queue[ index ].finish ) { + queue[ index ].finish.call( this ); + } + } + + // Turn off finishing flag + delete data.finish; + } ); + } +} ); + +jQuery.each( [ "toggle", "show", "hide" ], function( _i, name ) { + var cssFn = jQuery.fn[ name ]; + jQuery.fn[ name ] = function( speed, easing, callback ) { + return speed == null || typeof speed === "boolean" ? + cssFn.apply( this, arguments ) : + this.animate( genFx( name, true ), speed, easing, callback ); + }; +} ); + +// Generate shortcuts for custom animations +jQuery.each( { + slideDown: genFx( "show" ), + slideUp: genFx( "hide" ), + slideToggle: genFx( "toggle" ), + fadeIn: { opacity: "show" }, + fadeOut: { opacity: "hide" }, + fadeToggle: { opacity: "toggle" } +}, function( name, props ) { + jQuery.fn[ name ] = function( speed, easing, callback ) { + return this.animate( props, speed, easing, callback ); + }; +} ); + +jQuery.timers = []; +jQuery.fx.tick = function() { + var timer, + i = 0, + timers = jQuery.timers; + + fxNow = Date.now(); + + for ( ; i < timers.length; i++ ) { + timer = timers[ i ]; + + // Run the timer and safely remove it when done (allowing for external removal) + if ( !timer() && timers[ i ] === timer ) { + timers.splice( i--, 1 ); + } + } + + if ( !timers.length ) { + jQuery.fx.stop(); + } + fxNow = undefined; +}; + +jQuery.fx.timer = function( timer ) { + jQuery.timers.push( timer ); + jQuery.fx.start(); +}; + +jQuery.fx.interval = 13; +jQuery.fx.start = function() { + if ( inProgress ) { + return; + } + + inProgress = true; + schedule(); +}; + +jQuery.fx.stop = function() { + inProgress = null; +}; + +jQuery.fx.speeds = { + slow: 600, + fast: 200, + + // Default speed + _default: 400 +}; + + +// Based off of the plugin by Clint Helfers, with permission. +// https://web.archive.org/web/20100324014747/http://blindsignals.com/index.php/2009/07/jquery-delay/ +jQuery.fn.delay = function( time, type ) { + time = jQuery.fx ? 
jQuery.fx.speeds[ time ] || time : time; + type = type || "fx"; + + return this.queue( type, function( next, hooks ) { + var timeout = window.setTimeout( next, time ); + hooks.stop = function() { + window.clearTimeout( timeout ); + }; + } ); +}; + + +( function() { + var input = document.createElement( "input" ), + select = document.createElement( "select" ), + opt = select.appendChild( document.createElement( "option" ) ); + + input.type = "checkbox"; + + // Support: Android <=4.3 only + // Default value for a checkbox should be "on" + support.checkOn = input.value !== ""; + + // Support: IE <=11 only + // Must access selectedIndex to make default options select + support.optSelected = opt.selected; + + // Support: IE <=11 only + // An input loses its value after becoming a radio + input = document.createElement( "input" ); + input.value = "t"; + input.type = "radio"; + support.radioValue = input.value === "t"; +} )(); + + +var boolHook, + attrHandle = jQuery.expr.attrHandle; + +jQuery.fn.extend( { + attr: function( name, value ) { + return access( this, jQuery.attr, name, value, arguments.length > 1 ); + }, + + removeAttr: function( name ) { + return this.each( function() { + jQuery.removeAttr( this, name ); + } ); + } +} ); + +jQuery.extend( { + attr: function( elem, name, value ) { + var ret, hooks, + nType = elem.nodeType; + + // Don't get/set attributes on text, comment and attribute nodes + if ( nType === 3 || nType === 8 || nType === 2 ) { + return; + } + + // Fallback to prop when attributes are not supported + if ( typeof elem.getAttribute === "undefined" ) { + return jQuery.prop( elem, name, value ); + } + + // Attribute hooks are determined by the lowercase version + // Grab necessary hook if one is defined + if ( nType !== 1 || !jQuery.isXMLDoc( elem ) ) { + hooks = jQuery.attrHooks[ name.toLowerCase() ] || + ( jQuery.expr.match.bool.test( name ) ? boolHook : undefined ); + } + + if ( value !== undefined ) { + if ( value === null ) { + jQuery.removeAttr( elem, name ); + return; + } + + if ( hooks && "set" in hooks && + ( ret = hooks.set( elem, value, name ) ) !== undefined ) { + return ret; + } + + elem.setAttribute( name, value + "" ); + return value; + } + + if ( hooks && "get" in hooks && ( ret = hooks.get( elem, name ) ) !== null ) { + return ret; + } + + ret = jQuery.find.attr( elem, name ); + + // Non-existent attributes return null, we normalize to undefined + return ret == null ? 
undefined : ret; + }, + + attrHooks: { + type: { + set: function( elem, value ) { + if ( !support.radioValue && value === "radio" && + nodeName( elem, "input" ) ) { + var val = elem.value; + elem.setAttribute( "type", value ); + if ( val ) { + elem.value = val; + } + return value; + } + } + } + }, + + removeAttr: function( elem, value ) { + var name, + i = 0, + + // Attribute names can contain non-HTML whitespace characters + // https://html.spec.whatwg.org/multipage/syntax.html#attributes-2 + attrNames = value && value.match( rnothtmlwhite ); + + if ( attrNames && elem.nodeType === 1 ) { + while ( ( name = attrNames[ i++ ] ) ) { + elem.removeAttribute( name ); + } + } + } +} ); + +// Hooks for boolean attributes +boolHook = { + set: function( elem, value, name ) { + if ( value === false ) { + + // Remove boolean attributes when set to false + jQuery.removeAttr( elem, name ); + } else { + elem.setAttribute( name, name ); + } + return name; + } +}; + +jQuery.each( jQuery.expr.match.bool.source.match( /\w+/g ), function( _i, name ) { + var getter = attrHandle[ name ] || jQuery.find.attr; + + attrHandle[ name ] = function( elem, name, isXML ) { + var ret, handle, + lowercaseName = name.toLowerCase(); + + if ( !isXML ) { + + // Avoid an infinite loop by temporarily removing this function from the getter + handle = attrHandle[ lowercaseName ]; + attrHandle[ lowercaseName ] = ret; + ret = getter( elem, name, isXML ) != null ? + lowercaseName : + null; + attrHandle[ lowercaseName ] = handle; + } + return ret; + }; +} ); + + + + +var rfocusable = /^(?:input|select|textarea|button)$/i, + rclickable = /^(?:a|area)$/i; + +jQuery.fn.extend( { + prop: function( name, value ) { + return access( this, jQuery.prop, name, value, arguments.length > 1 ); + }, + + removeProp: function( name ) { + return this.each( function() { + delete this[ jQuery.propFix[ name ] || name ]; + } ); + } +} ); + +jQuery.extend( { + prop: function( elem, name, value ) { + var ret, hooks, + nType = elem.nodeType; + + // Don't get/set properties on text, comment and attribute nodes + if ( nType === 3 || nType === 8 || nType === 2 ) { + return; + } + + if ( nType !== 1 || !jQuery.isXMLDoc( elem ) ) { + + // Fix name and attach hooks + name = jQuery.propFix[ name ] || name; + hooks = jQuery.propHooks[ name ]; + } + + if ( value !== undefined ) { + if ( hooks && "set" in hooks && + ( ret = hooks.set( elem, value, name ) ) !== undefined ) { + return ret; + } + + return ( elem[ name ] = value ); + } + + if ( hooks && "get" in hooks && ( ret = hooks.get( elem, name ) ) !== null ) { + return ret; + } + + return elem[ name ]; + }, + + propHooks: { + tabIndex: { + get: function( elem ) { + + // Support: IE <=9 - 11 only + // elem.tabIndex doesn't always return the + // correct value when it hasn't been explicitly set + // https://web.archive.org/web/20141116233347/http://fluidproject.org/blog/2008/01/09/getting-setting-and-removing-tabindex-values-with-javascript/ + // Use proper attribute retrieval(#12072) + var tabindex = jQuery.find.attr( elem, "tabindex" ); + + if ( tabindex ) { + return parseInt( tabindex, 10 ); + } + + if ( + rfocusable.test( elem.nodeName ) || + rclickable.test( elem.nodeName ) && + elem.href + ) { + return 0; + } + + return -1; + } + } + }, + + propFix: { + "for": "htmlFor", + "class": "className" + } +} ); + +// Support: IE <=11 only +// Accessing the selectedIndex property +// forces the browser to respect setting selected +// on the option +// The getter ensures a default option is selected +// when in an 
optgroup
+// eslint rule "no-unused-expressions" is disabled for this code
+// since it considers such accessions noop
+if ( !support.optSelected ) {
+	jQuery.propHooks.selected = {
+		get: function( elem ) {
+
+			/* eslint no-unused-expressions: "off" */
+
+			var parent = elem.parentNode;
+			if ( parent && parent.parentNode ) {
+				parent.parentNode.selectedIndex;
+			}
+			return null;
+		},
+		set: function( elem ) {
+
+			/* eslint no-unused-expressions: "off" */
+
+			var parent = elem.parentNode;
+			if ( parent ) {
+				parent.selectedIndex;
+
+				if ( parent.parentNode ) {
+					parent.parentNode.selectedIndex;
+				}
+			}
+		}
+	};
+}
+
+jQuery.each( [
+	"tabIndex",
+	"readOnly",
+	"maxLength",
+	"cellSpacing",
+	"cellPadding",
+	"rowSpan",
+	"colSpan",
+	"useMap",
+	"frameBorder",
+	"contentEditable"
+], function() {
+	jQuery.propFix[ this.toLowerCase() ] = this;
+} );
+
+
+
+
+	// Strip and collapse whitespace according to HTML spec
+	// https://infra.spec.whatwg.org/#strip-and-collapse-ascii-whitespace
+	function stripAndCollapse( value ) {
+		var tokens = value.match( rnothtmlwhite ) || [];
+		return tokens.join( " " );
+	}
+
+
+function getClass( elem ) {
+	return elem.getAttribute && elem.getAttribute( "class" ) || "";
+}
+
+function classesToArray( value ) {
+	if ( Array.isArray( value ) ) {
+		return value;
+	}
+	if ( typeof value === "string" ) {
+		return value.match( rnothtmlwhite ) || [];
+	}
+	return [];
+}
+
+jQuery.fn.extend( {
+	addClass: function( value ) {
+		var classes, elem, cur, curValue, clazz, j, finalValue,
+			i = 0;
+
+		if ( isFunction( value ) ) {
+			return this.each( function( j ) {
+				jQuery( this ).addClass( value.call( this, j, getClass( this ) ) );
+			} );
+		}
+
+		classes = classesToArray( value );
+
+		if ( classes.length ) {
+			while ( ( elem = this[ i++ ] ) ) {
+				curValue = getClass( elem );
+				cur = elem.nodeType === 1 && ( " " + stripAndCollapse( curValue ) + " " );
+
+				if ( cur ) {
+					j = 0;
+					while ( ( clazz = classes[ j++ ] ) ) {
+						if ( cur.indexOf( " " + clazz + " " ) < 0 ) {
+							cur += clazz + " ";
+						}
+					}
+
+					// Only assign if different to avoid unneeded rendering.
+					finalValue = stripAndCollapse( cur );
+					if ( curValue !== finalValue ) {
+						elem.setAttribute( "class", finalValue );
+					}
+				}
+			}
+		}
+
+		return this;
+	},
+
+	removeClass: function( value ) {
+		var classes, elem, cur, curValue, clazz, j, finalValue,
+			i = 0;
+
+		if ( isFunction( value ) ) {
+			return this.each( function( j ) {
+				jQuery( this ).removeClass( value.call( this, j, getClass( this ) ) );
+			} );
+		}
+
+		if ( !arguments.length ) {
+			return this.attr( "class", "" );
+		}
+
+		classes = classesToArray( value );
+
+		if ( classes.length ) {
+			while ( ( elem = this[ i++ ] ) ) {
+				curValue = getClass( elem );
+
+				// This expression is here for better compressibility (see addClass)
+				cur = elem.nodeType === 1 && ( " " + stripAndCollapse( curValue ) + " " );
+
+				if ( cur ) {
+					j = 0;
+					while ( ( clazz = classes[ j++ ] ) ) {
+
+						// Remove *all* instances
+						while ( cur.indexOf( " " + clazz + " " ) > -1 ) {
+							cur = cur.replace( " " + clazz + " ", " " );
+						}
+					}
+
+					// Only assign if different to avoid unneeded rendering.
+					finalValue = stripAndCollapse( cur );
+					if ( curValue !== finalValue ) {
+						elem.setAttribute( "class", finalValue );
+					}
+				}
+			}
+		}
+
+		return this;
+	},
+
+	toggleClass: function( value, stateVal ) {
+		var type = typeof value,
+			isValidValue = type === "string" || Array.isArray( value );
+
+		if ( typeof stateVal === "boolean" && isValidValue ) {
+			return stateVal ?
this.addClass( value ) : this.removeClass( value );
+		}
+
+		if ( isFunction( value ) ) {
+			return this.each( function( i ) {
+				jQuery( this ).toggleClass(
+					value.call( this, i, getClass( this ), stateVal ),
+					stateVal
+				);
+			} );
+		}
+
+		return this.each( function() {
+			var className, i, self, classNames;
+
+			if ( isValidValue ) {
+
+				// Toggle individual class names
+				i = 0;
+				self = jQuery( this );
+				classNames = classesToArray( value );
+
+				while ( ( className = classNames[ i++ ] ) ) {
+
+					// Check each className given, space separated list
+					if ( self.hasClass( className ) ) {
+						self.removeClass( className );
+					} else {
+						self.addClass( className );
+					}
+				}
+
+			// Toggle whole class name
+			} else if ( value === undefined || type === "boolean" ) {
+				className = getClass( this );
+				if ( className ) {
+
+					// Store className if set
+					dataPriv.set( this, "__className__", className );
+				}
+
+				// If the element has a class name or if we're passed `false`,
+				// then remove the whole classname (if there was one, the above saved it).
+				// Otherwise bring back whatever was previously saved (if anything),
+				// falling back to the empty string if nothing was stored.
+				if ( this.setAttribute ) {
+					this.setAttribute( "class",
+						className || value === false ?
+							"" :
+							dataPriv.get( this, "__className__" ) || ""
+					);
+				}
+			}
+		} );
+	},
+
+	hasClass: function( selector ) {
+		var className, elem,
+			i = 0;
+
+		className = " " + selector + " ";
+		while ( ( elem = this[ i++ ] ) ) {
+			if ( elem.nodeType === 1 &&
+				( " " + stripAndCollapse( getClass( elem ) ) + " " ).indexOf( className ) > -1 ) {
+				return true;
+			}
+		}
+
+		return false;
+	}
+} );
+
+
+
+
+var rreturn = /\r/g;
+
+jQuery.fn.extend( {
+	val: function( value ) {
+		var hooks, ret, valueIsFunction,
+			elem = this[ 0 ];
+
+		if ( !arguments.length ) {
+			if ( elem ) {
+				hooks = jQuery.valHooks[ elem.type ] ||
+					jQuery.valHooks[ elem.nodeName.toLowerCase() ];
+
+				if ( hooks &&
+					"get" in hooks &&
+					( ret = hooks.get( elem, "value" ) ) !== undefined
+				) {
+					return ret;
+				}
+
+				ret = elem.value;
+
+				// Handle most common string cases
+				if ( typeof ret === "string" ) {
+					return ret.replace( rreturn, "" );
+				}
+
+				// Handle cases where value is null/undef or number
+				return ret == null ? "" : ret;
+			}
+
+			return;
+		}
+
+		valueIsFunction = isFunction( value );
+
+		return this.each( function( i ) {
+			var val;
+
+			if ( this.nodeType !== 1 ) {
+				return;
+			}
+
+			if ( valueIsFunction ) {
+				val = value.call( this, i, jQuery( this ).val() );
+			} else {
+				val = value;
+			}
+
+			// Treat null/undefined as ""; convert numbers to string
+			if ( val == null ) {
+				val = "";
+
+			} else if ( typeof val === "number" ) {
+				val += "";
+
+			} else if ( Array.isArray( val ) ) {
+				val = jQuery.map( val, function( value ) {
+					return value == null ? "" : value + "";
+				} );
+			}
+
+			hooks = jQuery.valHooks[ this.type ] || jQuery.valHooks[ this.nodeName.toLowerCase() ];
+
+			// If set returns undefined, fall back to normal setting
+			if ( !hooks || !( "set" in hooks ) || hooks.set( this, val, "value" ) === undefined ) {
+				this.value = val;
+			}
+		} );
+	}
+} );
+
+jQuery.extend( {
+	valHooks: {
+		option: {
+			get: function( elem ) {
+
+				var val = jQuery.find.attr( elem, "value" );
+				return val != null ?
+ val : + + // Support: IE <=10 - 11 only + // option.text throws exceptions (#14686, #14858) + // Strip and collapse whitespace + // https://html.spec.whatwg.org/#strip-and-collapse-whitespace + stripAndCollapse( jQuery.text( elem ) ); + } + }, + select: { + get: function( elem ) { + var value, option, i, + options = elem.options, + index = elem.selectedIndex, + one = elem.type === "select-one", + values = one ? null : [], + max = one ? index + 1 : options.length; + + if ( index < 0 ) { + i = max; + + } else { + i = one ? index : 0; + } + + // Loop through all the selected options + for ( ; i < max; i++ ) { + option = options[ i ]; + + // Support: IE <=9 only + // IE8-9 doesn't update selected after form reset (#2551) + if ( ( option.selected || i === index ) && + + // Don't return options that are disabled or in a disabled optgroup + !option.disabled && + ( !option.parentNode.disabled || + !nodeName( option.parentNode, "optgroup" ) ) ) { + + // Get the specific value for the option + value = jQuery( option ).val(); + + // We don't need an array for one selects + if ( one ) { + return value; + } + + // Multi-Selects return an array + values.push( value ); + } + } + + return values; + }, + + set: function( elem, value ) { + var optionSet, option, + options = elem.options, + values = jQuery.makeArray( value ), + i = options.length; + + while ( i-- ) { + option = options[ i ]; + + /* eslint-disable no-cond-assign */ + + if ( option.selected = + jQuery.inArray( jQuery.valHooks.option.get( option ), values ) > -1 + ) { + optionSet = true; + } + + /* eslint-enable no-cond-assign */ + } + + // Force browsers to behave consistently when non-matching value is set + if ( !optionSet ) { + elem.selectedIndex = -1; + } + return values; + } + } + } +} ); + +// Radios and checkboxes getter/setter +jQuery.each( [ "radio", "checkbox" ], function() { + jQuery.valHooks[ this ] = { + set: function( elem, value ) { + if ( Array.isArray( value ) ) { + return ( elem.checked = jQuery.inArray( jQuery( elem ).val(), value ) > -1 ); + } + } + }; + if ( !support.checkOn ) { + jQuery.valHooks[ this ].get = function( elem ) { + return elem.getAttribute( "value" ) === null ? "on" : elem.value; + }; + } +} ); + + + + +// Return jQuery for attributes-only inclusion + + +support.focusin = "onfocusin" in window; + + +var rfocusMorph = /^(?:focusinfocus|focusoutblur)$/, + stopPropagationCallback = function( e ) { + e.stopPropagation(); + }; + +jQuery.extend( jQuery.event, { + + trigger: function( event, data, elem, onlyHandlers ) { + + var i, cur, tmp, bubbleType, ontype, handle, special, lastElement, + eventPath = [ elem || document ], + type = hasOwn.call( event, "type" ) ? event.type : event, + namespaces = hasOwn.call( event, "namespace" ) ? event.namespace.split( "." ) : []; + + cur = lastElement = tmp = elem = elem || document; + + // Don't do events on text and comment nodes + if ( elem.nodeType === 3 || elem.nodeType === 8 ) { + return; + } + + // focus/blur morphs to focusin/out; ensure we're not firing them right now + if ( rfocusMorph.test( type + jQuery.event.triggered ) ) { + return; + } + + if ( type.indexOf( "." ) > -1 ) { + + // Namespaced trigger; create a regexp to match event type in handle() + namespaces = type.split( "." ); + type = namespaces.shift(); + namespaces.sort(); + } + ontype = type.indexOf( ":" ) < 0 && "on" + type; + + // Caller can pass in a jQuery.Event object, Object, or just an event type string + event = event[ jQuery.expando ] ? 
+ event : + new jQuery.Event( type, typeof event === "object" && event ); + + // Trigger bitmask: & 1 for native handlers; & 2 for jQuery (always true) + event.isTrigger = onlyHandlers ? 2 : 3; + event.namespace = namespaces.join( "." ); + event.rnamespace = event.namespace ? + new RegExp( "(^|\\.)" + namespaces.join( "\\.(?:.*\\.|)" ) + "(\\.|$)" ) : + null; + + // Clean up the event in case it is being reused + event.result = undefined; + if ( !event.target ) { + event.target = elem; + } + + // Clone any incoming data and prepend the event, creating the handler arg list + data = data == null ? + [ event ] : + jQuery.makeArray( data, [ event ] ); + + // Allow special events to draw outside the lines + special = jQuery.event.special[ type ] || {}; + if ( !onlyHandlers && special.trigger && special.trigger.apply( elem, data ) === false ) { + return; + } + + // Determine event propagation path in advance, per W3C events spec (#9951) + // Bubble up to document, then to window; watch for a global ownerDocument var (#9724) + if ( !onlyHandlers && !special.noBubble && !isWindow( elem ) ) { + + bubbleType = special.delegateType || type; + if ( !rfocusMorph.test( bubbleType + type ) ) { + cur = cur.parentNode; + } + for ( ; cur; cur = cur.parentNode ) { + eventPath.push( cur ); + tmp = cur; + } + + // Only add window if we got to document (e.g., not plain obj or detached DOM) + if ( tmp === ( elem.ownerDocument || document ) ) { + eventPath.push( tmp.defaultView || tmp.parentWindow || window ); + } + } + + // Fire handlers on the event path + i = 0; + while ( ( cur = eventPath[ i++ ] ) && !event.isPropagationStopped() ) { + lastElement = cur; + event.type = i > 1 ? + bubbleType : + special.bindType || type; + + // jQuery handler + handle = ( + dataPriv.get( cur, "events" ) || Object.create( null ) + )[ event.type ] && + dataPriv.get( cur, "handle" ); + if ( handle ) { + handle.apply( cur, data ); + } + + // Native handler + handle = ontype && cur[ ontype ]; + if ( handle && handle.apply && acceptData( cur ) ) { + event.result = handle.apply( cur, data ); + if ( event.result === false ) { + event.preventDefault(); + } + } + } + event.type = type; + + // If nobody prevented the default action, do it now + if ( !onlyHandlers && !event.isDefaultPrevented() ) { + + if ( ( !special._default || + special._default.apply( eventPath.pop(), data ) === false ) && + acceptData( elem ) ) { + + // Call a native DOM method on the target with the same name as the event. 
+ // Don't do default actions on window, that's where global variables be (#6170) + if ( ontype && isFunction( elem[ type ] ) && !isWindow( elem ) ) { + + // Don't re-trigger an onFOO event when we call its FOO() method + tmp = elem[ ontype ]; + + if ( tmp ) { + elem[ ontype ] = null; + } + + // Prevent re-triggering of the same event, since we already bubbled it above + jQuery.event.triggered = type; + + if ( event.isPropagationStopped() ) { + lastElement.addEventListener( type, stopPropagationCallback ); + } + + elem[ type ](); + + if ( event.isPropagationStopped() ) { + lastElement.removeEventListener( type, stopPropagationCallback ); + } + + jQuery.event.triggered = undefined; + + if ( tmp ) { + elem[ ontype ] = tmp; + } + } + } + } + + return event.result; + }, + + // Piggyback on a donor event to simulate a different one + // Used only for `focus(in | out)` events + simulate: function( type, elem, event ) { + var e = jQuery.extend( + new jQuery.Event(), + event, + { + type: type, + isSimulated: true + } + ); + + jQuery.event.trigger( e, null, elem ); + } + +} ); + +jQuery.fn.extend( { + + trigger: function( type, data ) { + return this.each( function() { + jQuery.event.trigger( type, data, this ); + } ); + }, + triggerHandler: function( type, data ) { + var elem = this[ 0 ]; + if ( elem ) { + return jQuery.event.trigger( type, data, elem, true ); + } + } +} ); + + +// Support: Firefox <=44 +// Firefox doesn't have focus(in | out) events +// Related ticket - https://bugzilla.mozilla.org/show_bug.cgi?id=687787 +// +// Support: Chrome <=48 - 49, Safari <=9.0 - 9.1 +// focus(in | out) events fire after focus & blur events, +// which is spec violation - http://www.w3.org/TR/DOM-Level-3-Events/#events-focusevent-event-order +// Related ticket - https://bugs.chromium.org/p/chromium/issues/detail?id=449857 +if ( !support.focusin ) { + jQuery.each( { focus: "focusin", blur: "focusout" }, function( orig, fix ) { + + // Attach a single capturing handler on the document while someone wants focusin/focusout + var handler = function( event ) { + jQuery.event.simulate( fix, event.target, jQuery.event.fix( event ) ); + }; + + jQuery.event.special[ fix ] = { + setup: function() { + + // Handle: regular nodes (via `this.ownerDocument`), window + // (via `this.document`) & document (via `this`). + var doc = this.ownerDocument || this.document || this, + attaches = dataPriv.access( doc, fix ); + + if ( !attaches ) { + doc.addEventListener( orig, handler, true ); + } + dataPriv.access( doc, fix, ( attaches || 0 ) + 1 ); + }, + teardown: function() { + var doc = this.ownerDocument || this.document || this, + attaches = dataPriv.access( doc, fix ) - 1; + + if ( !attaches ) { + doc.removeEventListener( orig, handler, true ); + dataPriv.remove( doc, fix ); + + } else { + dataPriv.access( doc, fix, attaches ); + } + } + }; + } ); +} +var location = window.location; + +var nonce = { guid: Date.now() }; + +var rquery = ( /\?/ ); + + + +// Cross-browser xml parsing +jQuery.parseXML = function( data ) { + var xml; + if ( !data || typeof data !== "string" ) { + return null; + } + + // Support: IE 9 - 11 only + // IE throws on parseFromString with invalid input. 
+ try { + xml = ( new window.DOMParser() ).parseFromString( data, "text/xml" ); + } catch ( e ) { + xml = undefined; + } + + if ( !xml || xml.getElementsByTagName( "parsererror" ).length ) { + jQuery.error( "Invalid XML: " + data ); + } + return xml; +}; + + +var + rbracket = /\[\]$/, + rCRLF = /\r?\n/g, + rsubmitterTypes = /^(?:submit|button|image|reset|file)$/i, + rsubmittable = /^(?:input|select|textarea|keygen)/i; + +function buildParams( prefix, obj, traditional, add ) { + var name; + + if ( Array.isArray( obj ) ) { + + // Serialize array item. + jQuery.each( obj, function( i, v ) { + if ( traditional || rbracket.test( prefix ) ) { + + // Treat each array item as a scalar. + add( prefix, v ); + + } else { + + // Item is non-scalar (array or object), encode its numeric index. + buildParams( + prefix + "[" + ( typeof v === "object" && v != null ? i : "" ) + "]", + v, + traditional, + add + ); + } + } ); + + } else if ( !traditional && toType( obj ) === "object" ) { + + // Serialize object item. + for ( name in obj ) { + buildParams( prefix + "[" + name + "]", obj[ name ], traditional, add ); + } + + } else { + + // Serialize scalar item. + add( prefix, obj ); + } +} + +// Serialize an array of form elements or a set of +// key/values into a query string +jQuery.param = function( a, traditional ) { + var prefix, + s = [], + add = function( key, valueOrFunction ) { + + // If value is a function, invoke it and use its return value + var value = isFunction( valueOrFunction ) ? + valueOrFunction() : + valueOrFunction; + + s[ s.length ] = encodeURIComponent( key ) + "=" + + encodeURIComponent( value == null ? "" : value ); + }; + + if ( a == null ) { + return ""; + } + + // If an array was passed in, assume that it is an array of form elements. + if ( Array.isArray( a ) || ( a.jquery && !jQuery.isPlainObject( a ) ) ) { + + // Serialize the form elements + jQuery.each( a, function() { + add( this.name, this.value ); + } ); + + } else { + + // If traditional, encode the "old" way (the way 1.3.2 or older + // did it), otherwise encode params recursively. + for ( prefix in a ) { + buildParams( prefix, a[ prefix ], traditional, add ); + } + } + + // Return the resulting serialization + return s.join( "&" ); +}; + +jQuery.fn.extend( { + serialize: function() { + return jQuery.param( this.serializeArray() ); + }, + serializeArray: function() { + return this.map( function() { + + // Can add propHook for "elements" to filter or add form elements + var elements = jQuery.prop( this, "elements" ); + return elements ? 
jQuery.makeArray( elements ) : this; + } ) + .filter( function() { + var type = this.type; + + // Use .is( ":disabled" ) so that fieldset[disabled] works + return this.name && !jQuery( this ).is( ":disabled" ) && + rsubmittable.test( this.nodeName ) && !rsubmitterTypes.test( type ) && + ( this.checked || !rcheckableType.test( type ) ); + } ) + .map( function( _i, elem ) { + var val = jQuery( this ).val(); + + if ( val == null ) { + return null; + } + + if ( Array.isArray( val ) ) { + return jQuery.map( val, function( val ) { + return { name: elem.name, value: val.replace( rCRLF, "\r\n" ) }; + } ); + } + + return { name: elem.name, value: val.replace( rCRLF, "\r\n" ) }; + } ).get(); + } +} ); + + +var + r20 = /%20/g, + rhash = /#.*$/, + rantiCache = /([?&])_=[^&]*/, + rheaders = /^(.*?):[ \t]*([^\r\n]*)$/mg, + + // #7653, #8125, #8152: local protocol detection + rlocalProtocol = /^(?:about|app|app-storage|.+-extension|file|res|widget):$/, + rnoContent = /^(?:GET|HEAD)$/, + rprotocol = /^\/\//, + + /* Prefilters + * 1) They are useful to introduce custom dataTypes (see ajax/jsonp.js for an example) + * 2) These are called: + * - BEFORE asking for a transport + * - AFTER param serialization (s.data is a string if s.processData is true) + * 3) key is the dataType + * 4) the catchall symbol "*" can be used + * 5) execution will start with transport dataType and THEN continue down to "*" if needed + */ + prefilters = {}, + + /* Transports bindings + * 1) key is the dataType + * 2) the catchall symbol "*" can be used + * 3) selection will start with transport dataType and THEN go to "*" if needed + */ + transports = {}, + + // Avoid comment-prolog char sequence (#10098); must appease lint and evade compression + allTypes = "*/".concat( "*" ), + + // Anchor tag for parsing the document origin + originAnchor = document.createElement( "a" ); + originAnchor.href = location.href; + +// Base "constructor" for jQuery.ajaxPrefilter and jQuery.ajaxTransport +function addToPrefiltersOrTransports( structure ) { + + // dataTypeExpression is optional and defaults to "*" + return function( dataTypeExpression, func ) { + + if ( typeof dataTypeExpression !== "string" ) { + func = dataTypeExpression; + dataTypeExpression = "*"; + } + + var dataType, + i = 0, + dataTypes = dataTypeExpression.toLowerCase().match( rnothtmlwhite ) || []; + + if ( isFunction( func ) ) { + + // For each dataType in the dataTypeExpression + while ( ( dataType = dataTypes[ i++ ] ) ) { + + // Prepend if requested + if ( dataType[ 0 ] === "+" ) { + dataType = dataType.slice( 1 ) || "*"; + ( structure[ dataType ] = structure[ dataType ] || [] ).unshift( func ); + + // Otherwise append + } else { + ( structure[ dataType ] = structure[ dataType ] || [] ).push( func ); + } + } + } + }; +} + +// Base inspection function for prefilters and transports +function inspectPrefiltersOrTransports( structure, options, originalOptions, jqXHR ) { + + var inspected = {}, + seekingTransport = ( structure === transports ); + + function inspect( dataType ) { + var selected; + inspected[ dataType ] = true; + jQuery.each( structure[ dataType ] || [], function( _, prefilterOrFactory ) { + var dataTypeOrTransport = prefilterOrFactory( options, originalOptions, jqXHR ); + if ( typeof dataTypeOrTransport === "string" && + !seekingTransport && !inspected[ dataTypeOrTransport ] ) { + + options.dataTypes.unshift( dataTypeOrTransport ); + inspect( dataTypeOrTransport ); + return false; + } else if ( seekingTransport ) { + return !( selected = dataTypeOrTransport ); + } 
+ } ); + return selected; + } + + return inspect( options.dataTypes[ 0 ] ) || !inspected[ "*" ] && inspect( "*" ); +} + +// A special extend for ajax options +// that takes "flat" options (not to be deep extended) +// Fixes #9887 +function ajaxExtend( target, src ) { + var key, deep, + flatOptions = jQuery.ajaxSettings.flatOptions || {}; + + for ( key in src ) { + if ( src[ key ] !== undefined ) { + ( flatOptions[ key ] ? target : ( deep || ( deep = {} ) ) )[ key ] = src[ key ]; + } + } + if ( deep ) { + jQuery.extend( true, target, deep ); + } + + return target; +} + +/* Handles responses to an ajax request: + * - finds the right dataType (mediates between content-type and expected dataType) + * - returns the corresponding response + */ +function ajaxHandleResponses( s, jqXHR, responses ) { + + var ct, type, finalDataType, firstDataType, + contents = s.contents, + dataTypes = s.dataTypes; + + // Remove auto dataType and get content-type in the process + while ( dataTypes[ 0 ] === "*" ) { + dataTypes.shift(); + if ( ct === undefined ) { + ct = s.mimeType || jqXHR.getResponseHeader( "Content-Type" ); + } + } + + // Check if we're dealing with a known content-type + if ( ct ) { + for ( type in contents ) { + if ( contents[ type ] && contents[ type ].test( ct ) ) { + dataTypes.unshift( type ); + break; + } + } + } + + // Check to see if we have a response for the expected dataType + if ( dataTypes[ 0 ] in responses ) { + finalDataType = dataTypes[ 0 ]; + } else { + + // Try convertible dataTypes + for ( type in responses ) { + if ( !dataTypes[ 0 ] || s.converters[ type + " " + dataTypes[ 0 ] ] ) { + finalDataType = type; + break; + } + if ( !firstDataType ) { + firstDataType = type; + } + } + + // Or just use first one + finalDataType = finalDataType || firstDataType; + } + + // If we found a dataType + // We add the dataType to the list if needed + // and return the corresponding response + if ( finalDataType ) { + if ( finalDataType !== dataTypes[ 0 ] ) { + dataTypes.unshift( finalDataType ); + } + return responses[ finalDataType ]; + } +} + +/* Chain conversions given the request and the original response + * Also sets the responseXXX fields on the jqXHR instance + */ +function ajaxConvert( s, response, jqXHR, isSuccess ) { + var conv2, current, conv, tmp, prev, + converters = {}, + + // Work with a copy of dataTypes in case we need to modify it for conversion + dataTypes = s.dataTypes.slice(); + + // Create converters map with lowercased keys + if ( dataTypes[ 1 ] ) { + for ( conv in s.converters ) { + converters[ conv.toLowerCase() ] = s.converters[ conv ]; + } + } + + current = dataTypes.shift(); + + // Convert to each sequential dataType + while ( current ) { + + if ( s.responseFields[ current ] ) { + jqXHR[ s.responseFields[ current ] ] = response; + } + + // Apply the dataFilter if provided + if ( !prev && isSuccess && s.dataFilter ) { + response = s.dataFilter( response, s.dataType ); + } + + prev = current; + current = dataTypes.shift(); + + if ( current ) { + + // There's only work to do if current dataType is non-auto + if ( current === "*" ) { + + current = prev; + + // Convert response if prev dataType is non-auto and differs from current + } else if ( prev !== "*" && prev !== current ) { + + // Seek a direct converter + conv = converters[ prev + " " + current ] || converters[ "* " + current ]; + + // If none found, seek a pair + if ( !conv ) { + for ( conv2 in converters ) { + + // If conv2 outputs current + tmp = conv2.split( " " ); + if ( tmp[ 1 ] === current ) { + + // If 
prev can be converted to accepted input + conv = converters[ prev + " " + tmp[ 0 ] ] || + converters[ "* " + tmp[ 0 ] ]; + if ( conv ) { + + // Condense equivalence converters + if ( conv === true ) { + conv = converters[ conv2 ]; + + // Otherwise, insert the intermediate dataType + } else if ( converters[ conv2 ] !== true ) { + current = tmp[ 0 ]; + dataTypes.unshift( tmp[ 1 ] ); + } + break; + } + } + } + } + + // Apply converter (if not an equivalence) + if ( conv !== true ) { + + // Unless errors are allowed to bubble, catch and return them + if ( conv && s.throws ) { + response = conv( response ); + } else { + try { + response = conv( response ); + } catch ( e ) { + return { + state: "parsererror", + error: conv ? e : "No conversion from " + prev + " to " + current + }; + } + } + } + } + } + } + + return { state: "success", data: response }; +} + +jQuery.extend( { + + // Counter for holding the number of active queries + active: 0, + + // Last-Modified header cache for next request + lastModified: {}, + etag: {}, + + ajaxSettings: { + url: location.href, + type: "GET", + isLocal: rlocalProtocol.test( location.protocol ), + global: true, + processData: true, + async: true, + contentType: "application/x-www-form-urlencoded; charset=UTF-8", + + /* + timeout: 0, + data: null, + dataType: null, + username: null, + password: null, + cache: null, + throws: false, + traditional: false, + headers: {}, + */ + + accepts: { + "*": allTypes, + text: "text/plain", + html: "text/html", + xml: "application/xml, text/xml", + json: "application/json, text/javascript" + }, + + contents: { + xml: /\bxml\b/, + html: /\bhtml/, + json: /\bjson\b/ + }, + + responseFields: { + xml: "responseXML", + text: "responseText", + json: "responseJSON" + }, + + // Data converters + // Keys separate source (or catchall "*") and destination types with a single space + converters: { + + // Convert anything to text + "* text": String, + + // Text to html (true = no transformation) + "text html": true, + + // Evaluate text as a json expression + "text json": JSON.parse, + + // Parse text as xml + "text xml": jQuery.parseXML + }, + + // For options that shouldn't be deep extended: + // you can add your own custom options here if + // and when you create one that shouldn't be + // deep extended (see ajaxExtend) + flatOptions: { + url: true, + context: true + } + }, + + // Creates a full fledged settings object into target + // with both ajaxSettings and settings fields. + // If target is omitted, writes into ajaxSettings. + ajaxSetup: function( target, settings ) { + return settings ? 
+ + // Building a settings object + ajaxExtend( ajaxExtend( target, jQuery.ajaxSettings ), settings ) : + + // Extending ajaxSettings + ajaxExtend( jQuery.ajaxSettings, target ); + }, + + ajaxPrefilter: addToPrefiltersOrTransports( prefilters ), + ajaxTransport: addToPrefiltersOrTransports( transports ), + + // Main method + ajax: function( url, options ) { + + // If url is an object, simulate pre-1.5 signature + if ( typeof url === "object" ) { + options = url; + url = undefined; + } + + // Force options to be an object + options = options || {}; + + var transport, + + // URL without anti-cache param + cacheURL, + + // Response headers + responseHeadersString, + responseHeaders, + + // timeout handle + timeoutTimer, + + // Url cleanup var + urlAnchor, + + // Request state (becomes false upon send and true upon completion) + completed, + + // To know if global events are to be dispatched + fireGlobals, + + // Loop variable + i, + + // uncached part of the url + uncached, + + // Create the final options object + s = jQuery.ajaxSetup( {}, options ), + + // Callbacks context + callbackContext = s.context || s, + + // Context for global events is callbackContext if it is a DOM node or jQuery collection + globalEventContext = s.context && + ( callbackContext.nodeType || callbackContext.jquery ) ? + jQuery( callbackContext ) : + jQuery.event, + + // Deferreds + deferred = jQuery.Deferred(), + completeDeferred = jQuery.Callbacks( "once memory" ), + + // Status-dependent callbacks + statusCode = s.statusCode || {}, + + // Headers (they are sent all at once) + requestHeaders = {}, + requestHeadersNames = {}, + + // Default abort message + strAbort = "canceled", + + // Fake xhr + jqXHR = { + readyState: 0, + + // Builds headers hashtable if needed + getResponseHeader: function( key ) { + var match; + if ( completed ) { + if ( !responseHeaders ) { + responseHeaders = {}; + while ( ( match = rheaders.exec( responseHeadersString ) ) ) { + responseHeaders[ match[ 1 ].toLowerCase() + " " ] = + ( responseHeaders[ match[ 1 ].toLowerCase() + " " ] || [] ) + .concat( match[ 2 ] ); + } + } + match = responseHeaders[ key.toLowerCase() + " " ]; + } + return match == null ? null : match.join( ", " ); + }, + + // Raw string + getAllResponseHeaders: function() { + return completed ? 
responseHeadersString : null; + }, + + // Caches the header + setRequestHeader: function( name, value ) { + if ( completed == null ) { + name = requestHeadersNames[ name.toLowerCase() ] = + requestHeadersNames[ name.toLowerCase() ] || name; + requestHeaders[ name ] = value; + } + return this; + }, + + // Overrides response content-type header + overrideMimeType: function( type ) { + if ( completed == null ) { + s.mimeType = type; + } + return this; + }, + + // Status-dependent callbacks + statusCode: function( map ) { + var code; + if ( map ) { + if ( completed ) { + + // Execute the appropriate callbacks + jqXHR.always( map[ jqXHR.status ] ); + } else { + + // Lazy-add the new callbacks in a way that preserves old ones + for ( code in map ) { + statusCode[ code ] = [ statusCode[ code ], map[ code ] ]; + } + } + } + return this; + }, + + // Cancel the request + abort: function( statusText ) { + var finalText = statusText || strAbort; + if ( transport ) { + transport.abort( finalText ); + } + done( 0, finalText ); + return this; + } + }; + + // Attach deferreds + deferred.promise( jqXHR ); + + // Add protocol if not provided (prefilters might expect it) + // Handle falsy url in the settings object (#10093: consistency with old signature) + // We also use the url parameter if available + s.url = ( ( url || s.url || location.href ) + "" ) + .replace( rprotocol, location.protocol + "//" ); + + // Alias method option to type as per ticket #12004 + s.type = options.method || options.type || s.method || s.type; + + // Extract dataTypes list + s.dataTypes = ( s.dataType || "*" ).toLowerCase().match( rnothtmlwhite ) || [ "" ]; + + // A cross-domain request is in order when the origin doesn't match the current origin. + if ( s.crossDomain == null ) { + urlAnchor = document.createElement( "a" ); + + // Support: IE <=8 - 11, Edge 12 - 15 + // IE throws exception on accessing the href property if url is malformed, + // e.g. 
http://example.com:80x/ + try { + urlAnchor.href = s.url; + + // Support: IE <=8 - 11 only + // Anchor's host property isn't correctly set when s.url is relative + urlAnchor.href = urlAnchor.href; + s.crossDomain = originAnchor.protocol + "//" + originAnchor.host !== + urlAnchor.protocol + "//" + urlAnchor.host; + } catch ( e ) { + + // If there is an error parsing the URL, assume it is crossDomain, + // it can be rejected by the transport if it is invalid + s.crossDomain = true; + } + } + + // Convert data if not already a string + if ( s.data && s.processData && typeof s.data !== "string" ) { + s.data = jQuery.param( s.data, s.traditional ); + } + + // Apply prefilters + inspectPrefiltersOrTransports( prefilters, s, options, jqXHR ); + + // If request was aborted inside a prefilter, stop there + if ( completed ) { + return jqXHR; + } + + // We can fire global events as of now if asked to + // Don't fire events if jQuery.event is undefined in an AMD-usage scenario (#15118) + fireGlobals = jQuery.event && s.global; + + // Watch for a new set of requests + if ( fireGlobals && jQuery.active++ === 0 ) { + jQuery.event.trigger( "ajaxStart" ); + } + + // Uppercase the type + s.type = s.type.toUpperCase(); + + // Determine if request has content + s.hasContent = !rnoContent.test( s.type ); + + // Save the URL in case we're toying with the If-Modified-Since + // and/or If-None-Match header later on + // Remove hash to simplify url manipulation + cacheURL = s.url.replace( rhash, "" ); + + // More options handling for requests with no content + if ( !s.hasContent ) { + + // Remember the hash so we can put it back + uncached = s.url.slice( cacheURL.length ); + + // If data is available and should be processed, append data to url + if ( s.data && ( s.processData || typeof s.data === "string" ) ) { + cacheURL += ( rquery.test( cacheURL ) ? "&" : "?" ) + s.data; + + // #9682: remove data so that it's not used in an eventual retry + delete s.data; + } + + // Add or update anti-cache param if needed + if ( s.cache === false ) { + cacheURL = cacheURL.replace( rantiCache, "$1" ); + uncached = ( rquery.test( cacheURL ) ? "&" : "?" ) + "_=" + ( nonce.guid++ ) + + uncached; + } + + // Put hash and anti-cache on the URL that will be requested (gh-1732) + s.url = cacheURL + uncached; + + // Change '%20' to '+' if this is encoded form body content (gh-2658) + } else if ( s.data && s.processData && + ( s.contentType || "" ).indexOf( "application/x-www-form-urlencoded" ) === 0 ) { + s.data = s.data.replace( r20, "+" ); + } + + // Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode. + if ( s.ifModified ) { + if ( jQuery.lastModified[ cacheURL ] ) { + jqXHR.setRequestHeader( "If-Modified-Since", jQuery.lastModified[ cacheURL ] ); + } + if ( jQuery.etag[ cacheURL ] ) { + jqXHR.setRequestHeader( "If-None-Match", jQuery.etag[ cacheURL ] ); + } + } + + // Set the correct header, if data is being sent + if ( s.data && s.hasContent && s.contentType !== false || options.contentType ) { + jqXHR.setRequestHeader( "Content-Type", s.contentType ); + } + + // Set the Accepts header for the server, depending on the dataType + jqXHR.setRequestHeader( + "Accept", + s.dataTypes[ 0 ] && s.accepts[ s.dataTypes[ 0 ] ] ? + s.accepts[ s.dataTypes[ 0 ] ] + + ( s.dataTypes[ 0 ] !== "*" ? 
", " + allTypes + "; q=0.01" : "" ) : + s.accepts[ "*" ] + ); + + // Check for headers option + for ( i in s.headers ) { + jqXHR.setRequestHeader( i, s.headers[ i ] ); + } + + // Allow custom headers/mimetypes and early abort + if ( s.beforeSend && + ( s.beforeSend.call( callbackContext, jqXHR, s ) === false || completed ) ) { + + // Abort if not done already and return + return jqXHR.abort(); + } + + // Aborting is no longer a cancellation + strAbort = "abort"; + + // Install callbacks on deferreds + completeDeferred.add( s.complete ); + jqXHR.done( s.success ); + jqXHR.fail( s.error ); + + // Get transport + transport = inspectPrefiltersOrTransports( transports, s, options, jqXHR ); + + // If no transport, we auto-abort + if ( !transport ) { + done( -1, "No Transport" ); + } else { + jqXHR.readyState = 1; + + // Send global event + if ( fireGlobals ) { + globalEventContext.trigger( "ajaxSend", [ jqXHR, s ] ); + } + + // If request was aborted inside ajaxSend, stop there + if ( completed ) { + return jqXHR; + } + + // Timeout + if ( s.async && s.timeout > 0 ) { + timeoutTimer = window.setTimeout( function() { + jqXHR.abort( "timeout" ); + }, s.timeout ); + } + + try { + completed = false; + transport.send( requestHeaders, done ); + } catch ( e ) { + + // Rethrow post-completion exceptions + if ( completed ) { + throw e; + } + + // Propagate others as results + done( -1, e ); + } + } + + // Callback for when everything is done + function done( status, nativeStatusText, responses, headers ) { + var isSuccess, success, error, response, modified, + statusText = nativeStatusText; + + // Ignore repeat invocations + if ( completed ) { + return; + } + + completed = true; + + // Clear timeout if it exists + if ( timeoutTimer ) { + window.clearTimeout( timeoutTimer ); + } + + // Dereference transport for early garbage collection + // (no matter how long the jqXHR object will be used) + transport = undefined; + + // Cache response headers + responseHeadersString = headers || ""; + + // Set readyState + jqXHR.readyState = status > 0 ? 4 : 0; + + // Determine if successful + isSuccess = status >= 200 && status < 300 || status === 304; + + // Get response data + if ( responses ) { + response = ajaxHandleResponses( s, jqXHR, responses ); + } + + // Use a noop converter for missing script + if ( !isSuccess && jQuery.inArray( "script", s.dataTypes ) > -1 ) { + s.converters[ "text script" ] = function() {}; + } + + // Convert no matter what (that way responseXXX fields are always set) + response = ajaxConvert( s, response, jqXHR, isSuccess ); + + // If successful, handle type chaining + if ( isSuccess ) { + + // Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode. 
+ if ( s.ifModified ) { + modified = jqXHR.getResponseHeader( "Last-Modified" ); + if ( modified ) { + jQuery.lastModified[ cacheURL ] = modified; + } + modified = jqXHR.getResponseHeader( "etag" ); + if ( modified ) { + jQuery.etag[ cacheURL ] = modified; + } + } + + // if no content + if ( status === 204 || s.type === "HEAD" ) { + statusText = "nocontent"; + + // if not modified + } else if ( status === 304 ) { + statusText = "notmodified"; + + // If we have data, let's convert it + } else { + statusText = response.state; + success = response.data; + error = response.error; + isSuccess = !error; + } + } else { + + // Extract error from statusText and normalize for non-aborts + error = statusText; + if ( status || !statusText ) { + statusText = "error"; + if ( status < 0 ) { + status = 0; + } + } + } + + // Set data for the fake xhr object + jqXHR.status = status; + jqXHR.statusText = ( nativeStatusText || statusText ) + ""; + + // Success/Error + if ( isSuccess ) { + deferred.resolveWith( callbackContext, [ success, statusText, jqXHR ] ); + } else { + deferred.rejectWith( callbackContext, [ jqXHR, statusText, error ] ); + } + + // Status-dependent callbacks + jqXHR.statusCode( statusCode ); + statusCode = undefined; + + if ( fireGlobals ) { + globalEventContext.trigger( isSuccess ? "ajaxSuccess" : "ajaxError", + [ jqXHR, s, isSuccess ? success : error ] ); + } + + // Complete + completeDeferred.fireWith( callbackContext, [ jqXHR, statusText ] ); + + if ( fireGlobals ) { + globalEventContext.trigger( "ajaxComplete", [ jqXHR, s ] ); + + // Handle the global AJAX counter + if ( !( --jQuery.active ) ) { + jQuery.event.trigger( "ajaxStop" ); + } + } + } + + return jqXHR; + }, + + getJSON: function( url, data, callback ) { + return jQuery.get( url, data, callback, "json" ); + }, + + getScript: function( url, callback ) { + return jQuery.get( url, undefined, callback, "script" ); + } +} ); + +jQuery.each( [ "get", "post" ], function( _i, method ) { + jQuery[ method ] = function( url, data, callback, type ) { + + // Shift arguments if data argument was omitted + if ( isFunction( data ) ) { + type = type || callback; + callback = data; + data = undefined; + } + + // The url can be an options object (which then must have .url) + return jQuery.ajax( jQuery.extend( { + url: url, + type: method, + dataType: type, + data: data, + success: callback + }, jQuery.isPlainObject( url ) && url ) ); + }; +} ); + +jQuery.ajaxPrefilter( function( s ) { + var i; + for ( i in s.headers ) { + if ( i.toLowerCase() === "content-type" ) { + s.contentType = s.headers[ i ] || ""; + } + } +} ); + + +jQuery._evalUrl = function( url, options, doc ) { + return jQuery.ajax( { + url: url, + + // Make this explicit, since user can override this through ajaxSetup (#11264) + type: "GET", + dataType: "script", + cache: true, + async: false, + global: false, + + // Only evaluate the response if it is successful (gh-4126) + // dataFilter is not invoked for failure responses, so using it instead + // of the default converter is kludgy but it works. 
+ converters: { + "text script": function() {} + }, + dataFilter: function( response ) { + jQuery.globalEval( response, options, doc ); + } + } ); +}; + + +jQuery.fn.extend( { + wrapAll: function( html ) { + var wrap; + + if ( this[ 0 ] ) { + if ( isFunction( html ) ) { + html = html.call( this[ 0 ] ); + } + + // The elements to wrap the target around + wrap = jQuery( html, this[ 0 ].ownerDocument ).eq( 0 ).clone( true ); + + if ( this[ 0 ].parentNode ) { + wrap.insertBefore( this[ 0 ] ); + } + + wrap.map( function() { + var elem = this; + + while ( elem.firstElementChild ) { + elem = elem.firstElementChild; + } + + return elem; + } ).append( this ); + } + + return this; + }, + + wrapInner: function( html ) { + if ( isFunction( html ) ) { + return this.each( function( i ) { + jQuery( this ).wrapInner( html.call( this, i ) ); + } ); + } + + return this.each( function() { + var self = jQuery( this ), + contents = self.contents(); + + if ( contents.length ) { + contents.wrapAll( html ); + + } else { + self.append( html ); + } + } ); + }, + + wrap: function( html ) { + var htmlIsFunction = isFunction( html ); + + return this.each( function( i ) { + jQuery( this ).wrapAll( htmlIsFunction ? html.call( this, i ) : html ); + } ); + }, + + unwrap: function( selector ) { + this.parent( selector ).not( "body" ).each( function() { + jQuery( this ).replaceWith( this.childNodes ); + } ); + return this; + } +} ); + + +jQuery.expr.pseudos.hidden = function( elem ) { + return !jQuery.expr.pseudos.visible( elem ); +}; +jQuery.expr.pseudos.visible = function( elem ) { + return !!( elem.offsetWidth || elem.offsetHeight || elem.getClientRects().length ); +}; + + + + +jQuery.ajaxSettings.xhr = function() { + try { + return new window.XMLHttpRequest(); + } catch ( e ) {} +}; + +var xhrSuccessStatus = { + + // File protocol always yields status code 0, assume 200 + 0: 200, + + // Support: IE <=9 only + // #1450: sometimes IE returns 1223 when it should be 204 + 1223: 204 + }, + xhrSupported = jQuery.ajaxSettings.xhr(); + +support.cors = !!xhrSupported && ( "withCredentials" in xhrSupported ); +support.ajax = xhrSupported = !!xhrSupported; + +jQuery.ajaxTransport( function( options ) { + var callback, errorCallback; + + // Cross domain only allowed if supported through XMLHttpRequest + if ( support.cors || xhrSupported && !options.crossDomain ) { + return { + send: function( headers, complete ) { + var i, + xhr = options.xhr(); + + xhr.open( + options.type, + options.url, + options.async, + options.username, + options.password + ); + + // Apply custom fields if provided + if ( options.xhrFields ) { + for ( i in options.xhrFields ) { + xhr[ i ] = options.xhrFields[ i ]; + } + } + + // Override mime type if needed + if ( options.mimeType && xhr.overrideMimeType ) { + xhr.overrideMimeType( options.mimeType ); + } + + // X-Requested-With header + // For cross-domain requests, seeing as conditions for a preflight are + // akin to a jigsaw puzzle, we simply never set it to be sure. + // (it can always be set on a per-request basis or even using ajaxSetup) + // For same-domain requests, won't change header if already provided. 
+ if ( !options.crossDomain && !headers[ "X-Requested-With" ] ) { + headers[ "X-Requested-With" ] = "XMLHttpRequest"; + } + + // Set headers + for ( i in headers ) { + xhr.setRequestHeader( i, headers[ i ] ); + } + + // Callback + callback = function( type ) { + return function() { + if ( callback ) { + callback = errorCallback = xhr.onload = + xhr.onerror = xhr.onabort = xhr.ontimeout = + xhr.onreadystatechange = null; + + if ( type === "abort" ) { + xhr.abort(); + } else if ( type === "error" ) { + + // Support: IE <=9 only + // On a manual native abort, IE9 throws + // errors on any property access that is not readyState + if ( typeof xhr.status !== "number" ) { + complete( 0, "error" ); + } else { + complete( + + // File: protocol always yields status 0; see #8605, #14207 + xhr.status, + xhr.statusText + ); + } + } else { + complete( + xhrSuccessStatus[ xhr.status ] || xhr.status, + xhr.statusText, + + // Support: IE <=9 only + // IE9 has no XHR2 but throws on binary (trac-11426) + // For XHR2 non-text, let the caller handle it (gh-2498) + ( xhr.responseType || "text" ) !== "text" || + typeof xhr.responseText !== "string" ? + { binary: xhr.response } : + { text: xhr.responseText }, + xhr.getAllResponseHeaders() + ); + } + } + }; + }; + + // Listen to events + xhr.onload = callback(); + errorCallback = xhr.onerror = xhr.ontimeout = callback( "error" ); + + // Support: IE 9 only + // Use onreadystatechange to replace onabort + // to handle uncaught aborts + if ( xhr.onabort !== undefined ) { + xhr.onabort = errorCallback; + } else { + xhr.onreadystatechange = function() { + + // Check readyState before timeout as it changes + if ( xhr.readyState === 4 ) { + + // Allow onerror to be called first, + // but that will not handle a native abort + // Also, save errorCallback to a variable + // as xhr.onerror cannot be accessed + window.setTimeout( function() { + if ( callback ) { + errorCallback(); + } + } ); + } + }; + } + + // Create the abort callback + callback = callback( "abort" ); + + try { + + // Do send the request (this may raise an exception) + xhr.send( options.hasContent && options.data || null ); + } catch ( e ) { + + // #14683: Only rethrow if this hasn't been notified as an error yet + if ( callback ) { + throw e; + } + } + }, + + abort: function() { + if ( callback ) { + callback(); + } + } + }; + } +} ); + + + + +// Prevent auto-execution of scripts when no explicit dataType was provided (See gh-2432) +jQuery.ajaxPrefilter( function( s ) { + if ( s.crossDomain ) { + s.contents.script = false; + } +} ); + +// Install script dataType +jQuery.ajaxSetup( { + accepts: { + script: "text/javascript, application/javascript, " + + "application/ecmascript, application/x-ecmascript" + }, + contents: { + script: /\b(?:java|ecma)script\b/ + }, + converters: { + "text script": function( text ) { + jQuery.globalEval( text ); + return text; + } + } +} ); + +// Handle cache's special case and crossDomain +jQuery.ajaxPrefilter( "script", function( s ) { + if ( s.cache === undefined ) { + s.cache = false; + } + if ( s.crossDomain ) { + s.type = "GET"; + } +} ); + +// Bind script tag hack transport +jQuery.ajaxTransport( "script", function( s ) { + + // This transport only deals with cross domain or forced-by-attrs requests + if ( s.crossDomain || s.scriptAttrs ) { + var script, callback; + return { + send: function( _, complete ) { + script = jQuery( " + + + + + + +
+ +
+ + + +
+
+
+
+
+ +
+
+ + + + + + + + + + + + +
+ +
+ +
+
+

PyCTBN.estimators package

+
+

Submodules

+
+
+

PyCTBN.estimators.fam_score_calculator module

+
+
+class PyCTBN.estimators.fam_score_calculator.FamScoreCalculator
+

Bases: object

+

Has the task of calculating the FamScore of a node by using a Bayesian score function

+
+
+get_fam_score(cims: numpy.array, tau_xu: float = 0.1, alpha_xu: float = 1)
+

Calculate the FamScore value of the node

+
+
Parameters
+
    +
  • cims (np.array) – np.array with all the node’s cims

  • +
  • tau_xu (float, optional) – hyperparameter over the CTBN’s q parameters, default to 0.1

  • +
  • alpha_xu (float, optional) – hyperparameter over the CTBN’s q parameters, default to 1

  • +
+
+
Returns
+

the FamScore value of the node

+
+
Return type
+

float

+
+
+
+ +
+
+marginal_likelihood_q(cims: numpy.array, tau_xu: float = 0.1, alpha_xu: float = 1)
+

Calculate the value of the marginal likelihood over q of the node identified by the label node_id

+
+
Parameters
+
    +
  • cims (np.array) – np.array with all the node’s cims

  • +
  • tau_xu (float) – hyperparameter over the CTBN’s q parameters

  • +
  • alpha_xu (float) – hyperparameter over the CTBN’s q parameters

  • +
+
+
Returns
+

the value of the marginal likelihood over q

+
+
Return type
+

float

+
+
+
+ +
+
+marginal_likelihood_theta(cims: PyCTBN.structure_graph.conditional_intensity_matrix.ConditionalIntensityMatrix, alpha_xu: float, alpha_xxu: float)
+

Calculate the FamScore value of the node identified by the label node_id

+
+
Parameters
+
    +
  • cims (np.array) – np.array with all the node’s cims

  • +
  • alpha_xu (float) – hyperparameter over the CTBN’s q parameters, default to 0.1

  • +
  • alpha_xxu (float) – distributed hyperparameter over the CTBN’s theta parameters

  • +
+
+
Returns
+

the value of the marginal likelihood over theta

+
+
Return type
+

float

+
+
+
+ +
+
+single_cim_xu_marginal_likelihood_q(M_xu_suff_stats: float, T_xu_suff_stats: float, tau_xu: float = 0.1, alpha_xu: float = 1)
+

Calculate the marginal likelihood over q of the node when it assumes a specific value +and a specific parents’ assignment

+
+
Parameters
+
    +
  • M_xu_suff_stats – value of the sufficient statistic M[x|u]

  • +
  • T_xu_suff_stats (float) – value of the sufficient statistic T[x|u]

  • +
  • cim (class:'ConditionalIntensityMatrix') – A conditional_intensity_matrix object with the sufficient statistics

  • +
  • tau_xu (float) – hyperparameter over the CTBN’s q parameters

  • +
  • alpha_xu (float) – hyperparameter over the CTBN’s q parameters

  • +
+
+
Returns
+

the value of the marginal likelihood of the node when it assumes a specific value

+
+
Return type
+

float

+
+
+
+ +
+
+single_cim_xu_marginal_likelihood_theta(index: int, cim: PyCTBN.structure_graph.conditional_intensity_matrix.ConditionalIntensityMatrix, alpha_xu: float, alpha_xxu: float)
+

Calculate the marginal likelihood over theta of the node when it assumes a specific value +and a specific parents’ assignment

+
+
Parameters
+
    +
  • cim (class:'ConditionalIntensityMatrix') – A conditional_intensity_matrix object with the sufficient statistics

  • +
  • alpha_xu (float) – hyperparameter over the CTBN’s q parameters

  • +
  • alpha_xxu (float) – distributed hyperparameter over the CTBN’s theta parameters

  • +
+
+
Returns
+

the value of the marginal likelihood over theta when the node assumes a specific value

+
+
Return type
+

float

+
+
+
+ +
+
+single_internal_cim_xxu_marginal_likelihood_theta(M_xxu_suff_stats: float, alpha_xxu: float = 1)
+

Calculate the second part of the marginal likelihood over theta formula

+
+
Parameters
+
    +
  • M_xxu_suff_stats (float) – value of the sufficient statistic M[xx’|u]

  • +
  • alpha_xxu (float) – distributed hyperparameter over the CTBN’s theta parameters

  • +
+
+
Returns
+

the value of the marginal likelihood over theta when the node assumes a specific value

+
+
Return type
+

float

+
+
+
+ +
+
+variable_cim_xu_marginal_likelihood_q(cim: PyCTBN.structure_graph.conditional_intensity_matrix.ConditionalIntensityMatrix, tau_xu: float = 0.1, alpha_xu: float = 1)
+

Calculate the value of the marginal likelihood over q given a cim

+
+
Parameters
+
    +
  • cim (class:'ConditionalIntensityMatrix') – A conditional_intensity_matrix object with the sufficient statistics

  • +
  • tau_xu (float) – hyperparameter over the CTBN’s q parameters

  • +
  • alpha_xu (float) – hyperparameter over the CTBN’s q parameters

  • +
+
+
Returns
+

the value of the marginal likelihood over q

+
+
Return type
+

float

+
+
+
+ +
+
+variable_cim_xu_marginal_likelihood_theta(cim: PyCTBN.structure_graph.conditional_intensity_matrix.ConditionalIntensityMatrix, alpha_xu: float, alpha_xxu: float)
+

Calculate the value of the marginal likelihood over theta given a cim

+
+
Parameters
+
    +
  • cim (class:'ConditionalIntensityMatrix') – A conditional_intensity_matrix object with the sufficient statistics

  • +
  • alpha_xu (float) – hyperparameter over the CTBN’s q parameters, default to 0.1

  • +
  • alpha_xxu (float) – distributed hyperparameter over the CTBN’s theta parameters

  • +
+
+
Returns
+

the value of the marginal likelihood over theta

+
+
Return type
+

float

+
+
+
+ +
+ +
+
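For orientation, the sketch below shows how a FamScoreCalculator is typically driven. It assumes a SetOfCims has already been computed for a node, for instance by a ParametersEstimator as in the Parameters Estimation Example; the names p1 and node are placeholders taken from that example, and the hyperparameters shown are the documented defaults.

from PyCTBN import FamScoreCalculator

# `p1` is a ParametersEstimator already fast_init()-ed for `node` (assumed names).
sofc = p1.compute_parameters_for_node(node)       # SetOfCims of `node`
calculator = FamScoreCalculator()
score = calculator.get_fam_score(sofc.actual_cims, tau_xu=0.1, alpha_xu=1)
print("FamScore of", node, "is", score)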
+

PyCTBN.estimators.parameters_estimator module

+
+
+class PyCTBN.estimators.parameters_estimator.ParametersEstimator(trajectories: PyCTBN.structure_graph.trajectory.Trajectory, net_graph: PyCTBN.structure_graph.network_graph.NetworkGraph)
+

Bases: object

+

Has the task of computing the CIMs of a particular node given the trajectories and the net structure +in the graph _net_graph.

+
+
Parameters
+
+
+
_single_set_of_cims
+

the set of cims object that will hold the cims of the node

+
+
+
+
+compute_parameters()
+
+ +
+
+compute_parameters_for_node(node_id: str)PyCTBN.structure_graph.set_of_cims.SetOfCims
+

Compute the CIMS of the node identified by the label node_id.

+
+
Parameters
+

node_id (string) – the node label

+
+
Returns
+

A SetOfCims object filled with the computed CIMS

+
+
Return type
+

SetOfCims

+
+
+
+ +
+
+static compute_state_res_time_for_node(times: numpy.ndarray, trajectory: numpy.ndarray, cols_filter: numpy.ndarray, scalar_indexes_struct: numpy.ndarray, T: numpy.ndarray) → None
+

Compute the state residence times for a node and fill the matrix T with the results

+
+
Parameters
+
    +
  • node_indx (int) – the index of the node

  • +
  • times (numpy.array) – the times deltas vector

  • +
  • trajectory (numpy.ndArray) – the trajectory

  • +
  • cols_filter (numpy.array) – the columns filtering structure

  • +
  • scalar_indexes_struct (numpy.array) – the indexing structure

  • +
  • T (numpy.ndArray) – the state residence times vectors

  • +
+
+
+
+ +
+
+static compute_state_transitions_for_a_node(node_indx: int, trajectory: numpy.ndarray, cols_filter: numpy.ndarray, scalar_indexing: numpy.ndarray, M: numpy.ndarray) → None
+

Compute the state transitions for a node and fill the matrices M with the results.

+
+
Parameters
+
    +
  • node_indx (int) – the index of the node

  • +
  • trajectory (numpy.ndArray) – the trajectory

  • +
  • cols_filter (numpy.array) – the columns filtering structure

  • +
  • scalar_indexing (numpy.array) – the indexing structure

  • +
  • M (numpy.ndArray) – the state transitions matrices

  • +
+
+
+
+ +
+
+fast_init(node_id: str) → None
+

Initializes all the necessary structures for the parameters estimation for the node node_id.

+
+
Parameters
+

node_id (string) – the node label

+
+
+
+ +
+
+init_sets_cims_container()
+
+ +
+ +
+
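A short sketch of the intended call sequence: both the graph and the estimator have to be fast_init()-ed for a node before its CIMs are computed. The objects s1 (a built SamplePath) and g (a NetworkGraph over s1.structure) are assumed to exist, as in the Parameters Estimation Example; here the loop simply repeats that example for every node.

from PyCTBN import ParametersEstimator

pe = ParametersEstimator(s1.trajectories, g)
for node in s1.structure.nodes_labels:
    g.fast_init(node)                             # prepare the graph structures for this node
    pe.fast_init(node)                            # prepare the estimator structures for this node
    sofc = pe.compute_parameters_for_node(node)   # SetOfCims with the estimated CIMs
    print(node, "->", sofc.get_cims_number(), "conditional intensity matrices")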
+

PyCTBN.estimators.structure_constraint_based_estimator module

+
+
+class PyCTBN.estimators.structure_constraint_based_estimator.StructureConstraintBasedEstimator(sample_path: PyCTBN.structure_graph.sample_path.SamplePath, exp_test_alfa: float, chi_test_alfa: float, known_edges: List = [], thumb_threshold: int = 25)
+

Bases: PyCTBN.estimators.structure_estimator.StructureEstimator

+

Has the task of estimating the network structure given the trajectories in samplepath by using a constraint-based approach.

+
+
Parameters
+
    +
  • sample_path (SamplePath) – the _sample_path object containing the trajectories and the real structure

  • +
  • exp_test_alfa (float) – the significance level for the exponential Hp test

  • +
  • chi_test_alfa (float) – the significance level for the chi Hp test

  • +
+
+
_nodes
+

the nodes labels

+
+
_nodes_vals
+

the nodes cardinalities

+
+
_nodes_indxs
+

the nodes indexes

+
+
_complete_graph
+

the complete directed graph built using the nodes labels in _nodes

+
+
_cache
+

the Cache object

+
+
+
+
+complete_test(test_parent: str, test_child: str, parent_set: List, child_states_numb: int, tot_vars_count: int, parent_indx, child_indx) → bool
+

Performs a complete independence test on the directed graphs G1 = {test_child U parent_set} +G2 = {G1 U test_parent} (added as an additional parent of the test_child). +Generates all the necessary structures and data to perform the tests.

+
+
Parameters
+
    +
  • test_parent (string) – the node label of the test parent

  • +
  • test_child (string) – the node label of the child

  • +
  • parent_set (List) – the common parent set

  • +
  • child_states_numb (int) – the cardinality of the test_child

  • +
  • tot_vars_count (int) – the total number of variables in the net

  • +
+
+
Returns
+

True iff test_child and test_parent are independent given the sep_set parent_set. False otherwise

+
+
Return type
+

bool

+
+
+
+ +
+
+compute_thumb_value(parent_val, child_val, parent_set_vals)
+

Compute the value to test against the thumb_threshold.

+
+
Parameters
+
    +
  • parent_val (int) – test parent’s variable cardinality

  • +
  • child_val (int) – test child’s variable cardinality

  • +
  • parent_set_vals (List) – the cardinalities of the nodes in the current sep-set

  • +
+
+
Returns
+

the thumb value for the current independence test

+
+
Return type
+

int

+
+
+
+ +
+
+ctpc_algorithm(disable_multiprocessing: bool = False)
+

Compute the CTPC algorithm over the entire net.

+
+ +
+
+estimate_structure(disable_multiprocessing: bool = False)
+

Estimate the network structure using the constraint-based approach

+
+
Returns
+

List of estimated edges

+
+
Return type
+

Typing.List

+
+
+
+ +
+
+independence_test(child_states_numb: int, cim1: PyCTBN.structure_graph.conditional_intensity_matrix.ConditionalIntensityMatrix, cim2: PyCTBN.structure_graph.conditional_intensity_matrix.ConditionalIntensityMatrix, thumb_value: float, parent_indx, child_indx) → bool
+

Compute the actual independence test using two cims. +The exponential test is performed first and, if the null hypothesis is not rejected, +the chi_test is performed as well.

+
+
Parameters
+
+
+
Returns
+

True iff both tests do NOT reject the null hypothesis of independence. False otherwise.

+
+
Return type
+

bool

+
+
+
+ +
+
+one_iteration_of_CTPC_algorithm(var_id: str, tot_vars_count: int) → List
+

Performs an iteration of the CTPC algorithm using the node var_id as test_child.

+
+
Parameters
+

var_id (string) – the node label of the test child

+
+
+
+ +
+ +
+
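A compact sketch of the intended usage, assuming s1 is an already-built SamplePath (see the Examples page); the significance levels are illustrative values, not recommendations.

from PyCTBN import StructureConstraintBasedEstimator

se = StructureConstraintBasedEstimator(s1, exp_test_alfa=0.1, chi_test_alfa=0.1)
edges = se.estimate_structure(disable_multiprocessing=False)   # runs the CTPC algorithm
print(edges)
print(se.adjacency_matrix())
se.save_results()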
+

PyCTBN.estimators.structure_estimator module

+
+
+class PyCTBN.estimators.structure_estimator.StructureEstimator(sample_path: PyCTBN.structure_graph.sample_path.SamplePath, known_edges: List = None)
+

Bases: object

+

Has the task of estimating the network structure given the trajectories in samplepath.

+
+
Parameters
+

sample_path (SamplePath) – the _sample_path object containing the trajectories and the real structure

+
+
_nodes
+

the nodes labels

+
+
_nodes_vals
+

the nodes cardinalities

+
+
_nodes_indxs
+

the nodes indexes

+
+
_complete_graph
+

the complete directed graph built using the nodes labels in _nodes

+
+
+
+
+adjacency_matrix() → numpy.ndarray
+

Converts the estimated structure _complete_graph to a boolean adjacency matrix representation.

+
+
Returns
+

The adjacency matrix of the graph _complete_graph

+
+
Return type
+

numpy.ndArray

+
+
+
+ +
+
+static build_complete_graph(node_ids: List) → networkx.PyCTBN.digraph.DiGraph
+

Builds a complete directed graph (no self loops) given the nodes labels in the list node_ids:

+
+
Parameters
+

node_ids (List) – the list of nodes labels

+
+
Returns
+

a complete Digraph Object

+
+
Return type
+

networkx.DiGraph

+
+
+
+ +
+
+build_removable_edges_matrix(known_edges: List)
+

Builds a boolean matrix which shows whether an edge can be removed or not, based on the prior knowledge given:

+
+
Parameters
+

known_edges (List) – the list of nodes labels

+
+
Returns
+

a boolean matrix

+
+
Return type
+

np.ndarray

+
+
+
+ +
+
+abstract estimate_structure() → List
+

Abstract method to estimate the structure

+
+
Returns
+

List of estimated edges

+
+
Return type
+

Typing.List

+
+
+
+ +
+
+static generate_possible_sub_sets_of_size(u: List, size: int, parent_label: str)
+

Creates a list containing all possible subsets of the list u of size size, +that do not contain the node identified by parent_label.

+
+
Parameters
+
    +
  • u (List) – the list of nodes

  • +
  • size (int) – the size of the subsets

  • +
  • parent_label (string) – the node to exclude in the subsets generation

  • +
+
+
Returns
+

an Iterator Object containing a list of lists

+
+
Return type
+

Iterator

+
+
+
+ +
+
+remove_diagonal_elements(matrix)
+
+ +
+
+save_plot_estimated_structure_graph() → None
+

Plot the estimated structure in a graphical model style. +Spurious edges are colored in red.

+
+ +
+
+save_results() → None
+

Save the estimated Structure to a .json file in the path where the data are loaded from. +The file is named after the input dataset, with the word results_ added to the file name.

+
+ +
+
+spurious_edges() → List
+
+
Return the spurious edges present in the estimated structure, if a prior net structure is present in

_sample_path.structure.

+
+
+
+
Returns
+

A list containing the spurious edges

+
+
Return type
+

List

+
+
+
+ +
+ +
+
+
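The static helper generate_possible_sub_sets_of_size can be exercised on its own; the sketch below, with made-up node labels, lists every 1-element separating set drawn from ['X', 'Y', 'Z'] that excludes the candidate parent 'X'.

from PyCTBN import StructureEstimator

for sep_set in StructureEstimator.generate_possible_sub_sets_of_size(['X', 'Y', 'Z'], 1, 'X'):
    print(sep_set)   # ('Y',) and ('Z',)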

PyCTBN.estimators.structure_score_based_estimator module

+
+
+class PyCTBN.estimators.structure_score_based_estimator.StructureScoreBasedEstimator(sample_path: PyCTBN.structure_graph.sample_path.SamplePath, tau_xu: int = 0.1, alpha_xu: int = 1, known_edges: List = [])
+

Bases: PyCTBN.estimators.structure_estimator.StructureEstimator

+

Has the task of estimating the network structure given the trajectories in samplepath by +using a score based approach.

+
+
Parameters
+
    +
  • sample_path (SamplePath) – the _sample_path object containing the trajectories and the real structure

  • +
  • tau_xu (float, optional) – hyperparameter over the CTBN’s q parameters, default to 0.1

  • +
  • alpha_xu (float, optional) – hyperparameter over the CTBN’s q parameters, default to 1

  • +
  • known_edges (List, optional) – List of known edges, default to []

  • +
+
+
+
+
+estimate_parents(node_id: str, max_parents: int = None, iterations_number: int = 40, patience: int = 10, tabu_length: int = None, tabu_rules_duration: int = 5, optimizer: str = 'hill')
+

Use the FamScore of a node in order to find the best parent nodes

+
+
Parameters
+
    +
  • node_id (string) – current node’s id

  • +
  • max_parents (int, optional) – maximum number of parents for each variable. If None, disabled, default to None

  • +
  • iterations_number (int, optional) – maximum number of optimization algorithm’s iteration, default to 40

  • +
  • patience (int, optional) – number of iterations without any improvement before stopping the search. If None, disabled; default to None

  • +
  • tabu_length (int, optional) – maximum length of the data structures used in the optimization process, default to None

  • +
  • tabu_rules_duration (int, optional) – number of iterations in which each rule keeps its value, default to None

  • +
  • optimizer (string, optional) – name of the optimizer algorithm. Possible values: ‘hill’ (Hill climbing), ‘tabu’ (tabu search), default to ‘tabu’

  • +
+
+
Returns
+

A list of the best edges for the current node

+
+
Return type
+

List

+
+
+
+ +
+
+estimate_structure(max_parents: int = None, iterations_number: int = 40, patience: int = None, tabu_length: int = None, tabu_rules_duration: int = None, optimizer: str = 'tabu', disable_multiprocessing: bool = False)
+

Compute the score-based algorithm to find the optimal structure

+
+
Parameters
+
    +
  • max_parents (int, optional) – maximum number of parents for each variable. If None, disabled, default to None

  • +
  • iterations_number (int, optional) – maximum number of optimization algorithm’s iteration, default to 40

  • +
  • patience (int, optional) – number of iterations without any improvement before stopping the search. If None, disabled; default to None

  • +
  • tabu_length (int, optional) – maximum length of the data structures used in the optimization process, default to None

  • +
  • tabu_rules_duration (int, optional) – number of iterations in which each rule keeps its value, default to None

  • +
  • optimizer (string, optional) – name of the optimizer algorithm. Possible values: ‘hill’ (Hill climbing), ‘tabu’ (tabu search), default to ‘tabu’

  • +
  • disable_multiprocessing (Boolean, optional) – true if you desire to disable the multiprocessing operations, default to False

  • +
+
+
+
+ +
+
+get_score_from_graph(graph: PyCTBN.structure_graph.network_graph.NetworkGraph, node_id: str)
+

Get the FamScore of a node

+
+
Parameters
+
    +
  • node_id (string) – current node’s id

  • +
  • graph (class:'NetworkGraph') – current graph to be computed

  • +
+
+
Returns
+

The FamScore for this graph structure

+
+
Return type
+

float

+
+
+
+ +
+ +
+
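A sketch of a typical score-based call, again assuming a built SamplePath s1; all keyword values shown are the documented defaults.

from PyCTBN import StructureScoreBasedEstimator

se = StructureScoreBasedEstimator(s1, tau_xu=0.1, alpha_xu=1)
edges = se.estimate_structure(
    max_parents=None,
    iterations_number=40,
    patience=None,
    optimizer='tabu',
    disable_multiprocessing=False)
print(edges)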
+

Module contents

+
+
+ + +
+
+ + +
+ +
+

+ © Copyright 2021, Bregoli Alessandro, Martini Filippo, Moretti Luca. +

+
+ + Built with Sphinx using a theme provided by Porão do Juca. + +
+
+
+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/_build/html/classes.html b/docs/_build/html/classes.html new file mode 100644 index 0000000..c015456 --- /dev/null +++ b/docs/_build/html/classes.html @@ -0,0 +1,226 @@ + + + + + + + + + + + PyCTBN package — PyCTBN 2.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + +
+
+
+
+
+ +
+
+ + + + + + + + + + + + +
+ +
+ +
+ + + +
+
+ + +
+ +
+

+ © Copyright 2021, Bregoli Alessandro, Martini Filippo, Moretti Luca. +

+
+ + Built with Sphinx using a theme provided by Porão do Juca. + +
+
+
+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/_build/html/classes.optimizers.html b/docs/_build/html/classes.optimizers.html new file mode 100644 index 0000000..e7f5954 --- /dev/null +++ b/docs/_build/html/classes.optimizers.html @@ -0,0 +1,317 @@ + + + + + + + + + + + PyCTBN.optimizers package — PyCTBN 2.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + +
+
+
+
+
+ +
+
+ + + + + + + + + + + + +
+ +
+ +
+
+

PyCTBN.optimizers package

+
+

Submodules

+
+
+

PyCTBN.optimizers.constraint_based_optimizer module

+
+
+class PyCTBN.optimizers.constraint_based_optimizer.ConstraintBasedOptimizer(node_id: str, structure_estimator: PyCTBN.estimators.structure_estimator.StructureEstimator, tot_vars_count: int)
+

Bases: PyCTBN.optimizers.optimizer.Optimizer

+

Optimizer class that implements the CTPC algorithm

+
+
Parameters
+
    +
  • node_id (string) – current node’s id

  • +
  • structure_estimator (class:'StructureEstimator') – a structure estimator object with the information about the net

  • +
  • tot_vars_count (int) – number of variables in the dataset

  • +
+
+
+
+
+optimize_structure()
+

Compute Optimization process for a structure_estimator by using a CTPC Algorithm

+
+
Returns
+

the estimated structure for the node

+
+
Return type
+

List

+
+
+
+ +
+ +
+ +
+

PyCTBN.optimizers.optimizer module

+
+
+class PyCTBN.optimizers.optimizer.Optimizer(node_id: str, structure_estimator: PyCTBN.estimators.structure_estimator.StructureEstimator)
+

Bases: abc.ABC

+

Interface class for all the optimizer children in PyCTBN

+
+
Parameters
+
    +
  • node_id (string) – the node label

  • +
  • structure_estimator (class:'StructureEstimator') – A structureEstimator Object to predict the structure

  • +
+
+
+
+
+abstract optimize_structure() → List
+

Compute Optimization process for a structure_estimator

+
+
Returns
+

the estimated structure for the node

+
+
Return type
+

List

+
+
+
+ +
+ +
+ +
+
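The interface is small: a concrete optimizer receives a node label and a StructureEstimator at construction time and must return the estimated parent set from optimize_structure. The sketch below is purely illustrative, the class name and search strategy are invented, and it assumes Optimizer is re-exported at package level like the other classes.

from typing import List

from PyCTBN import Optimizer   # assumed package-level re-export

class RandomRestartOptimizer(Optimizer):          # hypothetical subclass
    def optimize_structure(self) -> List:
        best_parents: List = []
        # ...score candidate parent sets for the node through the structure
        # estimator passed to __init__ and keep the best one found...
        return best_parents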

Module contents

+
+
+ + +
+
+ + +
+ +
+

+ © Copyright 2021, Bregoli Alessandro, Martini Filippo, Moretti Luca. +

+
+ + Built with Sphinx using a theme provided by Porão do Juca. + +
+
+
+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/_build/html/classes.structure_graph.html b/docs/_build/html/classes.structure_graph.html new file mode 100644 index 0000000..2bba96d --- /dev/null +++ b/docs/_build/html/classes.structure_graph.html @@ -0,0 +1,914 @@ + + + + + + + + + + + PyCTBN.structure_graph package — PyCTBN 2.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + +
+
+
+
+
+ +
+
+ + + + + + + + + + + + +
+ +
+ +
+
+

PyCTBN.structure_graph package

+
+

Submodules

+
+
+

PyCTBN.structure_graph.abstract_sample_path module

+
+
+class PyCTBN.structure_graph.abstract_sample_path.AbstractSamplePath(importer: utility.abstract_importer.AbstractImporter)
+

Bases: abc.ABC

+
+
+abstract build_structure()
+

Builds the Structure object that aggregates all the info about the net. +Assigns the Structure object to the instance attribute _structure. +Parameters:

+
+

void

+
+
+
Returns:

void

+
+
+
+ +
+
+abstract build_trajectories()
+

Builds the Trajectory object that will contain all the trajectories. +Assigns the Trajectory object to the instance attribute _trajectories. +Clears all the unused dataframes in the Importer object

+
+
Parameters:

void

+
+
Returns:

void

+
+
+
+ +
+ +
+
+

PyCTBN.structure_graph.conditional_intensity_matrix module

+
+
+class PyCTBN.structure_graph.conditional_intensity_matrix.ConditionalIntensityMatrix(state_residence_times: numpy.array, state_transition_matrix: numpy.array)
+

Bases: object

+

Abstracts the Conditional Intensity matrix of a node as an aggregation of the state residence times vector, +the state transition matrix and the actual CIM matrix.

+
+
Parameters
+
    +
  • state_residence_times (numpy.array) – state residence times vector

  • +
  • state_transition_matrix (numpy.ndArray) – the transitions count matrix

  • +
+
+
_cim
+

the actual cim of the node

+
+
+
+
+property cim
+
+ +
+
+compute_cim_coefficients() → None
+

Compute the coefficients of the matrix _cim by using the following equality q_xx’ = M[x, x’] / T[x]. +The class member _cim will contain the computed cim

+
+ +
+
+property state_residence_times
+
+ +
+
+property state_transition_matrix
+
+ +
+ +
+
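A tiny numeric sketch, with synthetic sufficient statistics, of how a ConditionalIntensityMatrix is filled and its coefficients computed:

import numpy as np
from PyCTBN.structure_graph.conditional_intensity_matrix import ConditionalIntensityMatrix

T = np.array([4.0, 6.0])            # state residence times of a binary node
M = np.array([[0, 3], [2, 0]])      # state transition counts
cim = ConditionalIntensityMatrix(T, M)
cim.compute_cim_coefficients()      # applies q_xx' = M[x, x'] / T[x]
print(cim.cim)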
+

PyCTBN.structure_graph.network_graph module

+
+
+class PyCTBN.structure_graph.network_graph.NetworkGraph(graph_struct: PyCTBN.structure_graph.structure.Structure)
+

Bases: object

+

Abstracts the info contained in the Structure class in the form of a directed graph. +Has the task of creating all the necessary filtering and indexing structures for parameters estimation

+
+
Parameters
+

graph_struct (Structure) – the Structure object from which infos about the net will be extracted

+
+
_graph
+

directed graph

+
+
_aggregated_info_about_nodes_parents
+

a structure that contains all the necessary info +about every parent of the node for which all the indexing and filtering structures will be constructed.

+
+
_time_scalar_indexing_structure
+

the indexing structure for state res time estimation

+
+
_transition_scalar_indexing_structure
+

the indexing structure for transition computation

+
+
_time_filtering
+

the columns filtering structure used in the computation of the state res times

+
+
_transition_filtering
+

the columns filtering structure used in the computation of the transition +from one state to another

+
+
_p_combs_structure
+

all the possible parents states combination for the node of interest

+
+
+
+
+add_edges(list_of_edges: List) → None
+

Add the edges to the _graph contained in the list list_of_edges.

+
+
Parameters
+

list_of_edges (List) – the list containing of tuples containing the edges

+
+
+
+ +
+
+add_nodes(list_of_nodes: List) → None
+

Adds the nodes to the _graph contained in the list of nodes list_of_nodes. +Sets all the properties that identify a node (index, positional index, cardinality)

+
+
Parameters
+

list_of_nodes (List) – the nodes to add to _graph

+
+
+
+ +
+
+static build_p_comb_structure_for_a_node(parents_values: List) → numpy.ndarray
+

Builds the combinatorial structure that contains the combinations of all the values contained in +parents_values.

+
+
Parameters
+

parents_values (List) – the cardinalities of the nodes

+
+
Returns
+

A numpy matrix containing a grid of the combinations

+
+
Return type
+

numpy.ndArray

+
+
+
+ +
+
+static build_time_columns_filtering_for_a_node(node_indx: int, p_indxs: List) → numpy.ndarray
+

Builds the necessary structure to filter the desired columns indicated by node_indx and p_indxs +in the dataset. +This structure will be used in the computation of the state res times. :param node_indx: the index of the node +:type node_indx: int +:param p_indxs: the indexes of the node’s parents +:type p_indxs: List +:return: The filtering structure for times estimation +:rtype: numpy.ndArray

+
+ +
+
+static build_time_scalar_indexing_structure_for_a_node(node_states: int, parents_vals: List) → numpy.ndarray
+

Builds an indexing structure for the computation of state residence times values.

+
+
Parameters
+
    +
  • node_states (int) – the node cardinality

  • +
  • parents_vals (List) – the cardinalities of the node’s parents

  • +
+
+
Returns
+

The time indexing structure

+
+
Return type
+

numpy.ndArray

+
+
+
+ +
+
+static build_transition_filtering_for_a_node(node_indx: int, p_indxs: List, nodes_number: int) → numpy.ndarray
+

Builds the necessary structure to filter the desired columns indicated by node_indx and p_indxs +in the dataset. +This structure will be used in the computation of the state transitions values. +:param node_indx: the index of the node +:type node_indx: int +:param p_indxs: the indexes of the node’s parents +:type p_indxs: List +:param nodes_number: the total number of nodes in the dataset +:type nodes_number: int +:return: The filtering structure for transitions estimation +:rtype: numpy.ndArray

+
+ +
+
+static build_transition_scalar_indexing_structure_for_a_node(node_states_number: int, parents_vals: List) → numpy.ndarray
+

Builds an indexing structure for the computation of state transitions values.

+
+
Parameters
+
    +
  • node_states_number (int) – the node cardinality

  • +
  • parents_vals (List) – the cardinalities of the node’s parents

  • +
+
+
Returns
+

The transition indexing structure

+
+
Return type
+

numpy.ndArray

+
+
+
+ +
+
+clear_indexing_filtering_structures() → None
+

Initialize all the filtering/indexing structures.

+
+ +
+
+property edges
+
+ +
+
+fast_init(node_id: str) → None
+

Initializes all the necessary structures for parameters estimation of the node identified by the label +node_id

+
+
Parameters
+

node_id (string) – the label of the node

+
+
+
+ +
+
+get_node_indx(node_id) → int
+
+ +
+
+get_ordered_by_indx_set_of_parents(node: str) → Tuple
+

Builds the aggregated structure that holds all the info related to the parent set of the node, namely +(parents_labels, parents_indexes, parents_cardinalities).

+
+
Parameters
+

node (string) – the label of the node

+
+
Returns
+

a tuple containing all the parent set infos

+
+
Return type
+

Tuple

+
+
+
+ +
+
+get_parents_by_id(node_id) → List
+

Returns a list of labels of the parents of the node node_id

+
+
Parameters
+

node_id (string) – the node label

+
+
Returns
+

a List of labels of the parents

+
+
Return type
+

List

+
+
+
+ +
+
+get_positional_node_indx(node_id) → int
+
+ +
+
+get_states_number(node_id) → int
+
+ +
+
+has_edge(edge: tuple) → bool
+

Check if the graph contains a specific edge

+
+
Parameters:

edge: a tuple that represents the edge

+
+
Returns:

bool

+
+
+
+ +
+
+init_graph()
+
+ +
+
+property nodes
+
+ +
+
+property nodes_indexes
+
+ +
+
+property nodes_values
+
+ +
+
+property p_combs
+
+ +
+
+remove_edges(list_of_edges: List) → None
+

Remove the edges to the graph contained in the list list_of_edges.

+
+
Parameters
+

list_of_edges (List) – The edges to remove from the graph

+
+
+
+ +
+
+remove_node(node_id: str) → None
+

Remove the node node_id from all the class members. +Initialize all the filtering/indexing structures.

+
+ +
+
+property time_filtering
+
+ +
+
+property time_scalar_indexing_strucure
+
+ +
+
+property transition_filtering
+
+ +
+
+property transition_scalar_indexing_structure
+
+ +
+ +
+
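The sketch below mirrors the Parameters Estimation Example: a NetworkGraph is wrapped around the Structure of a built SamplePath s1 (assumed) and initialized for a single node, here called 'X' purely for illustration.

from PyCTBN import NetworkGraph

g = NetworkGraph(s1.structure)
g.fast_init('X')                     # build filtering/indexing structures for node 'X'
print(g.get_parents_by_id('X'))      # labels of the parents of 'X'
print(g.p_combs)                     # all parent state combinations for 'X'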
+

PyCTBN.structure_graph.sample_path module

+
+
+class PyCTBN.structure_graph.sample_path.SamplePath(importer: PyCTBN.utility.abstract_importer.AbstractImporter)
+

Bases: object

+

Aggregates all the information about the trajectories, the real structure of the sampled net and the variables’ +cardinalities. Has the task of creating the objects Trajectory and Structure that will +contain the mentioned data.

+
+
Parameters
+

importer (AbstractImporter) – the Importer object which contains the imported and processed data

+
+
_trajectories
+

the Trajectory object that will contain all the concatenated trajectories

+
+
_structure
+

the Structure object that will contain all the structural info about the net

+
+
_total_variables_count
+

the number of variables in the net

+
+
+
+
+build_structure() → None
+

Builds the Structure object that aggregates all the info about the net.

+
+ +
+
+build_trajectories() → None
+

Builds the Trajectory object that will contain all the trajectories. +Clears all the unused dataframes in _importer Object

+
+ +
+
+clear_memory()
+
+ +
+
+property has_prior_net_structure
+
+ +
+
+property structure
+
+ +
+
+property total_variables_count
+
+ +
+
+property trajectories
+
+ +
+ +
+
+

PyCTBN.structure_graph.set_of_cims module

+
+
+class PyCTBN.structure_graph.set_of_cims.SetOfCims(node_id: str, parents_states_number: List, node_states_number: int, p_combs: numpy.ndarray)
+

Bases: object

+

Aggregates all the CIMS of the node identified by the label _node_id.

+
+
Parameters
+
    +
  • node_id – the node label

  • +
  • parents_states_number (List) – the cardinalities of the parents

  • +
  • node_states_number (int) – the cardinality of the node

  • +
  • p_combs (numpy.ndArray) – the p_comb structure bound to this node

  • +
+
+
_state_residence_time
+

matrix containing all the state residence time vectors for the node

+
+
_transition_matrices
+

matrix containing all the transition matrices for the node

+
+
_actual_cims
+

the cims of the node

+
+
+
+
+property actual_cims
+
+ +
+
+build_cims(state_res_times: numpy.ndarray, transition_matrices: numpy.ndarray) → None
+

Build the ConditionalIntensityMatrix objects given the state residence times and transition matrices. +Compute the cim coefficients. The class member _actual_cims will contain the computed cims.

+
+
Parameters
+
    +
  • state_res_times (numpy.ndArray) – the state residence times matrix

  • +
  • transition_matrices (numpy.ndArray) – the transition matrices

  • +
+
+
+
+ +
+
+build_times_and_transitions_structures() → None
+

Initializes at the correct dimensions the state residence times matrix and the state transition matrices.

+
+ +
+
+filter_cims_with_mask(mask_arr: numpy.ndarray, comb: List) → numpy.ndarray
+

Filter the cims contained in the array _actual_cims given the boolean mask mask_arr and the index +comb.

+
+
Parameters
+
    +
  • mask_arr (numpy.array) – the boolean mask that indicates which parent to consider

  • +
  • comb (numpy.array) – the state/s of the filtered parents

  • +
+
+
Returns
+

Array of ConditionalIntensityMatrix objects

+
+
Return type
+

numpy.array

+
+
+
+ +
+
+get_cims_number()
+
+ +
+
+property p_combs
+
+ +
+ +
+
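filter_cims_with_mask is easiest to read with a concrete shape in mind. In the sketch below, sofc is a SetOfCims for a node with two parents (for example, the object returned by ParametersEstimator.compute_parameters_for_node); the mask keeps only the first parent and comb fixes its state to 0. Shapes and names are assumptions made for illustration.

import numpy as np

mask = np.array([True, False])                      # consider only the first parent
selected = sofc.filter_cims_with_mask(mask, [0])    # CIMs where that parent is in state 0
print(len(selected), "ConditionalIntensityMatrix objects selected")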
+

PyCTBN.structure_graph.sets_of_cims_container module

+
+
+class PyCTBN.structure_graph.sets_of_cims_container.SetsOfCimsContainer(list_of_keys, states_number_per_node, list_of_parents_states_number, p_combs_list)
+

Bases: object

+

Aggregates a collection of SetOfCims objects

+
+
+get_cims_of_node(node_indx, cim_indx)
+
+ +
+
+get_set_of_cims(node_indx)
+
+ +
+
+init_cims_structure(keys, states_number_per_node, list_of_parents_states_number, p_combs_list)
+

for indx, key in enumerate(keys):
    self.sets_of_cims.append(
        socim.SetOfCims(key, list_of_parents_states_number[indx], states_number_per_node[indx]))

+
+
+ +
+ +
+
+

PyCTBN.structure_graph.structure module

+
+
+class PyCTBN.structure_graph.structure.Structure(nodes_labels_list: List, nodes_indexes_arr: numpy.ndarray, nodes_vals_arr: numpy.ndarray, edges_list: List, total_variables_number: int)
+

Bases: object

+

Contains all the info about the network structure (node labels, node cardinalities, edges, indexes)

+
+
Parameters
+
    +
  • nodes_labels_list (List) – the symbolic names of the variables

  • +
  • nodes_indexes_arr (numpy.ndArray) – the indexes of the nodes

  • +
  • nodes_vals_arr (numpy.ndArray) – the cardinalities of the nodes

  • +
  • edges_list (List) – the edges of the network

  • +
  • total_variables_number (int) – the total number of variables in the dataset

  • +
+
+
+
+
+add_edge(edge: tuple)
+
+ +
+
+clean_structure_edges()
+
+ +
+
+contains_edge(edge: tuple) → bool
+
+ +
+
+property edges
+
+ +
+
+get_node_id(node_indx: int) → str
+

Given the node_index returns the node label.

+
+
Parameters
+

node_indx (int) – the node index

+
+
Returns
+

the node label

+
+
Return type
+

string

+
+
+
+ +
+
+get_node_indx(node_id: str) → int
+

Given the node label node_id returns the node index.

+
+
Parameters
+

node_id (string) – the node label

+
+
Returns
+

the node index

+
+
Return type
+

int

+
+
+
+ +
+
+get_positional_node_indx(node_id: str) → int
+
+ +
+
+get_states_number(node: str) → int
+

Given the node label node returns the cardinality of the node.

+
+
Parameters
+

node (string) – the node label

+
+
Returns
+

the node cardinality

+
+
Return type
+

int

+
+
+
+ +
+
+property nodes_indexes
+
+ +
+
+property nodes_labels
+
+ +
+
+property nodes_values
+
+ +
+
+remove_edge(edge: tuple)
+
+ +
+
+remove_node(node_id: str) → None
+

Remove the node node_id from all the class members. +The class member _total_variables_number is not updated, since it refers to the total number of variables in the dataset.

+
+ +
+
+property total_variables_number
+
+ +
+ +
+
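A self-contained sketch of building a Structure by hand for a small synthetic net of three ternary variables:

import numpy as np
from PyCTBN.structure_graph.structure import Structure

labels = ['X', 'Y', 'Z']
s = Structure(labels,
              nodes_indexes_arr=np.array([0, 1, 2]),
              nodes_vals_arr=np.array([3, 3, 3]),
              edges_list=[('X', 'Y'), ('Y', 'Z')],
              total_variables_number=3)
print(s.get_states_number('Y'))        # 3
print(s.contains_edge(('X', 'Y')))     # True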
+

PyCTBN.structure_graph.trajectory module

+
+
+class PyCTBN.structure_graph.trajectory.Trajectory(list_of_columns: List, original_cols_number: int)
+

Bases: object

+

Abstracts the info about a complete set of trajectories, represented as a numpy array of doubles +(the time deltas) and a numpy matrix of ints (the changes of state).

+
+
Parameters
+
    +
  • list_of_columns (List) – the list containing the times array and values matrix

  • +
  • original_cols_number (int) – total number of cols in the data

  • +
+
+
_actual_trajectory
+

the trajectory containing also the duplicated/shifted values

+
+
_times
+

the array containing the time deltas

+
+
+
+
+property complete_trajectory
+
+ +
+
+size()
+
+ +
+
+property times
+
+ +
+
+property trajectory
+
+ +
+ +
+
+

Module contents

+
+
+ + +
+
+ + +
+ +
+

+ © Copyright 2021, Bregoli Alessandro, Martini Filippo, Moretti Luca. +

+
+ + Built with Sphinx using a theme provided by Porão do Juca. + +
+
+
+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/_build/html/classes.utility.html b/docs/_build/html/classes.utility.html new file mode 100644 index 0000000..f9caabc --- /dev/null +++ b/docs/_build/html/classes.utility.html @@ -0,0 +1,649 @@ + + + + + + + + + + + PyCTBN.utility package — PyCTBN 2.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + +
+
+
+
+
+ +
+
+ + + + + + + + + + + + +
+ +
+ +
+
+

PyCTBN.utility package

+
+

Submodules

+
+
+

PyCTBN.utility.abstract_importer module

+
+
+class PyCTBN.utility.abstract_importer.AbstractImporter(file_path: str = None, trajectory_list: Union[pandas.core.frame.DataFrame, numpy.ndarray] = None, variables: pandas.core.frame.DataFrame = None, prior_net_structure: pandas.core.frame.DataFrame = None)
+

Bases: abc.ABC

+

Abstract class that exposes all the necessary methods to process the trajectories and the net structure.

+
+
Parameters
+
    +
  • file_path (str) – the file path, or dataset name if you import already processed data

  • +
  • trajectory_list (typing.Union[pandas.DataFrame, numpy.ndarray]) – Dataframe or numpy array containing the concatenation of all the processed trajectories

  • +
  • variables (pandas.DataFrame) – Dataframe containing the nodes labels and cardinalities

  • +
+
+
Prior_net_structure
+

Dataframe containing the structure of the network (edges)

+
+
_sorter
+

A list containing the variables labels in the SAME order as the columns in concatenated_samples

+
+
+
+

Warning

+

The parameters variables and prior_net_structure HAVE to be properly constructed +as Pandas Dataframes with the following structure: +Header of _df_structure = [From_Node | To_Node] +Header of _df_variables = [Variable_Label | Variable_Cardinality] +See the tutorial on how to construct a correct concatenated_samples Dataframe/ndarray.

+
+
+

Note

+

See :class:JsonImporter for an example implementation

+
+
+
+build_list_of_samples_array(concatenated_sample: pandas.core.frame.DataFrame) → List
+

Builds a List containing the delta times numpy array and the complete transitions matrix

+
+
Parameters
+

concatenated_sample (pandas.Dataframe) – the dataframe/array from which the time, and transitions matrix have to be extracted +and converted

+
+
Returns
+

the resulting list of numpy arrays

+
+
Return type
+

List

+
+
+
+ +
+
+abstract build_sorter(trajecory_header: object) → List
+

Initializes the _sorter class member from a trajectory dataframe, extracting the header of the frame +and keeping ONLY the variables symbolic labels, cutting out the time label in the header.

+
+
Parameters
+

trajecory_header (object) – an object that will be used to define the header

+
+
Returns
+

A list containing the processed header.

+
+
Return type
+

List

+
+
+
+ +
+
+clear_concatenated_frame() → None
+

Removes all values in the dataframe concatenated_samples.

+
+ +
+
+compute_row_delta_in_all_samples_frames(df_samples_list: List) → None
+

Calls the method compute_row_delta_sigle_samples_frame on every dataframe present in the list +df_samples_list. +Concatenates the results in the dataframe concatenated_samples

+
+
Parameters
+

df_samples_list (List) – the dataframes list to be processed and concatenated

+
+
+
+

Warning

+

The Dataframe sample_frame has to follow the column structure of this header: +Header of sample_frame = [Time | Variable values] +The class member self._sorter HAS to be properly INITIALIZED (See class members definition doc)

+
+
+

Note

+

After the call of this method the class member concatenated_samples will contain all processed +and merged trajectories

+
+
+ +
+
+compute_row_delta_sigle_samples_frame(sample_frame: pandas.core.frame.DataFrame, columns_header: List, shifted_cols_header: List) → pandas.core.frame.DataFrame
+

Computes the difference between each value present in the time column. +Copies and shifts up by one position all the values present in the remaining columns.

+
+
Parameters
+
    +
  • sample_frame (pandas.Dataframe) – the trajectory to be processed

  • +
  • columns_header (List) – the original header of sample_frame

  • +
  • shifted_cols_header (List) – a copy of columns_header with changed names of the contents

  • +
+
+
Returns
+

The processed dataframe

+
+
Return type
+

pandas.Dataframe

+
+
+
+

Warning

+

the Dataframe sample_frame has to follow the column structure of this header: +Header of sample_frame = [Time | Variable values]

+
+
+ +
+
+property concatenated_samples
+
+ +
+
+abstract dataset_id() → object
+

If the original dataset contains multiple datasets, this method returns a unique id to identify the current +dataset

+
+ +
+
+property file_path
+
+ +
+
+property sorter
+
+ +
+
+property structure
+
+ +
+
+property variables
+
+ +
+ +
+
+

PyCTBN.utility.cache module

+
+
+class PyCTBN.utility.cache.Cache
+

Bases: object

+

This class acts as a cache of SetOfCims objects for a node.

+
+
_list_of_sets_of_parents
+

a list of Set objects of the parents to which the SetOfCims in the cache at the SAME +index is related

+
+
_actual_cache
+

a list of setOfCims objects

+
+
+
+
+clear()
+

Clear the contents both of _actual_cache and _list_of_sets_of_parents.

+
+ +
+
+find(parents_comb: Set)
+

Tries to find in the cache, given the symbolic parents combination parents_comb, the SetOfCims +related to that parents_comb.

+
+
Parameters
+

parents_comb (Set) – the parents related to that SetOfCims

+
+
Returns
+

A SetOfCims object if the parents_comb index is found in _list_of_sets_of_parents. +None otherwise.

+
+
Return type
+

SetOfCims

+
+
+
+ +
+
+put(parents_comb: Set, socim: PyCTBN.structure_graph.set_of_cims.SetOfCims)
+

Place in cache the SetOfCims object, and the related symbolic index parents_comb in +_list_of_sets_of_parents.

+
+
Parameters
+
    +
  • parents_comb (Set) – the symbolic set index

  • +
  • socim (SetOfCims) – the related SetOfCims object

  • +
+
+
+
+ +
+ +
+
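The cache is keyed by the symbolic parent combination; a typical check-then-fill pattern looks like the sketch below, where sofc stands for a SetOfCims computed elsewhere and Cache is assumed to be re-exported at package level like the other utility classes.

from PyCTBN import Cache

cache = Cache()
parents_comb = frozenset({'Y', 'Z'})      # symbolic parent set of the node under test
if cache.find(parents_comb) is None:
    cache.put(parents_comb, sofc)         # sofc: a SetOfCims computed elsewhere (assumed)
cached_sofc = cache.find(parents_comb)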
+

PyCTBN.utility.decorators module

+
+
+PyCTBN.utility.decorators.timing(f)
+
+ +
+
+PyCTBN.utility.decorators.timing_write(f)
+
+ +
+
+

PyCTBN.utility.json_importer module

+
+
+class PyCTBN.utility.json_importer.JsonImporter(file_path: str, samples_label: str, structure_label: str, variables_label: str, time_key: str, variables_key: str)
+

Bases: PyCTBN.utility.abstract_importer.AbstractImporter

+

Implements the abstract methods of AbstractImporter and adds all the necessary methods to process and prepare +the data in json format.

+
+
Parameters
+
    +
  • file_path (string) – the path of the file that contains the data to be imported

  • +
  • samples_label (string) – the reference key for the samples in the trajectories

  • +
  • structure_label (string) – the reference key for the structure of the network data

  • +
  • variables_label (string) – the reference key for the cardinalities of the nodes data

  • +
  • time_key (string) – the key used to identify the timestamps in each trajectory

  • +
  • variables_key (string) – the key used to identify the names of the variables in the net

  • +
+
+
_array_indx
+

the index of the outer JsonArray to extract the data from

+
+
_df_samples_list
+

a Dataframe list in which every dataframe contains a trajectory

+
+
_raw_data
+

The raw contents of the json file to import

+
+
+
+
+build_sorter(sample_frame: pandas.core.frame.DataFrame) → List
+

Implements the abstract method build_sorter of the AbstractImporter for this dataset.

+
+ +
+
+clear_data_frame_list() → None
+

Removes all values present in the dataframes in the list _df_samples_list.

+
+ +
+
+dataset_id() → object
+

If the original dataset contains multiple datasets, this method returns a unique id to identify the current +dataset

+
+ +
+
+import_data(indx: int) → None
+

Implements the abstract method of AbstractImporter.

+
+
Parameters
+

indx (int) – the index of the outer JsonArray to extract the data from

+
+
+
+ +
+
+import_sampled_cims(raw_data: List, indx: int, cims_key: str) → Dict
+

Imports the synthetic CIMS in the dataset in a dictionary, using variables labels +as keys for the set of CIMS of a particular node.

+
+
Parameters
+
    +
  • raw_data (List) – List of Dicts

  • +
  • indx (int) – The index of the array from which the data have to be extracted

  • +
  • cims_key (string) – the key where the json object cims are placed

  • +
+
+
Returns
+

a dictionary containing the sampled CIMS for all the variables in the net

+
+
Return type
+

Dictionary

+
+
+
+ +
+
+import_structure(raw_data: List) → pandas.core.frame.DataFrame
+

Imports in a dataframe the data in the list raw_data at the key _structure_label

+
+
Parameters
+

raw_data (List) – List of Dicts

+
+
Returns
+

Dataframe containing the starting node and the ending node of every arc of the network

+
+
Return type
+

pandas.Dataframe

+
+
+
+ +
+
+import_trajectories(raw_data: List) → List
+

Imports the trajectories from the list of dicts raw_data.

+
+
Parameters
+

raw_data (List) – List of Dicts

+
+
Returns
+

List of dataframes containing all the trajectories

+
+
Return type
+

List

+
+
+
+ +
+
+import_variables(raw_data: List) → pandas.core.frame.DataFrame
+

Imports the data in raw_data at the key _variables_label.

+
+
Parameters
+

raw_data (List) – List of Dicts

+
+
Returns
+

Dataframe containing the variables’ symbolic labels and their cardinalities

+
+
Return type
+

pandas.Dataframe

+
+
+
+ +
+
+normalize_trajectories(raw_data: List, indx: int, trajectories_key: str) → List
+

Extracts the trajectories in raw_data at the index indx at the key trajectories_key.

+
+
Parameters
+
    +
  • raw_data (List) – List of Dicts

  • +
  • indx (int) – The index of the array from which the data have to be extracted

  • +
  • trajectories_key (string) – the key of the trajectories objects

  • +
+
+
Returns
+

A list of dataframes containing the trajectories

+
+
Return type
+

List

+
+
+
+ +
+
+one_level_normalizing(raw_data: List, indx: int, key: str) → pandas.core.frame.DataFrame
+

Extracts the one-level nested data in the list raw_data at the index indx at the key key.

+
+
Parameters
+
    +
  • raw_data (List) – List of Dicts

  • +
  • indx (int) – The index of the array from which the data have to be extracted

  • +
  • key (string) – the key of the Dicts from which to extract data

  • +
+
+
Returns
+

A normalized dataframe

+
+
Return type
+

pandas.Dataframe

+
+
+
+ +
+
+read_json_file() → List
+

Reads the JSON file in the path self.filePath.

+
+
Returns
+

The contents of the json file

+
+
Return type
+

List

+
+
+
+ +
+ +
+
+

PyCTBN.utility.sample_importer module

+
+
+class PyCTBN.utility.sample_importer.SampleImporter(trajectory_list: Union[pandas.core.frame.DataFrame, numpy.ndarray, List] = None, variables: Union[pandas.core.frame.DataFrame, numpy.ndarray, List] = None, prior_net_structure: Union[pandas.core.frame.DataFrame, numpy.ndarray, List] = None)
+

Bases: PyCTBN.utility.abstract_importer.AbstractImporter

+

Implements the abstract methods of AbstractImporter and adds all the necessary methods to process and prepare +the data loaded directly by using DataFrame

+
+
Parameters
+
    +
  • trajectory_list (typing.Union[pd.DataFrame, np.ndarray, typing.List]) – the data that describes the trajectories

  • +
  • variables (typing.Union[pd.DataFrame, np.ndarray, typing.List]) – the data that describes the variables with name and cardinality

  • +
  • prior_net_structure (typing.Union[pd.DataFrame, np.ndarray, typing.List]) – the data of the real structure, if it exists

  • +
+
+
_df_samples_list
+

a Dataframe list in which every dataframe contains a trajectory

+
+
_raw_data
+

The raw contents of the json file to import

+
+
+
+
+build_sorter(sample_frame: pandas.core.frame.DataFrame) → List
+

Implements the abstract method build_sorter of the AbstractImporter in order to get the ordered variables list.

+
+ +
+
+dataset_id() → object
+

If the original dataset contains multiple datasets, this method returns a unique id to identify the current +dataset

+
+ +
+
+import_data(header_column=None)
+
+ +
+ +
+
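SampleImporter is the entry point when the trajectories are already in memory rather than in a JSON file. The sketch below builds tiny synthetic DataFrames; the column conventions ([Time | variables], [From_Node | To_Node], and a label/cardinality pair) follow the AbstractImporter warning above, and the exact column names as well as the package-level re-export of SampleImporter are assumptions.

import pandas as pd
from PyCTBN import SampleImporter, SamplePath

trajectory = pd.DataFrame({'Time': [0.0, 0.3, 0.9],
                           'X': [0, 1, 1],
                           'Y': [0, 0, 1]})
variables = pd.DataFrame({'Name': ['X', 'Y'], 'Value': [2, 2]})
prior_net = pd.DataFrame({'From_Node': ['X'], 'To_Node': ['Y']})

importer = SampleImporter(trajectory_list=[trajectory],
                          variables=variables,
                          prior_net_structure=prior_net)
importer.import_data()
s1 = SamplePath(importer)
s1.build_trajectories()
s1.build_structure()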
+

Module contents

+
+
+ + +
+
+ + +
+ +
+

+ © Copyright 2021, Bregoli Alessandro, Martini Filippo, Moretti Luca. +

+
+ + Built with Sphinx using a theme provided by Porão do Juca. + +
+
+
+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/_build/html/examples.html b/docs/_build/html/examples.html new file mode 100644 index 0000000..0b23f72 --- /dev/null +++ b/docs/_build/html/examples.html @@ -0,0 +1,298 @@ + + + + + + + + + + + Examples — PyCTBN 2.0 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + +
+
+
+
+
+ +
+
+ + + + + + + + + + + + +
+ +
+ +
+
+

Examples

+
+

Installation/Usage

+

Download the release in .tar.gz or .whl format and simply use pip install to install it:

+
$pip install PyCTBN-1.0.tar.gz
+
+
+
+
+

Implementing your own data importer

+
"""This example demonstrates the implementation of a simple data importer the extends the class abstract importer to import data in csv format.
+The net in exam has three ternary nodes and no prior net structure.
+"""
+
+import typing
+import pandas as pd
+
+from PyCTBN import AbstractImporter
+
+class CSVImporter(AbstractImporter):
+
+    def __init__(self, file_path):
+        self._df_samples_list = None
+        super(CSVImporter, self).__init__(file_path)
+
+    def import_data(self):
+        self.read_csv_file()
+        self._sorter = self.build_sorter(self._df_samples_list[0])
+        self.import_variables()
+        self.compute_row_delta_in_all_samples_frames(self._df_samples_list)
+
+    def read_csv_file(self):
+        df = pd.read_csv(self._file_path)
+        df.drop(df.columns[[0]], axis=1, inplace=True)
+        self._df_samples_list = [df]
+
+    def import_variables(self):
+        values_list = [3 for var in self._sorter]
+        # initialize dict of lists
+        data = {'Name':self._sorter, 'Value':values_list}
+        # Create the pandas DataFrame
+        self._df_variables = pd.DataFrame(data)
+
+    def build_sorter(self, sample_frame: pd.DataFrame) -> typing.List:
+        return list(sample_frame.columns)[1:]
+
+    def dataset_id(self) -> object:
+        pass
+
+
+
+
+

Parameters Estimation Example

+
import glob
+import os
+
+from PyCTBN import JsonImporter
+from PyCTBN import SamplePath
+from PyCTBN import NetworkGraph
+from PyCTBN import ParametersEstimator
+
+
+def main():
+    read_files = glob.glob(os.path.join('./data', "*.json")) #Take all json files in this dir
+    #import data
+    importer = JsonImporter(read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name')
+    importer.import_data(0)
+    #Create a SamplePath Obj passing an already filled AbstractImporter object
+    s1 = SamplePath(importer)
+    #Build the trajectories and the structural info
+    s1.build_trajectories()
+    s1.build_structure()
+    print(s1.structure.edges)
+    print(s1.structure.nodes_values)
+    #From The Structure Object build the Graph
+    g = NetworkGraph(s1.structure)
+    #Select a node whose parameters you want to estimate
+    node = g.nodes[2]
+    print("Node", node)
+    #Init the _graph specifically for THIS node
+    g.fast_init(node)
+    #Use SamplePath and Graph to create a ParametersEstimator object
+    p1 = ParametersEstimator(s1.trajectories, g)
+    #Init the estimator specifically for THIS node
+    p1.fast_init(node)
+    #Compute the parameters
+    sofc1 = p1.compute_parameters_for_node(node)
+    #The estimated CIMs are inside the resulting SetOfCims object
+    print(sofc1.actual_cims)
+
+
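+
+The same pattern extends to the whole network. A sketch of estimating the CIMs of every node, meant as a continuation of the body of main() above (the per-node NetworkGraph is an assumption made to keep the node-specific indexing structures separate; the variable names are illustrative):
+
+    #Estimate the parameters of every node in the net
+    for node in s1.structure.nodes_labels:
+        g_node = NetworkGraph(s1.structure)
+        g_node.fast_init(node)
+        pe = ParametersEstimator(s1.trajectories, g_node)
+        pe.fast_init(node)
+        sofc = pe.compute_parameters_for_node(node)
+        print(node, sofc.actual_cims)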
+
+
+

Structure Estimation Example

+
import glob
+import os
+
+from PyCTBN import JsonImporter
+from PyCTBN import SamplePath
+from PyCTBN import StructureEstimator
+
+def structure_estimation_example():
+
+    # read the json files in ./data path
+    read_files = glob.glob(os.path.join('./data', "*.json"))
+    # initialize a JsonImporter object for the first file
+    importer = JsonImporter(read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name')
+    # import the data at index 0 of the outer json array
+    importer.import_data(0)
+    # construct a SamplePath Object passing a filled AbstractImporter
+    s1 = SamplePath(importer)
+    # build the trajectories
+    s1.build_trajectories()
+    # build the real structure
+    s1.build_structure()
+    # construct a StructureEstimator object
+    se1 = StructureEstimator(s1, 0.1, 0.1)
+    # call the ctpc algorithm
+    se1.ctpc_algorithm()
+    # the adjacency matrix of the estimated structure
+    print(se1.adjacency_matrix())
+    # save results to a json file
+    se1.save_results()
+
+
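+
+When the dataset also ships the real structure, a quick sanity check is to compare the estimated adjacency matrix against the real edges. A sketch, meant as a continuation of the body of structure_estimation_example() above (it assumes adjacency_matrix() follows the node order given by s1.structure.nodes_labels):
+
+    import numpy as np
+    # rebuild the real adjacency matrix from the known edges
+    labels = list(s1.structure.nodes_labels)
+    estimated = se1.adjacency_matrix()
+    real = np.zeros(estimated.shape, dtype=bool)
+    for from_node, to_node in s1.structure.edges:
+        real[labels.index(from_node), labels.index(to_node)] = True
+    print("matching entries:", int(np.sum(estimated == real)), "out of", real.size)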
+
+
\ No newline at end of file
diff --git a/docs/_build/html/genindex.html b/docs/_build/html/genindex.html new file mode 100644 index 0000000..91cecdd --- /dev/null +++ b/docs/_build/html/genindex.html @@ -0,0 +1,923 @@
+[Sphinx-generated page "Index — PyCTBN 2.0 documentation": alphabetical general index markup omitted]
\ No newline at end of file
diff --git a/docs/_build/html/index.html b/docs/_build/html/index.html new file mode 100644 index 0000000..09124b3 --- /dev/null +++ b/docs/_build/html/index.html @@ -0,0 +1,202 @@
+[Sphinx-generated page "Welcome to PyCTBN's documentation!": contents toctree (examples) and "Indices and tables" links; page markup omitted]
\ No newline at end of file
diff --git a/docs/_build/html/modules.html b/docs/_build/html/modules.html new file mode 100644 index 0000000..370654c --- /dev/null +++ b/docs/_build/html/modules.html @@ -0,0 +1,227 @@
+[Sphinx-generated page "PyCTBN — PyCTBN 2.0 documentation": package toctree; page markup omitted]
\ No newline at end of file
diff --git a/docs/_build/html/objects.inv b/docs/_build/html/objects.inv new file mode 100644 index 0000000..6d2cd99 Binary files /dev/null and b/docs/_build/html/objects.inv differ
diff --git a/docs/_build/html/py-modindex.html b/docs/_build/html/py-modindex.html new file mode 100644 index 0000000..d4d863b --- /dev/null +++ b/docs/_build/html/py-modindex.html @@ -0,0 +1,322 @@
+[Sphinx-generated page "Python Module Index — PyCTBN 2.0 documentation": page head and navigation markup omitted]
Python Module Index

+ c
+ PyCTBN
+     PyCTBN.estimators
+     PyCTBN.estimators.fam_score_calculator
+     PyCTBN.estimators.parameters_estimator
+     PyCTBN.estimators.structure_constraint_based_estimator
+     PyCTBN.estimators.structure_estimator
+     PyCTBN.estimators.structure_score_based_estimator
+     PyCTBN.optimizers
+     PyCTBN.optimizers.constraint_based_optimizer
+     PyCTBN.optimizers.hill_climbing_search
+     PyCTBN.optimizers.optimizer
+     PyCTBN.optimizers.tabu_search
+     PyCTBN.structure_graph
+     PyCTBN.structure_graph.abstract_sample_path
+     PyCTBN.structure_graph.conditional_intensity_matrix
+     PyCTBN.structure_graph.network_graph
+     PyCTBN.structure_graph.sample_path
+     PyCTBN.structure_graph.set_of_cims
+     PyCTBN.structure_graph.sets_of_cims_container
+     PyCTBN.structure_graph.structure
+     PyCTBN.structure_graph.trajectory
+     PyCTBN.utility
+     PyCTBN.utility.abstract_importer
+     PyCTBN.utility.cache
+     PyCTBN.utility.decorators
+     PyCTBN.utility.json_importer
+     PyCTBN.utility.sample_importer
\ No newline at end of file
diff --git a/docs/_build/html/search.html b/docs/_build/html/search.html new file mode 100644 index 0000000..1733d10 --- /dev/null +++ b/docs/_build/html/search.html @@ -0,0 +1,190 @@
+[Sphinx-generated page "Search — PyCTBN 2.0 documentation": search form markup omitted]
+ + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/_build/html/searchindex.js b/docs/_build/html/searchindex.js new file mode 100644 index 0000000..8bd7326 --- /dev/null +++ b/docs/_build/html/searchindex.js @@ -0,0 +1 @@ +Search.setIndex({docnames:["PyCTBN","PyCTBN.estimators","PyCTBN.optimizers","PyCTBN.structure_graph","PyCTBN.utility","examples","index","modules"],envversion:{"sphinx.domains.c":2,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":3,"sphinx.domains.index":1,"sphinx.domains.javascript":2,"sphinx.domains.math":2,"sphinx.domains.python":2,"sphinx.domains.rst":2,"sphinx.domains.std":1,sphinx:56},filenames:["PyCTBN.rst","PyCTBN.estimators.rst","PyCTBN.optimizers.rst","PyCTBN.structure_graph.rst","PyCTBN.utility.rst","examples.rst","index.rst","modules.rst"],objects:{"":{PyCTBN:[0,0,0,"-"]},"PyCTBN.estimators":{fam_score_calculator:[1,0,0,"-"],parameters_estimator:[1,0,0,"-"],structure_constraint_based_estimator:[1,0,0,"-"],structure_estimator:[1,0,0,"-"],structure_score_based_estimator:[1,0,0,"-"]},"PyCTBN.estimators.fam_score_calculator":{FamScoreCalculator:[1,1,1,""]},"PyCTBN.estimators.fam_score_calculator.FamScoreCalculator":{get_fam_score:[1,2,1,""],marginal_likelihood_q:[1,2,1,""],marginal_likelihood_theta:[1,2,1,""],single_cim_xu_marginal_likelihood_q:[1,2,1,""],single_cim_xu_marginal_likelihood_theta:[1,2,1,""],single_internal_cim_xxu_marginal_likelihood_theta:[1,2,1,""],variable_cim_xu_marginal_likelihood_q:[1,2,1,""],variable_cim_xu_marginal_likelihood_theta:[1,2,1,""]},"PyCTBN.estimators.parameters_estimator":{ParametersEstimator:[1,1,1,""]},"PyCTBN.estimators.parameters_estimator.ParametersEstimator":{compute_parameters:[1,2,1,""],compute_parameters_for_node:[1,2,1,""],compute_state_res_time_for_node:[1,2,1,""],compute_state_transitions_for_a_node:[1,2,1,""],fast_init:[1,2,1,""],init_sets_cims_container:[1,2,1,""]},"PyCTBN.estimators.structure_constraint_based_estimator":{StructureConstraintBasedEstimator:[1,1,1,""]},"PyCTBN.estimators.structure_constraint_based_estimator.StructureConstraintBasedEstimator":{complete_test:[1,2,1,""],compute_thumb_value:[1,2,1,""],ctpc_algorithm:[1,2,1,""],estimate_structure:[1,2,1,""],independence_test:[1,2,1,""],one_iteration_of_CTPC_algorithm:[1,2,1,""]},"PyCTBN.estimators.structure_estimator":{StructureEstimator:[1,1,1,""]},"PyCTBN.estimators.structure_estimator.StructureEstimator":{adjacency_matrix:[1,2,1,""],build_complete_graph:[1,2,1,""],build_removable_edges_matrix:[1,2,1,""],estimate_structure:[1,2,1,""],generate_possible_sub_sets_of_size:[1,2,1,""],remove_diagonal_elements:[1,2,1,""],save_plot_estimated_structure_graph:[1,2,1,""],save_results:[1,2,1,""],spurious_edges:[1,2,1,""]},"PyCTBN.estimators.structure_score_based_estimator":{StructureScoreBasedEstimator:[1,1,1,""]},"PyCTBN.estimators.structure_score_based_estimator.StructureScoreBasedEstimator":{estimate_parents:[1,2,1,""],estimate_structure:[1,2,1,""],get_score_from_graph:[1,2,1,""]},"PyCTBN.optimizers":{constraint_based_optimizer:[2,0,0,"-"],hill_climbing_search:[2,0,0,"-"],optimizer:[2,0,0,"-"],tabu_search:[2,0,0,"-"]},"PyCTBN.optimizers.constraint_based_optimizer":{ConstraintBasedOptimizer:[2,1,1,""]},"PyCTBN.optimizers.constraint_based_optimizer.ConstraintBasedOptimizer":{optimize_structure:[2,2,1,""]},"PyCTBN.optimizers.hill_climbing_search":{HillClimbing:[2,1,1,""]},"PyCTBN.optimizers.hill_climbing_search.HillClimbing":{optimize_structure:[2,2,1,""]},"PyCTBN.optimizers.optimizer":{Opti
mizer:[2,1,1,""]},"PyCTBN.optimizers.optimizer.Optimizer":{optimize_structure:[2,2,1,""]},"PyCTBN.optimizers.tabu_search":{TabuSearch:[2,1,1,""]},"PyCTBN.optimizers.tabu_search.TabuSearch":{optimize_structure:[2,2,1,""]},"PyCTBN.structure_graph":{abstract_sample_path:[3,0,0,"-"],conditional_intensity_matrix:[3,0,0,"-"],network_graph:[3,0,0,"-"],sample_path:[3,0,0,"-"],set_of_cims:[3,0,0,"-"],sets_of_cims_container:[3,0,0,"-"],structure:[3,0,0,"-"],trajectory:[3,0,0,"-"]},"PyCTBN.structure_graph.abstract_sample_path":{AbstractSamplePath:[3,1,1,""]},"PyCTBN.structure_graph.abstract_sample_path.AbstractSamplePath":{build_structure:[3,2,1,""],build_trajectories:[3,2,1,""]},"PyCTBN.structure_graph.conditional_intensity_matrix":{ConditionalIntensityMatrix:[3,1,1,""]},"PyCTBN.structure_graph.conditional_intensity_matrix.ConditionalIntensityMatrix":{cim:[3,2,1,""],compute_cim_coefficients:[3,2,1,""],state_residence_times:[3,2,1,""],state_transition_matrix:[3,2,1,""]},"PyCTBN.structure_graph.network_graph":{NetworkGraph:[3,1,1,""]},"PyCTBN.structure_graph.network_graph.NetworkGraph":{add_edges:[3,2,1,""],add_nodes:[3,2,1,""],build_p_comb_structure_for_a_node:[3,2,1,""],build_time_columns_filtering_for_a_node:[3,2,1,""],build_time_scalar_indexing_structure_for_a_node:[3,2,1,""],build_transition_filtering_for_a_node:[3,2,1,""],build_transition_scalar_indexing_structure_for_a_node:[3,2,1,""],clear_indexing_filtering_structures:[3,2,1,""],edges:[3,2,1,""],fast_init:[3,2,1,""],get_node_indx:[3,2,1,""],get_ordered_by_indx_set_of_parents:[3,2,1,""],get_parents_by_id:[3,2,1,""],get_positional_node_indx:[3,2,1,""],get_states_number:[3,2,1,""],has_edge:[3,2,1,""],init_graph:[3,2,1,""],nodes:[3,2,1,""],nodes_indexes:[3,2,1,""],nodes_values:[3,2,1,""],p_combs:[3,2,1,""],remove_edges:[3,2,1,""],remove_node:[3,2,1,""],time_filtering:[3,2,1,""],time_scalar_indexing_strucure:[3,2,1,""],transition_filtering:[3,2,1,""],transition_scalar_indexing_structure:[3,2,1,""]},"PyCTBN.structure_graph.sample_path":{SamplePath:[3,1,1,""]},"PyCTBN.structure_graph.sample_path.SamplePath":{build_structure:[3,2,1,""],build_trajectories:[3,2,1,""],clear_memory:[3,2,1,""],has_prior_net_structure:[3,2,1,""],structure:[3,2,1,""],total_variables_count:[3,2,1,""],trajectories:[3,2,1,""]},"PyCTBN.structure_graph.set_of_cims":{SetOfCims:[3,1,1,""]},"PyCTBN.structure_graph.set_of_cims.SetOfCims":{actual_cims:[3,2,1,""],build_cims:[3,2,1,""],build_times_and_transitions_structures:[3,2,1,""],filter_cims_with_mask:[3,2,1,""],get_cims_number:[3,2,1,""],p_combs:[3,2,1,""]},"PyCTBN.structure_graph.sets_of_cims_container":{SetsOfCimsContainer:[3,1,1,""]},"PyCTBN.structure_graph.sets_of_cims_container.SetsOfCimsContainer":{get_cims_of_node:[3,2,1,""],get_set_of_cims:[3,2,1,""],init_cims_structure:[3,2,1,""]},"PyCTBN.structure_graph.structure":{Structure:[3,1,1,""]},"PyCTBN.structure_graph.structure.Structure":{add_edge:[3,2,1,""],clean_structure_edges:[3,2,1,""],contains_edge:[3,2,1,""],edges:[3,2,1,""],get_node_id:[3,2,1,""],get_node_indx:[3,2,1,""],get_positional_node_indx:[3,2,1,""],get_states_number:[3,2,1,""],nodes_indexes:[3,2,1,""],nodes_labels:[3,2,1,""],nodes_values:[3,2,1,""],remove_edge:[3,2,1,""],remove_node:[3,2,1,""],total_variables_number:[3,2,1,""]},"PyCTBN.structure_graph.trajectory":{Trajectory:[3,1,1,""]},"PyCTBN.structure_graph.trajectory.Trajectory":{complete_trajectory:[3,2,1,""],size:[3,2,1,""],times:[3,2,1,""],trajectory:[3,2,1,""]},"PyCTBN.utility":{abstract_importer:[4,0,0,"-"],cache:[4,0,0,"-"],decorators:[4,0,0,"-"],json_
importer:[4,0,0,"-"],sample_importer:[4,0,0,"-"]},"PyCTBN.utility.abstract_importer":{AbstractImporter:[4,1,1,""]},"PyCTBN.utility.abstract_importer.AbstractImporter":{build_list_of_samples_array:[4,2,1,""],build_sorter:[4,2,1,""],clear_concatenated_frame:[4,2,1,""],compute_row_delta_in_all_samples_frames:[4,2,1,""],compute_row_delta_sigle_samples_frame:[4,2,1,""],concatenated_samples:[4,2,1,""],dataset_id:[4,2,1,""],file_path:[4,2,1,""],sorter:[4,2,1,""],structure:[4,2,1,""],variables:[4,2,1,""]},"PyCTBN.utility.cache":{Cache:[4,1,1,""]},"PyCTBN.utility.cache.Cache":{clear:[4,2,1,""],find:[4,2,1,""],put:[4,2,1,""]},"PyCTBN.utility.decorators":{timing:[4,3,1,""],timing_write:[4,3,1,""]},"PyCTBN.utility.json_importer":{JsonImporter:[4,1,1,""]},"PyCTBN.utility.json_importer.JsonImporter":{build_sorter:[4,2,1,""],clear_data_frame_list:[4,2,1,""],dataset_id:[4,2,1,""],import_data:[4,2,1,""],import_sampled_cims:[4,2,1,""],import_structure:[4,2,1,""],import_trajectories:[4,2,1,""],import_variables:[4,2,1,""],normalize_trajectories:[4,2,1,""],one_level_normalizing:[4,2,1,""],read_json_file:[4,2,1,""]},"PyCTBN.utility.sample_importer":{SampleImporter:[4,1,1,""]},"PyCTBN.utility.sample_importer.SampleImporter":{build_sorter:[4,2,1,""],dataset_id:[4,2,1,""],import_data:[4,2,1,""]},PyCTBN:{estimators:[1,0,0,"-"],optimizers:[2,0,0,"-"],structure_graph:[3,0,0,"-"],utility:[4,0,0,"-"]}},objnames:{"0":["py","module","Python module"],"1":["py","class","Python class"],"2":["py","method","Python method"],"3":["py","function","Python function"]},objtypes:{"0":"py:module","1":"py:class","2":"py:method","3":"py:function"},terms:{"abstract":[1,2,3,4,5],"boolean":[1,3],"class":5,"default":[1,2],"float":1,"function":1,"import":[3,4,6],"int":[1,2,3,4],"null":1,"return":[1,2,3,4,5],"static":[1,3],"super":5,"true":[1,5],"var":5,"void":3,HAS:4,Has:[1,3],NOT:1,The:[1,3,4,5],Use:[1,5],__init__:5,_actual_cach:4,_actual_cim:3,_actual_trajectori:3,_aggregated_info_about_nodes_par:3,_array_indx:4,_cach:1,_cim:3,_complete_graph:1,_df_samples_list:[4,5],_df_structur:4,_df_variabl:[4,5],_file_path:5,_graph:[3,5],_import:3,_list_of_sets_of_par:4,_net_graph:1,_node:1,_node_id:3,_nodes_indx:1,_nodes_v:1,_p_combs_structur:3,_raw_data:4,_sample_path:1,_single_set_of_cim:1,_sorter:[4,5],_state_residence_tim:3,_structur:3,_structure_label:4,_time:3,_time_filt:3,_time_scalar_indexing_structur:3,_total_variables_count:3,_total_variables_numb:3,_trajectori:3,_transition_filt:3,_transition_matric:3,_transition_scalar_indexing_structur:3,_variables_label:4,abc:[2,3,4],about:[2,3],abstract_import:[0,3,7],abstract_sample_path:[0,7],abstractimport:[3,4,5],abstractsamplepath:3,act:4,actual:[1,3],actual_cim:[3,5],add:[3,4],add_edg:3,add_nod:3,added:1,addit:1,adjac:[1,5],adjacency_matrix:[1,5],after:4,against:1,aggreg:3,aggrega:3,algorithm:[1,2,5],all:[1,2,3,4,5],alpha_xu:1,alpha_xxu:1,alreadi:[4,5],also:[1,3],ani:[1,2],anoth:3,append:[1,3],approach:1,arc:4,arrai:[1,3,4,5],assign:[1,3],assum:1,attribuit:3,attribut:3,axi:5,base:[1,2,3,4],bayesian:1,befor:[1,2],belong:1,best:1,between:4,bool:[1,3],both:[1,4],bound:3,build:[1,3,4,5],build_cim:3,build_complete_graph:1,build_list_of_samples_arrai:4,build_p_comb_structure_for_a_nod:3,build_removable_edges_matrix:1,build_sort:[4,5],build_structur:[3,5],build_time_columns_filtering_for_a_nod:3,build_time_scalar_indexing_structure_for_a_nod:3,build_times_and_transitions_structur:3,build_trajectori:[3,5],build_transition_filtering_for_a_nod:3,build_transition_scalar_indexing_structure_for_a_nod:3,built:1
,cach:[0,1,7],calcul:1,call:[4,5],cardin:[1,3,4],cardinalit:[3,4],caridin:3,caridinalit:3,chang:[3,4],check:3,chi:1,chi_test:1,chi_test_alfa:1,child:[1,2],child_indx:1,child_states_numb:1,child_val:1,cim1:1,cim2:1,cim:[1,3,4,5],cim_indx:3,cims_kei:4,clean_structure_edg:3,clear:[3,4],clear_concatenated_fram:4,clear_data_frame_list:4,clear_indexing_filtering_structur:3,clear_memori:3,climb:[1,2],coeffici:3,col:3,color:1,cols_filt:1,column:[1,3,4,5],columns_head:4,comb:3,combin:[3,4],combinatori:3,common:1,complet:[1,3,4],complete_test:1,complete_trajectori:3,comput:[1,2,3,4,5],compute_cim_coeffici:3,compute_paramet:1,compute_parameters_for_nod:[1,5],compute_row_delta_in_all_samples_fram:[4,5],compute_row_delta_sigle_samples_fram:4,compute_state_res_time_for_nod:1,compute_state_transitions_for_a_nod:1,compute_thumb_valu:1,concatanated_sampl:4,concaten:[3,4],concatenated_sampl:4,condit:3,conditional_intensity_matrix:[0,1,7],conditionalintensitymatrix:[1,3],consid:3,constraint:1,constraint_based_optim:[0,7],constraintbasedoptim:2,construct:[3,4,5],conta:4,contain:[1,3,4],contains_edg:3,content:[6,7],convert:[1,4],copi:4,core:4,correct:[3,4],could:1,count:3,creat:[1,3,5],csv:5,csvimport:5,ctbn:1,ctpc:[1,2,5],ctpc_algorithm:[1,5],current:[1,2,4],cut:4,dafram:4,data:[1,2,3,4,6],datafram:[3,4,5],dataset:[1,2,3,4],dataset_id:[4,5],datfram:4,decor:[0,7],def:5,defin:4,definit:4,defualt:1,delta:[1,3,4],demonstr:5,describ:4,desir:[1,3],df_samples_list:4,dict:[4,5],dictionari:4,differ:4,digraph:1,dimens:3,dir:5,direct:[1,3],directli:4,disabl:[1,2],disable_multiprocess:1,distribuit:1,doc:4,doubl:3,download:5,drop:5,duplic:3,dyn:5,each:[1,2,4],edg:[1,3,4,5],edges_list:3,end:4,entir:1,enumer:3,equal:3,est:5,estim:[0,2,3,6,7],estimate_par:1,estimate_structur:1,everi:[3,4],exam:5,exampl:[4,6],exclud:1,exctract:4,exist:4,exp_test_alfa:1,exponenti:1,expos:4,extend:5,extens:4,extract:[3,4],fals:1,fam_score_calcul:[0,7],famscor:1,famscorecalcul:1,fast_init:[1,3,5],file:[1,4,5],file_path:[4,5],filepath:4,fill:[1,5],filter:[1,3],filter_cims_with_mask:3,find:[1,4],first:[1,5],follow:[3,4],form:3,format:5,formula:1,found:4,frame:4,from:[1,3,4,5],from_nod:4,gener:1,generate_possible_sub_sets_of_s:1,get:[1,4],get_cims_numb:3,get_cims_of_nod:3,get_fam_scor:1,get_node_id:3,get_node_indx:3,get_ordered_by_indx_set_of_par:3,get_parents_by_id:3,get_positional_node_indx:3,get_score_from_graph:1,get_set_of_cim:3,get_states_numb:3,given:[1,3,4],glob:5,graph:[1,3,5],graph_struct:3,graphic:1,grid:3,grpah:5,has:[4,5],has_edg:3,has_prior_net_structur:3,have:4,header:4,header_column:4,hill:[1,2],hill_climbing_search:[0,7],hillclimb:2,hold:[1,3],how:4,hyperparamet:1,hypothesi:1,identifi:[1,3,4],iff:1,implement:[2,4,6],import_data:[4,5],import_sampled_cim:4,import_structur:4,import_trajectori:4,import_vari:[4,5],improv:[1,2],independ:1,independence_test:1,index:[1,3,4,5,6],indic:3,indx:[3,4],info:[3,5],inform:[2,3],init:5,init_cims_structur:3,init_graph:3,init_sets_cims_contain:1,initi:[1,3,4,5],inplac:5,input:1,insid:5,insiem:3,instal:6,instanc:3,interest:3,interfac:2,intes:3,iter:[1,2],iterations_numb:[1,2],its:[1,2],join:5,json:[1,4,5],json_import:[0,7],jsonarrai:4,jsonimport:[4,5],keep:[1,2,4],kei:[3,4],knowledg:1,known:1,known_edg:1,label:[1,2,3,4],lenght:[1,2],level:[1,4],likelihood:1,list:[1,2,3,4,5],list_of_column:3,list_of_edg:3,list_of_kei:3,list_of_nod:3,list_of_parents_states_numb:3,load:[1,4],loop:1,m_xu_suff_stat:1,m_xxu_suff_stat:1,main:5,margin:1,marginal_likelihood_q:1,marginal_likelihood_theta:1,mask:3,mask_arr:3,matr
ic:[1,3],matrix:[1,3,4,5],max_par:[1,2],maximum:[1,2],member:[3,4],mention:3,merg:4,method:[1,4],model:1,modul:[6,7],multipl:4,multiprocess:1,name:[1,3,4,5],ndarrai:[1,3,4],necessari:[1,3,4],nest:4,net:[1,2,3,4,5],net_graph:1,network:[1,3,4],network_graph:[0,1,7],networkgraph:[1,3,5],networkx:1,node:[1,2,3,4,5],node_id:[1,2,3],node_index:3,node_indx:[1,3],node_st:3,node_states_numb:3,nodes_index:3,nodes_indexes_arr:3,nodes_label:3,nodes_labels_list:3,nodes_numb:3,nodes_vals_arr:3,nodes_valu:[3,5],none:[1,2,3,4,5],normal:4,normalize_trajectori:4,number:[1,2,3],numpi:[1,3,4],obj:5,object:[1,2,3,4,5],oggetti:3,one:[3,4],one_iteration_of_ctpc_algorithm:1,one_level_norm:4,onli:4,oper:1,optim:[0,1,7],optimize_structur:2,option:[1,2],order:[1,4],origin:4,original_cols_numb:3,otherwis:[1,4],out:4,outer:[4,5],over:1,own:6,p_comb:3,p_combs_list:3,p_indx:3,packag:7,page:6,panda:[4,5],param:3,paramet:[1,2,3,4,6],parameters_estim:[0,7],parametersestim:[1,5],parent:[1,2,3,4],parent_indx:1,parent_label:1,parent_set:1,parent_set_v:1,parent_v:1,parents_cardin:3,parents_comb:4,parents_index:3,parents_label:3,parents_states_numb:3,parents_v:3,parents_valu:3,part:1,particular:[1,4],pass:5,path:[1,4,5],patienc:[1,2],peest:5,perform:1,pip:5,place:4,plot:1,posit:[3,4],possibl:[1,3],predict:2,prepar:4,present:[1,4],print:5,prior:[1,5],prior_net_structur:4,process:[1,2,3,4],properli:4,properti:[3,4],put:4,pyctbn:5,q_xx:3,rappres:3,raw:4,raw_data:4,read:[4,5],read_csv:5,read_csv_fil:5,read_fil:5,read_json_fil:4,real:[1,3,4,5],red:1,refer:[3,4],reject:1,rel:3,relat:4,releas:5,remain:4,remov:[1,3,4],remove_diagonal_el:1,remove_edg:3,remove_nod:3,repres:3,represent:1,res:3,resid:[1,3],result:[1,4,5],results_:1,rtype:3,rule:[1,2],same:4,sampl:[3,4,5],sample_fram:[4,5],sample_import:[0,7],sample_path:[0,1,7],sampleimport:4,samplepath:[1,3,5],samples_label:4,save:[1,5],save_plot_estimated_structure_graph:1,save_result:[1,5],scalar_index:1,scalar_indexes_struct:1,score:1,se1:5,search:[1,2,6],second:1,see:4,select:5,self:[1,3,4,5],sep:1,sep_set:1,set:[1,3,4],set_of_cim:[0,1,4,7],setofcim:[1,3,4,5],sets_of_cim:3,sets_of_cims_contain:[0,7],setsofcimscontain:3,shift:[3,4],shifted_cols_head:4,show:1,signific:1,simbol:4,simpl:5,simpli:5,sinc:3,single_cim_xu_marginal_likelihood_q:1,single_cim_xu_marginal_likelihood_theta:1,single_internal_cim_xxu_marginal_likelihood_theta:1,size:[1,3],socim:[3,4],sofc1:5,sorter:4,specif:[1,3,5],spuriou:1,spurious_edg:1,start:4,state:[1,3],state_res_tim:3,state_residence_tim:3,state_transition_matrix:3,states_number_per_nod:3,statist:1,stop:[1,2],str:[1,2,3,4,5],string:[1,2,3,4],structur:[0,1,2,4,6,7],structure_constraint_based_estim:[0,7],structure_estim:[0,2,7],structure_estimation_exampl:5,structure_graph:[0,1,4,7],structure_label:4,structure_score_based_estim:[0,7],structureconstraintbasedestim:1,structureestim:[1,2,5],structurescorebasedestim:1,structut:3,style:1,submodul:[0,7],subpackag:7,subset:1,suffici:1,suffuci:1,symbol:[3,4],synthet:4,t_xu_suff_stat:1,tabu:[1,2],tabu_length:[1,2],tabu_rules_dur:[1,2],tabu_search:[0,7],tabusearch:2,take:5,tar:5,task:[1,3],tau_xu:1,ternari:5,test:1,test_child:1,test_par:1,tha:4,theta:1,thi:[1,3,4,5],three:5,thumb:1,thumb_threshold:1,thumb_valu:1,time:[1,3,4,5],time_filt:3,time_kei:4,time_scalar_indexing_strucur:3,timestamp:4,timing_writ:4,to_nod:4,tot_vars_count:[1,2],total:[1,3],total_variables_count:3,total_variables_numb:3,traj:4,trajecory_head:4,trajectori:[0,1,4,5,7],trajectories_kei:4,trajectorii:3,trajectory_list:4,trajectri:5,transit:[1,3,4],trans
ition_filt:3,transition_matric:3,transition_scalar_indexing_structur:3,tri:4,tupl:3,tutori:4,two:1,type:[1,2,3,4,5],union:4,uniqu:4,unus:3,usag:6,use:5,used:[1,2,3,4],using:[1,2,3,4],util:[0,3,7],valu:[1,2,3,4,5],values_list:5,var_id:1,variabl:[1,2,3,4,5],variable_cardin:4,variable_cim_xu_marginal_likelihood_q:1,variable_cim_xu_marginal_likelihood_theta:1,variable_label:4,variables_kei:4,variables_label:4,vector:[1,3],want:5,when:1,where:[1,4],which:[1,2,3,4],whl:5,who:1,without:[1,2],word:1,you:[1,4,5],your:6},titles:["PyCTBN package","PyCTBN.estimators package","PyCTBN.optimizers package","PyCTBN.structure_graph package","PyCTBN.utility package","Examples","Welcome to PyCTBN\u2019s documentation!","PyCTBN"],titleterms:{"class":[0,1,2,3,4,7],"import":5,abstract_import:4,abstract_sample_path:3,cach:4,conditional_intensity_matrix:3,constraint_based_optim:2,content:[0,1,2,3,4],data:5,decor:4,document:6,estim:[1,5],exampl:5,fam_score_calcul:1,hill_climbing_search:2,implement:5,indic:6,instal:5,json_import:4,modul:[0,1,2,3,4],network_graph:3,optim:2,own:5,packag:[0,1,2,3,4],paramet:5,parameters_estim:1,pyctbn:6,sample_import:4,sample_path:3,set_of_cim:3,sets_of_cims_contain:3,structur:[3,5],structure_constraint_based_estim:1,structure_estim:1,structure_graph:3,structure_score_based_estim:1,submodul:[1,2,3,4],subpackag:0,tabl:6,tabu_search:2,trajectori:3,usag:5,util:4,welcom:6,your:5}}) \ No newline at end of file diff --git a/docs/classes.estimators.rst b/docs/classes.estimators.rst new file mode 100644 index 0000000..5ecab8b --- /dev/null +++ b/docs/classes.estimators.rst @@ -0,0 +1,53 @@ +PyCTBN.estimators package +========================== + +Submodules +---------- + +PyCTBN.estimators.fam\_score\_calculator module +------------------------------------------------ + +.. automodule:: PyCTBN.estimators.fam_score_calculator + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.estimators.parameters\_estimator module +----------------------------------------------- + +.. automodule:: PyCTBN.estimators.parameters_estimator + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.estimators.structure\_constraint\_based\_estimator module +----------------------------------------------------------------- + +.. automodule:: PyCTBN.estimators.structure_constraint_based_estimator + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.estimators.structure\_estimator module +---------------------------------------------- + +.. automodule:: PyCTBN.estimators.structure_estimator + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.estimators.structure\_score\_based\_estimator module +------------------------------------------------------------ + +.. automodule:: PyCTBN.estimators.structure_score_based_estimator + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: PyCTBN.estimators + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/classes.optimizers.rst b/docs/classes.optimizers.rst new file mode 100644 index 0000000..0730b68 --- /dev/null +++ b/docs/classes.optimizers.rst @@ -0,0 +1,45 @@ +PyCTBN.optimizers package +========================== + +Submodules +---------- + +PyCTBN.optimizers.constraint\_based\_optimizer module +------------------------------------------------------ + +.. automodule:: PyCTBN.optimizers.constraint_based_optimizer + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.optimizers.hill\_climbing\_search module +------------------------------------------------ + +.. 
automodule:: PyCTBN.optimizers.hill_climbing_search + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.optimizers.optimizer module +----------------------------------- + +.. automodule:: PyCTBN.optimizers.optimizer + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.optimizers.tabu\_search module +-------------------------------------- + +.. automodule:: PyCTBN.optimizers.tabu_search + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: PyCTBN.optimizers + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/classes.rst b/docs/classes.rst new file mode 100644 index 0000000..0ff219f --- /dev/null +++ b/docs/classes.rst @@ -0,0 +1,21 @@ +PyCTBN package +=============== + +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + PyCTBN.estimators + PyCTBN.optimizers + PyCTBN.structure_graph + PyCTBN.utility + +Module contents +--------------- + +.. automodule:: PyCTBN + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/classes.structure_graph.rst b/docs/classes.structure_graph.rst new file mode 100644 index 0000000..489b7b6 --- /dev/null +++ b/docs/classes.structure_graph.rst @@ -0,0 +1,77 @@ +PyCTBN.structure\_graph package +================================ + +Submodules +---------- + +PyCTBN.structure\_graph.abstract\_sample\_path module +------------------------------------------------------ + +.. automodule:: PyCTBN.structure_graph.abstract_sample_path + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.structure\_graph.conditional\_intensity\_matrix module +-------------------------------------------------------------- + +.. automodule:: PyCTBN.structure_graph.conditional_intensity_matrix + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.structure\_graph.network\_graph module +---------------------------------------------- + +.. automodule:: PyCTBN.structure_graph.network_graph + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.structure\_graph.sample\_path module +-------------------------------------------- + +.. automodule:: PyCTBN.structure_graph.sample_path + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.structure\_graph.set\_of\_cims module +--------------------------------------------- + +.. automodule:: PyCTBN.structure_graph.set_of_cims + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.structure\_graph.sets\_of\_cims\_container module +--------------------------------------------------------- + +.. automodule:: PyCTBN.structure_graph.sets_of_cims_container + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.structure\_graph.structure module +----------------------------------------- + +.. automodule:: PyCTBN.structure_graph.structure + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.structure\_graph.trajectory module +------------------------------------------ + +.. automodule:: PyCTBN.structure_graph.trajectory + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: PyCTBN.structure_graph + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/classes.utility.rst b/docs/classes.utility.rst new file mode 100644 index 0000000..6285db8 --- /dev/null +++ b/docs/classes.utility.rst @@ -0,0 +1,53 @@ +PyCTBN.utility package +======================= + +Submodules +---------- + +PyCTBN.utility.abstract\_importer module +----------------------------------------- + +.. 
automodule:: PyCTBN.utility.abstract_importer + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.utility.cache module +---------------------------- + +.. automodule:: PyCTBN.utility.cache + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.utility.decorators module +--------------------------------- + +.. automodule:: PyCTBN.utility.decorators + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.utility.json\_importer module +------------------------------------- + +.. automodule:: PyCTBN.utility.json_importer + :members: + :undoc-members: + :show-inheritance: + +PyCTBN.utility.sample\_importer module +--------------------------------------- + +.. automodule:: PyCTBN.utility.sample_importer + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: PyCTBN.utility + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000..2acb0f0 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,70 @@ +# Configuration file for the Sphinx documentation builder. +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +# import os +# import sys +# sys.path.insert(0, os.path.abspath('.')) + +import os +import sys +sys.path.insert(0, os.path.abspath('../PyCTBN/')) +#sys.path.insert(0, os.path.abspath('../PyCTBN/PyCTBN')) +sys.path.insert(0, os.path.abspath('../PyCTBN/PyCTBN')) +sys.path.insert(0, os.path.abspath('../PyCTBN/PyCTBN/estimators')) +sys.path.insert(0, os.path.abspath('../PyCTBN/PyCTBN/optimizers')) +sys.path.insert(0, os.path.abspath('../PyCTBN/PyCTBN/structure_graph')) +sys.path.insert(0, os.path.abspath('../PyCTBN/PyCTBN/utility')) + +print(sys.path) + +# -- Project information ----------------------------------------------------- + +project = 'PyCTBN' +copyright = '2021, Bregoli Alessandro, Martini Filippo, Moretti Luca' +author = 'Bregoli Alessandro, Martini Filippo, Moretti Luca' + +# The full version, including alpha/beta/rc tags +release = '2.0' + + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = ['sphinx.ext.autodoc' +] + +#autosummary_generate = True + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'sphinx_pdj_theme' +import sphinx_pdj_theme +html_theme_path = [sphinx_pdj_theme.get_html_theme_path()] + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. 
They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] \ No newline at end of file diff --git a/docs/examples.rst b/docs/examples.rst new file mode 100644 index 0000000..b1e07c5 --- /dev/null +++ b/docs/examples.rst @@ -0,0 +1,121 @@ +Examples +======== + +Installation/Usage +****************** +Download the release in .tar.gz or .whl format and simply use pip install to install it:: + + $pip install PyCTBN-1.0.tar.gz + + +Implementing your own data importer +*********************************** +.. code-block:: python + + """This example demonstrates the implementation of a simple data importer the extends the class abstract importer to import data in csv format. + The net in exam has three ternary nodes and no prior net structure. + """ + + from PyCTBN import AbstractImporter + + class CSVImporter(AbstractImporter): + + def __init__(self, file_path): + self._df_samples_list = None + super(CSVImporter, self).__init__(file_path) + + def import_data(self): + self.read_csv_file() + self._sorter = self.build_sorter(self._df_samples_list[0]) + self.import_variables() + self.compute_row_delta_in_all_samples_frames(self._df_samples_list) + + def read_csv_file(self): + df = pd.read_csv(self._file_path) + df.drop(df.columns[[0]], axis=1, inplace=True) + self._df_samples_list = [df] + + def import_variables(self): + values_list = [3 for var in self._sorter] + # initialize dict of lists + data = {'Name':self._sorter, 'Value':values_list} + # Create the pandas DataFrame + self._df_variables = pd.DataFrame(data) + + def build_sorter(self, sample_frame: pd.DataFrame) -> typing.List: + return list(sample_frame.columns)[1:] + + def dataset_id(self) -> object: + pass + +Parameters Estimation Example +***************************** + +.. code-block:: python + + from PyCTBN import JsonImporter + from PyCTBN import SamplePath + from PyCTBN import NetworkGraph + from PyCTBN import ParametersEstimator + + + def main(): + read_files = glob.glob(os.path.join('./data', "*.json")) #Take all json files in this dir + #import data + importer = JsonImporter(read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name') + importer.import_data(0) + #Create a SamplePath Obj passing an already filled AbstractImporter object + s1 = SamplePath(importer) + #Build The trajectries and the structural infos + s1.build_trajectories() + s1.build_structure() + print(s1.structure.edges) + print(s1.structure.nodes_values) + #From The Structure Object build the Graph + g = NetworkGraph(s1.structure) + #Select a node you want to estimate the parameters + node = g.nodes[2] + print("Node", node) + #Init the _graph specifically for THIS node + g.fast_init(node) + #Use SamplePath and Grpah to create a ParametersEstimator Object + p1 = ParametersEstimator(s1.trajectories, g) + #Init the peEst specifically for THIS node + p1.fast_init(node) + #Compute the parameters + sofc1 = p1.compute_parameters_for_node(node) + #The est CIMS are inside the resultant SetOfCIms Obj + print(sofc1.actual_cims) + +Structure Estimation Example +**************************** + +.. 
code-block:: python + + from PyCTBN import JsonImporter + from PyCTBN import SamplePath + from PyCTBN import StructureEstimator + + def structure_estimation_example(): + + # read the json files in ./data path + read_files = glob.glob(os.path.join('./data', "*.json")) + # initialize a JsonImporter object for the first file + importer = JsonImporter(read_files[0], 'samples', 'dyn.str', 'variables', 'Time', 'Name') + # import the data at index 0 of the outer json array + importer.import_data(0) + # construct a SamplePath Object passing a filled AbstractImporter + s1 = SamplePath(importer) + # build the trajectories + s1.build_trajectories() + # build the real structure + s1.build_structure() + # construct a StructureEstimator object + se1 = StructureEstimator(s1, 0.1, 0.1) + # call the ctpc algorithm + se1.ctpc_algorithm() + # the adjacency matrix of the estimated structure + print(se1.adjacency_matrix()) + # save results to a json file + se1.save_results() + diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000..236bf53 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,21 @@ +.. PyCTBN documentation master file, created by + sphinx-quickstart on Wed Feb 24 18:06:35 2021. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to PyCTBN's documentation! +================================== + +.. toctree:: + :maxdepth: 3 + :caption: Contents: + + examples + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 0000000..922152e --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. +set BUILDDIR=_build + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs/modules.rst b/docs/modules.rst new file mode 100644 index 0000000..5b5819a --- /dev/null +++ b/docs/modules.rst @@ -0,0 +1,7 @@ +PyCTBN +======= + +.. 
toctree:: + :maxdepth: 4 + + PyCTBN diff --git a/main_package/classes/__init__.py b/main_package/classes/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/main_package/data/esempio_dataset.csv b/main_package/data/esempio_dataset.csv deleted file mode 100644 index 09f00dd..0000000 --- a/main_package/data/esempio_dataset.csv +++ /dev/null @@ -1,1864 +0,0 @@ -,Time,X,Y,Z -0,0.0,2,2,2 -1,0.0795,1,2,2 -2,0.0979,1,2,1 -3,0.1675,1,1,1 -4,0.2181,1,0,1 -5,0.3233,1,0,2 -6,0.4132,1,1,2 -7,0.5572,1,2,2 -8,0.6689,1,2,0 -9,0.7529,2,2,0 -10,0.8074,1,2,0 -11,0.8133,1,2,2 -12,0.8669,0,2,2 -13,0.9124,0,2,1 -14,0.9892,0,2,2 -15,1.0446,2,2,2 -16,1.0976,0,2,2 -17,1.1542,2,2,2 -18,1.1747,2,0,2 -19,1.1875,1,0,2 -20,1.204,1,1,2 -21,1.2138,1,2,2 -22,1.2627,1,2,0 -23,1.3446,1,1,0 -24,1.3733,1,2,0 -25,1.4162,1,2,1 -26,1.4438,1,1,1 -27,1.4645,0,1,1 -28,1.4807,1,1,1 -29,1.6109,1,0,1 -30,1.6348,2,0,1 -31,1.6714,1,0,1 -32,1.7189,1,2,1 -33,1.733,1,1,1 -34,1.8187,1,0,1 -35,1.8947,1,0,2 -36,1.9035,1,0,1 -37,1.9272,0,0,1 -38,1.9505,0,0,0 -39,2.0831,1,0,0 -40,2.136,1,0,1 -41,2.1623,1,0,2 -42,2.1857,1,0,0 -43,2.4266,1,0,2 -44,2.5518,1,0,1 -45,2.7025,0,0,1 -46,2.8049,0,0,0 -47,2.809,1,0,0 -48,2.8325,1,0,1 -49,2.8831,0,0,1 -50,2.9658,0,1,1 -51,3.1578,0,2,1 -52,3.1846,0,1,1 -53,3.3439,2,1,1 -54,3.3693,0,1,1 -55,3.3985,2,1,1 -56,3.4281,0,1,1 -57,3.4766,0,1,2 -58,3.526,0,2,2 -59,3.5697,0,0,2 -60,3.5974,0,2,2 -61,3.6629,1,2,2 -62,3.7055,1,2,0 -63,3.8235,1,0,0 -64,3.9595,1,2,0 -65,4.0246,1,2,1 -66,4.0262,1,2,2 -67,4.1098,1,1,2 -68,4.2318,1,1,0 -69,4.236,1,1,2 -70,4.2592,1,1,0 -71,4.2707,1,0,0 -72,4.3029,1,0,2 -73,4.3524,1,2,2 -74,4.3749,2,2,2 -75,4.3944,0,2,2 -76,4.4505,1,2,2 -77,4.5283,1,1,2 -78,4.5606,1,2,2 -79,4.5979,1,0,2 -80,4.79,1,2,2 -81,4.967,1,0,2 -82,4.9825,1,2,2 -83,4.9867,1,1,2 -84,5.0395,0,1,2 -85,5.1153,0,2,2 -86,5.1724,0,2,1 -87,5.2174,0,1,1 -88,5.2275,1,1,1 -89,5.255,1,0,1 -90,5.3457,1,1,1 -91,5.3758,1,0,1 -92,5.3988,1,1,1 -93,5.6471,1,1,0 -94,5.6582,1,1,2 -95,5.7239,1,1,0 -96,5.784,1,1,2 -97,5.8513,1,2,2 -98,5.99,1,2,0 -99,6.064,1,2,1 -100,6.1111,1,2,0 -101,6.223,1,2,1 -102,6.2316,1,0,1 -103,6.2572,1,2,1 -104,6.2691,1,1,1 -105,6.2935,1,2,1 -106,6.3944,1,2,0 -107,6.4256,1,1,0 -108,6.4348,1,2,0 -109,6.5102,1,1,0 -110,6.5208,1,2,0 -111,6.545,1,2,1 -112,6.5879,1,0,1 -113,6.6558,1,0,0 -114,6.6972,1,0,1 -115,6.7036,1,2,1 -116,6.713,1,2,0 -117,6.7366,1,2,2 -118,6.7522,1,2,0 -119,6.7586,1,0,0 -120,6.8169,1,2,0 -121,6.8337,1,2,2 -122,6.8463,1,2,1 -123,6.9374,1,1,1 -124,7.0371,1,0,1 -125,7.1873,1,0,2 -126,7.2019,2,0,2 -127,7.2859,2,0,1 -128,7.2957,2,0,2 -129,7.3575,1,0,2 -130,7.5165,1,0,1 -131,7.5863,1,0,2 -132,7.6055,1,0,1 -133,7.8261,1,0,0 -134,7.8353,1,2,0 -135,7.8646,1,1,0 -136,7.9157,1,0,0 -137,8.0518,1,2,0 -138,8.3077,2,2,0 -139,8.3346,2,0,0 -140,8.3875,2,2,0 -141,8.5414,0,2,0 -142,8.5571,0,2,1 -143,8.565,0,1,1 -144,8.6032,0,2,1 -145,8.6114,1,2,1 -146,8.6283,1,2,0 -147,8.6577,1,0,0 -148,8.6618,2,0,0 -149,8.6789,0,0,0 -150,8.7114,1,0,0 -151,8.8288,1,0,1 -152,8.8352,1,0,2 -153,8.8676,1,2,2 -154,8.934,2,2,2 -155,8.9999,2,2,1 -156,9.0146,2,1,1 -157,9.1077,2,1,0 -158,9.1677,2,1,1 -159,9.181,2,1,2 -160,9.2049,2,2,2 -161,9.2426,0,2,2 -162,9.3478,2,2,2 -163,9.4303,0,2,2 -164,9.5705,0,2,0 -165,9.6056,2,2,0 -166,9.6112,0,2,0 -167,9.6117,2,2,0 -168,9.663,1,2,0 -169,9.8987,2,2,0 -170,9.9486,2,0,0 -171,9.9563,0,0,0 -172,10.0293,0,2,0 -173,10.0652,2,2,0 -174,10.0683,0,2,0 -175,10.0895,0,1,0 -176,10.1199,0,0,0 -177,10.1656,2,0,0 -178,10.3267,2,2,0 -179,10.3593,1,2,0 -180,10.3677,1,2,2 -181,10.4038,2,2,2 
-182,10.4184,2,1,2 -183,10.4541,2,1,1 -184,10.5078,2,2,1 -185,10.5507,2,1,1 -186,10.6822,1,1,1 -187,10.6895,1,1,0 -188,10.7081,1,0,0 -189,10.7297,1,0,1 -190,10.7299,1,2,1 -191,10.7827,1,2,2 -192,10.8673,1,1,2 -193,10.8734,1,2,2 -194,11.0143,1,2,0 -195,11.1236,1,2,2 -196,11.1941,1,2,0 -197,11.2744,2,2,0 -198,11.284,2,2,2 -199,11.3411,1,2,2 -200,11.4209,1,0,2 -201,11.425,1,1,2 -202,11.5115,1,1,0 -203,11.6297,1,2,0 -204,11.6371,0,2,0 -205,11.6628,1,2,0 -206,11.7306,1,1,0 -207,11.8644,1,2,0 -208,11.8744,2,2,0 -209,12.0028,1,2,0 -210,12.0231,1,0,0 -211,12.0381,1,0,2 -212,12.043,1,0,1 -213,12.0625,1,2,1 -214,12.0993,1,1,1 -215,12.1118,1,1,2 -216,12.1937,1,2,2 -217,12.2106,1,2,0 -218,12.2212,2,2,0 -219,12.2472,2,0,0 -220,12.2803,0,0,0 -221,12.304,0,0,1 -222,12.3087,0,0,2 -223,12.3185,1,0,2 -224,12.3413,1,0,1 -225,12.471,1,2,1 -226,12.6159,2,2,1 -227,12.734,2,0,1 -228,12.787,2,2,1 -229,12.8012,2,2,0 -230,12.8591,2,2,2 -231,12.8924,2,2,0 -232,13.0487,2,1,0 -233,13.1244,2,2,0 -234,13.1573,0,2,0 -235,13.2086,0,2,1 -236,13.3017,0,2,2 -237,13.5649,0,2,0 -238,13.6949,0,1,0 -239,13.7259,1,1,0 -240,13.7844,2,1,0 -241,13.8647,1,1,0 -242,13.9843,1,0,0 -243,13.9857,1,2,0 -244,14.0411,2,2,0 -245,14.1539,2,0,0 -246,14.4909,2,0,1 -247,14.6408,2,0,0 -248,14.802,2,0,2 -249,14.9388,2,0,1 -250,14.9523,2,2,1 -251,15.0736,2,1,1 -252,15.1878,2,1,2 -253,15.2382,2,2,2 -254,15.3132,2,1,2 -255,15.3179,2,2,2 -256,15.3212,0,2,2 -257,15.4264,1,2,2 -258,15.6402,2,2,2 -259,15.6767,2,1,2 -260,15.7451,1,1,2 -261,15.7472,1,1,0 -262,15.788,1,1,2 -263,15.7917,1,1,1 -264,15.9557,2,1,1 -265,15.991,2,2,1 -266,16.0447,2,1,1 -267,16.0905,2,2,1 -268,16.1135,2,0,1 -269,16.246,2,0,2 -270,16.2546,0,0,2 -271,16.2986,0,0,1 -272,16.3298,0,2,1 -273,16.3707,0,1,1 -274,16.4046,2,1,1 -275,16.4073,2,1,2 -276,16.4141,2,1,0 -277,16.4351,2,2,0 -278,16.4831,2,1,0 -279,16.5386,2,0,0 -280,16.5466,0,0,0 -281,16.6126,0,0,1 -282,16.6416,0,0,2 -283,16.6434,0,1,2 -284,16.7608,1,1,2 -285,16.8749,1,2,2 -286,16.891,0,2,2 -287,16.892,2,2,2 -288,17.0821,2,1,2 -289,17.1085,0,1,2 -290,17.1594,0,1,1 -291,17.285,0,0,1 -292,17.2895,1,0,1 -293,17.3458,1,0,2 -294,17.3749,2,0,2 -295,17.4182,2,0,0 -296,17.4365,0,0,0 -297,17.5099,0,2,0 -298,17.5182,0,0,0 -299,17.5507,0,2,0 -300,17.5916,0,2,2 -301,17.604,1,2,2 -302,17.7472,1,0,2 -303,17.813,2,0,2 -304,17.8192,2,2,2 -305,17.8344,2,1,2 -306,17.9153,0,1,2 -307,18.0901,0,2,2 -308,18.1454,0,2,1 -309,18.201,0,1,1 -310,18.2141,0,0,1 -311,18.3243,0,2,1 -312,18.3384,0,1,1 -313,18.3472,0,1,0 -314,18.4036,0,0,0 -315,18.409,0,0,1 -316,18.5107,0,2,1 -317,18.5911,1,2,1 -318,18.6044,1,2,2 -319,18.7143,1,0,2 -320,18.7363,1,0,1 -321,18.738,1,0,2 -322,18.8067,2,0,2 -323,18.8258,1,0,2 -324,18.8762,1,0,1 -325,18.8999,1,0,0 -326,18.9042,1,0,1 -327,18.934,1,2,1 -328,18.9645,1,1,1 -329,19.241,1,0,1 -330,19.3245,1,1,1 -331,19.3432,1,0,1 -332,19.3641,1,1,1 -333,19.3711,0,1,1 -334,19.4859,2,1,1 -335,19.537,0,1,1 -336,19.608,0,2,1 -337,19.6275,0,0,1 -338,19.7304,1,0,1 -339,19.7306,1,2,1 -340,19.8026,2,2,1 -341,19.8083,2,1,1 -342,19.843,2,2,1 -343,19.8587,0,2,1 -344,19.9644,0,2,0 -345,20.0163,0,2,1 -346,20.0176,0,2,0 -347,20.0529,1,2,0 -348,20.0739,1,2,2 -349,20.1519,1,2,1 -350,20.388,1,1,1 -351,20.4335,1,0,1 -352,20.4947,1,0,2 -353,20.5781,2,0,2 -354,20.5964,0,0,2 -355,20.6086,0,2,2 -356,20.681,0,1,2 -357,20.6839,0,1,0 -358,20.7439,0,1,2 -359,20.7553,0,2,2 -360,20.7683,2,2,2 -361,20.9932,2,0,2 -362,21.0526,1,0,2 -363,21.1485,1,1,2 -364,21.2072,1,2,2 -365,21.3602,1,2,0 -366,21.4356,1,2,2 -367,21.5536,1,2,0 -368,21.5736,1,1,0 -369,21.5849,1,2,0 
-370,21.6194,1,0,0 -371,21.62,1,0,2 -372,21.7054,1,0,1 -373,21.7906,1,1,1 -374,21.847,0,1,1 -375,22.1857,0,0,1 -376,22.2518,0,0,0 -377,22.2627,2,0,0 -378,22.2743,0,0,0 -379,22.3469,0,0,1 -380,22.4748,2,0,1 -381,22.4813,2,0,2 -382,22.4944,2,0,0 -383,22.5494,0,0,0 -384,22.6107,0,0,1 -385,22.6279,0,1,1 -386,22.6969,1,1,1 -387,22.7149,1,1,0 -388,22.7303,1,0,0 -389,22.8614,1,0,1 -390,22.8737,1,0,2 -391,22.8882,1,0,1 -392,22.8958,1,2,1 -393,22.9424,1,0,1 -394,22.981,1,2,1 -395,22.9897,1,2,2 -396,22.9973,1,1,2 -397,23.081,1,2,2 -398,23.1523,2,2,2 -399,23.1983,2,0,2 -400,23.2627,2,2,2 -401,23.3006,2,2,1 -402,23.3421,2,0,1 -403,23.381,0,0,1 -404,23.3978,2,0,1 -405,23.4183,2,0,2 -406,23.4212,1,0,2 -407,23.4245,2,0,2 -408,23.546,2,1,2 -409,23.5486,1,1,2 -410,23.6357,1,2,2 -411,23.8447,1,2,0 -412,23.8738,2,2,0 -413,23.9067,2,2,2 -414,23.9577,2,1,2 -415,23.9735,0,1,2 -416,24.0201,0,0,2 -417,24.0572,0,2,2 -418,24.0905,0,2,0 -419,24.1886,1,2,0 -420,24.2253,1,2,2 -421,24.3021,1,2,1 -422,24.36,1,2,0 -423,24.3677,1,2,1 -424,24.4821,2,2,1 -425,24.485,1,2,1 -426,24.4971,1,2,2 -427,24.5322,1,2,0 -428,24.5709,1,0,0 -429,24.5953,1,0,2 -430,24.6746,1,2,2 -431,24.6918,1,0,2 -432,24.7596,1,1,2 -433,24.9392,1,2,2 -434,24.9557,1,2,1 -435,24.9841,1,2,0 -436,25.0531,1,2,1 -437,25.0655,1,2,2 -438,25.0709,1,1,2 -439,25.1702,1,1,1 -440,25.1886,1,0,1 -441,25.2167,1,2,1 -442,25.3794,1,0,1 -443,25.4413,1,0,2 -444,25.5297,1,0,1 -445,25.5369,1,1,1 -446,25.5486,1,1,0 -447,25.6292,1,0,0 -448,25.7656,0,0,0 -449,25.7678,0,0,1 -450,25.9455,0,2,1 -451,25.9613,0,1,1 -452,26.0037,0,0,1 -453,26.0062,0,0,2 -454,26.026,1,0,2 -455,26.0617,1,2,2 -456,26.0662,1,1,2 -457,26.0778,1,1,0 -458,26.1318,2,1,0 -459,26.1697,2,0,0 -460,26.2511,0,0,0 -461,26.3814,0,0,1 -462,26.3867,0,0,2 -463,26.4286,0,0,0 -464,26.5313,0,1,0 -465,26.599,0,0,0 -466,26.6982,0,0,2 -467,26.7073,1,0,2 -468,26.742,1,2,2 -469,26.8259,1,1,2 -470,26.947,1,1,1 -471,26.9619,0,1,1 -472,26.9777,1,1,1 -473,27.0568,0,1,1 -474,27.3265,0,1,2 -475,27.3511,1,1,2 -476,27.3684,1,1,0 -477,27.3789,1,0,0 -478,27.3849,1,2,0 -479,27.4694,1,2,2 -480,27.5131,1,2,0 -481,27.558,1,0,0 -482,27.5596,0,0,0 -483,27.6507,0,2,0 -484,27.7795,0,2,1 -485,27.8356,1,2,1 -486,27.8576,1,2,2 -487,27.8941,1,1,2 -488,27.936,1,2,2 -489,27.9585,1,1,2 -490,27.9806,1,1,0 -491,28.0413,1,0,0 -492,28.0943,1,0,1 -493,28.1653,1,0,2 -494,28.2819,1,1,2 -495,28.2897,1,1,0 -496,28.2962,1,2,0 -497,28.5013,1,2,2 -498,28.6755,1,0,2 -499,28.7102,1,2,2 -500,28.7214,1,2,0 -501,28.7727,1,2,2 -502,28.8352,1,0,2 -503,28.8509,1,1,2 -504,28.8776,1,2,2 -505,28.9025,1,2,0 -506,28.9258,2,2,0 -507,28.9575,2,1,0 -508,29.0573,0,1,0 -509,29.1549,2,1,0 -510,29.1576,2,2,0 -511,29.1725,2,2,1 -512,29.1854,0,2,1 -513,29.2548,0,2,0 -514,29.2671,2,2,0 -515,29.378,0,2,0 -516,29.4219,0,1,0 -517,29.4451,2,1,0 -518,29.4578,2,2,0 -519,29.4717,0,2,0 -520,29.6018,1,2,0 -521,29.6065,1,2,1 -522,29.7716,1,2,0 -523,29.7819,0,2,0 -524,29.8965,0,0,0 -525,29.9296,0,0,1 -526,29.9317,0,0,2 -527,29.9414,0,1,2 -528,30.0248,2,1,2 -529,30.0491,2,2,2 -530,30.1023,1,2,2 -531,30.1669,1,2,1 -532,30.301,1,2,0 -533,30.3476,1,1,0 -534,30.3549,0,1,0 -535,30.3884,0,0,0 -536,30.4346,0,2,0 -537,30.4746,1,2,0 -538,30.4973,1,1,0 -539,30.5991,2,1,0 -540,30.6394,2,1,2 -541,30.6989,2,2,2 -542,30.7811,0,2,2 -543,30.8491,0,2,1 -544,30.8653,0,2,0 -545,31.1308,0,1,0 -546,31.1957,0,1,2 -547,31.3925,0,2,2 -548,31.4364,2,2,2 -549,31.5823,0,2,2 -550,31.585,0,1,2 -551,31.5959,2,1,2 -552,31.5961,1,1,2 -553,31.641,1,2,2 -554,31.6488,1,1,2 -555,31.7463,1,2,2 -556,31.8959,1,2,0 -557,31.9496,1,2,2 
-558,31.9686,1,0,2 -559,31.9973,1,0,1 -560,32.1254,1,2,1 -561,32.2004,1,2,0 -562,32.2289,1,2,2 -563,32.2882,1,0,2 -564,32.3392,1,0,1 -565,32.3789,1,0,0 -566,32.386,1,0,1 -567,32.4003,1,0,2 -568,32.4026,0,0,2 -569,32.4059,0,0,1 -570,32.4545,2,0,1 -571,32.4824,0,0,1 -572,32.5412,0,2,1 -573,32.5465,2,2,1 -574,32.604,2,2,2 -575,32.663,2,0,2 -576,32.6753,2,0,0 -577,32.7262,2,2,0 -578,32.7814,2,2,2 -579,32.833,0,2,2 -580,32.8788,2,2,2 -581,32.8833,2,1,2 -582,32.904,2,2,2 -583,33.0008,2,0,2 -584,33.093,1,0,2 -585,33.0944,2,0,2 -586,33.1435,2,1,2 -587,33.208,2,2,2 -588,33.2809,2,1,2 -589,33.3212,1,1,2 -590,33.359,1,1,0 -591,33.4519,2,1,0 -592,33.4709,1,1,0 -593,33.4803,1,1,2 -594,33.562,0,1,2 -595,33.568,0,1,1 -596,33.5975,0,1,2 -597,33.6277,0,1,0 -598,33.6351,0,0,0 -599,33.68,0,0,2 -600,33.7583,2,0,2 -601,33.7615,2,2,2 -602,33.8483,2,0,2 -603,33.8601,2,2,2 -604,33.8687,2,2,0 -605,33.8735,1,2,0 -606,33.878,1,1,0 -607,34.0028,1,2,0 -608,34.0271,2,2,0 -609,34.0327,0,2,0 -610,34.0627,2,2,0 -611,34.3017,2,2,2 -612,34.3199,2,1,2 -613,34.3202,2,1,0 -614,34.3493,0,1,0 -615,34.4607,2,1,0 -616,34.5047,2,0,0 -617,34.5223,1,0,0 -618,34.562,2,0,0 -619,34.5944,0,0,0 -620,34.5967,2,0,0 -621,34.6872,1,0,0 -622,34.6907,1,0,2 -623,34.8604,1,2,2 -624,34.8742,1,2,0 -625,34.9015,1,1,0 -626,35.0209,1,2,0 -627,35.1462,0,2,0 -628,35.1853,0,2,2 -629,35.2545,0,2,1 -630,35.3178,2,2,1 -631,35.344,0,2,1 -632,35.3626,2,2,1 -633,35.3737,2,0,1 -634,35.4375,2,1,1 -635,35.4394,2,1,0 -636,35.4477,2,1,2 -637,35.4692,1,1,2 -638,35.4881,1,1,0 -639,35.6352,1,2,0 -640,35.6432,1,2,1 -641,35.694,1,2,2 -642,35.7844,1,1,2 -643,35.7948,1,2,2 -644,35.9062,1,2,1 -645,35.9209,1,1,1 -646,35.9299,1,0,1 -647,35.9506,1,2,1 -648,36.0014,2,2,1 -649,36.0043,2,2,2 -650,36.0454,0,2,2 -651,36.0887,0,2,1 -652,36.0961,0,0,1 -653,36.1445,0,0,2 -654,36.1459,2,0,2 -655,36.1695,1,0,2 -656,36.1876,1,0,0 -657,36.3722,1,0,1 -658,36.5983,1,2,1 -659,36.7255,1,2,2 -660,37.0271,0,2,2 -661,37.061,0,2,0 -662,37.1062,0,0,0 -663,37.197,0,0,1 -664,37.4171,0,0,0 -665,37.478,0,0,1 -666,37.5307,0,1,1 -667,37.646,2,1,1 -668,37.6548,2,1,0 -669,37.6695,0,1,0 -670,37.8353,0,1,2 -671,37.8464,0,2,2 -672,37.8633,1,2,2 -673,37.9266,1,0,2 -674,37.9429,2,0,2 -675,38.0492,2,0,1 -676,38.0766,1,0,1 -677,38.1013,2,0,1 -678,38.1448,0,0,1 -679,38.1451,0,2,1 -680,38.1572,0,2,0 -681,38.1902,2,2,0 -682,38.2291,2,0,0 -683,38.2953,0,0,0 -684,38.4535,1,0,0 -685,38.5217,1,0,1 -686,38.5717,0,0,1 -687,38.665,0,1,1 -688,38.6775,0,0,1 -689,38.7657,0,2,1 -690,38.7766,0,1,1 -691,38.8258,0,1,2 -692,38.8516,1,1,2 -693,38.8983,1,1,1 -694,38.9502,1,0,1 -695,38.9958,1,0,2 -696,39.3057,1,0,1 -697,39.3228,1,2,1 -698,39.3403,1,2,2 -699,39.4363,1,1,2 -700,39.4493,1,1,1 -701,39.4618,1,2,1 -702,39.5144,0,2,1 -703,39.5216,0,0,1 -704,39.5819,2,0,1 -705,39.601,2,1,1 -706,39.6222,2,1,0 -707,39.6504,0,1,0 -708,39.6791,0,1,2 -709,39.7127,0,0,2 -710,39.7361,0,0,1 -711,39.7852,0,0,0 -712,39.8897,0,2,0 -713,39.9036,0,0,0 -714,39.9633,2,0,0 -715,39.9806,0,0,0 -716,40.0178,0,2,0 -717,40.1145,2,2,0 -718,40.1716,2,2,2 -719,40.1862,2,2,1 -720,40.3393,2,2,0 -721,40.3975,2,2,1 -722,40.4097,1,2,1 -723,40.4134,1,2,2 -724,40.5548,1,1,2 -725,40.5952,1,2,2 -726,40.598,1,2,0 -727,40.6079,1,1,0 -728,40.6137,1,1,1 -729,40.7164,1,1,2 -730,40.8416,1,1,1 -731,40.8528,1,1,2 -732,40.9449,1,2,2 -733,41.0573,1,0,2 -734,41.1593,1,0,1 -735,41.1688,2,0,1 -736,41.2155,2,1,1 -737,41.2636,2,1,0 -738,41.352,2,2,0 -739,41.421,2,1,0 -740,41.4699,1,1,0 -741,41.5288,1,0,0 -742,41.5463,1,0,1 -743,41.5764,1,2,1 -744,41.5895,1,2,0 -745,41.6683,1,1,0 
-746,41.6971,1,1,1 -747,41.7045,1,1,2 -748,41.7223,2,1,2 -749,41.7293,0,1,2 -750,41.8027,2,1,2 -751,41.808,2,1,0 -752,41.8673,2,0,0 -753,41.8678,0,0,0 -754,41.8684,0,0,2 -755,41.8931,1,0,2 -756,41.9085,2,0,2 -757,42.0736,0,0,2 -758,42.1111,1,0,2 -759,42.1254,1,0,1 -760,42.1839,1,0,2 -761,42.1915,1,2,2 -762,42.197,1,0,2 -763,42.2787,0,0,2 -764,42.3331,0,2,2 -765,42.3814,1,2,2 -766,42.4777,1,1,2 -767,42.642,1,1,0 -768,42.6886,0,1,0 -769,42.7757,0,1,2 -770,42.7917,1,1,2 -771,42.7978,1,1,1 -772,42.8293,1,1,0 -773,42.94,2,1,0 -774,43.0639,2,0,0 -775,43.0666,0,0,0 -776,43.1991,0,0,1 -777,43.2138,0,2,1 -778,43.2359,1,2,1 -779,43.2754,1,0,1 -780,43.2877,1,1,1 -781,43.3022,1,1,2 -782,43.3135,2,1,2 -783,43.3401,2,1,0 -784,43.3823,2,2,0 -785,43.4078,2,2,2 -786,43.4915,2,2,0 -787,43.4987,2,2,2 -788,43.5521,1,2,2 -789,43.7632,2,2,2 -790,43.8124,2,0,2 -791,43.8271,1,0,2 -792,43.8332,2,0,2 -793,43.8577,2,0,1 -794,43.8873,2,0,0 -795,43.8901,2,0,2 -796,43.9351,0,0,2 -797,43.9744,0,1,2 -798,44.0585,0,2,2 -799,44.0642,1,2,2 -800,44.1328,1,0,2 -801,44.1856,1,1,2 -802,44.2647,1,2,2 -803,44.2986,0,2,2 -804,44.3459,2,2,2 -805,44.5809,2,1,2 -806,44.658,2,1,1 -807,44.7978,1,1,1 -808,44.8305,1,0,1 -809,44.8628,2,0,1 -810,44.8897,0,0,1 -811,44.8912,1,0,1 -812,44.9644,1,0,2 -813,44.9733,1,0,1 -814,44.9782,1,2,1 -815,44.9786,1,2,2 -816,44.9992,1,1,2 -817,45.0222,1,1,1 -818,45.1632,2,1,1 -819,45.2428,0,1,1 -820,45.3771,0,0,1 -821,45.418,0,0,2 -822,45.47,1,0,2 -823,45.4772,2,0,2 -824,45.4997,2,0,0 -825,45.5748,1,0,0 -826,45.5975,1,2,0 -827,45.6268,1,1,0 -828,45.6412,1,0,0 -829,45.746,2,0,0 -830,45.7851,2,0,1 -831,45.8283,2,2,1 -832,45.8458,2,2,2 -833,45.8994,2,1,2 -834,45.8998,0,1,2 -835,45.9098,1,1,2 -836,46.0034,1,1,1 -837,46.0424,1,0,1 -838,46.1504,1,0,2 -839,46.2049,1,2,2 -840,46.241,2,2,2 -841,46.2579,2,1,2 -842,46.2717,0,1,2 -843,46.2968,1,1,2 -844,46.4702,1,1,1 -845,46.5575,1,1,2 -846,46.6118,1,1,0 -847,46.6158,0,1,0 -848,46.7248,0,0,0 -849,46.7332,1,0,0 -850,46.7405,2,0,0 -851,46.7838,1,0,0 -852,46.8302,1,0,1 -853,47.0207,1,2,1 -854,47.0274,1,2,0 -855,47.0392,1,0,0 -856,47.24,2,0,0 -857,47.3767,2,0,2 -858,47.4601,2,1,2 -859,47.5123,0,1,2 -860,47.5491,2,1,2 -861,47.6347,2,2,2 -862,47.6515,2,1,2 -863,47.7127,0,1,2 -864,47.7303,0,1,0 -865,47.7693,1,1,0 -866,47.7899,1,0,0 -867,47.9561,1,1,0 -868,47.9768,1,2,0 -869,47.9958,0,2,0 -870,48.0401,0,2,1 -871,48.1287,0,2,2 -872,48.1829,0,2,0 -873,48.217,0,2,1 -874,48.2407,0,1,1 -875,48.246,0,1,2 -876,48.2959,1,1,2 -877,48.3505,1,2,2 -878,48.6209,1,0,2 -879,48.6731,0,0,2 -880,48.7934,0,0,0 -881,48.8246,1,0,0 -882,48.8612,1,2,0 -883,48.8862,1,0,0 -884,48.9116,1,0,2 -885,48.9219,1,2,2 -886,48.9355,0,2,2 -887,48.9484,1,2,2 -888,48.9699,1,0,2 -889,48.9819,2,0,2 -890,49.0093,0,0,2 -891,49.0188,0,2,2 -892,49.1049,1,2,2 -893,49.284,2,2,2 -894,49.3282,2,1,2 -895,49.4233,0,1,2 -896,49.4444,0,1,1 -897,49.4768,2,1,1 -898,49.4798,0,1,1 -899,49.4942,1,1,1 -900,49.6552,2,1,1 -901,49.6946,2,2,1 -902,49.7204,0,2,1 -903,49.7859,2,2,1 -904,49.7864,2,0,1 -905,49.8174,1,0,1 -906,49.8615,1,0,2 -907,49.9572,1,2,2 -908,50.032,1,0,2 -909,50.2144,1,2,2 -910,50.2776,1,1,2 -911,50.3029,1,2,2 -912,50.3349,1,2,0 -913,50.3425,1,1,0 -914,50.401,1,2,0 -915,50.4278,1,0,0 -916,50.4624,1,2,0 -917,50.6934,1,1,0 -918,50.7814,1,1,1 -919,50.782,1,2,1 -920,50.8019,1,1,1 -921,50.8166,1,0,1 -922,50.8671,1,0,0 -923,50.8786,2,0,0 -924,50.9204,2,0,1 -925,50.9862,0,0,1 -926,51.0075,0,0,2 -927,51.0086,0,0,1 -928,51.0345,0,0,2 -929,51.0889,0,0,0 -930,51.0902,0,2,0 -931,51.0985,0,2,1 -932,51.2129,1,2,1 -933,51.26,1,2,0 
-934,51.3187,0,2,0 -935,51.3518,0,1,0 -936,51.3544,0,2,0 -937,51.469,0,1,0 -938,51.5026,0,2,0 -939,51.5296,0,1,0 -940,51.5636,0,2,0 -941,51.7784,0,0,0 -942,51.8686,0,2,0 -943,51.9565,2,2,0 -944,51.9777,2,2,2 -945,52.1087,2,2,1 -946,52.1398,2,2,2 -947,52.1971,2,0,2 -948,52.2547,2,0,1 -949,52.271,2,0,2 -950,52.3833,2,2,2 -951,52.3951,2,1,2 -952,52.4047,2,1,1 -953,52.4455,2,1,0 -954,52.5004,1,1,0 -955,52.5161,0,1,0 -956,52.544,0,0,0 -957,52.5442,1,0,0 -958,52.5654,1,0,1 -959,52.7153,1,0,2 -960,52.7661,1,2,2 -961,52.7947,1,1,2 -962,52.853,1,0,2 -963,52.8736,1,2,2 -964,53.0678,1,1,2 -965,53.1599,1,1,0 -966,53.1829,1,2,0 -967,53.2851,1,2,1 -968,53.3814,1,1,1 -969,53.4873,1,2,1 -970,53.4959,1,2,2 -971,53.5013,1,0,2 -972,53.5033,1,0,1 -973,53.5558,1,0,0 -974,53.5613,1,0,2 -975,53.5963,1,1,2 -976,53.7209,1,0,2 -977,53.7993,2,0,2 -978,53.8346,2,1,2 -979,53.8433,2,1,0 -980,53.9248,2,1,2 -981,53.9997,0,1,2 -982,54.0045,1,1,2 -983,54.0206,1,2,2 -984,54.0747,1,2,0 -985,54.0778,1,1,0 -986,54.1275,1,0,0 -987,54.1604,1,0,1 -988,54.1697,1,0,2 -989,54.1762,2,0,2 -990,54.282,2,2,2 -991,54.2966,2,1,2 -992,54.3719,1,1,2 -993,54.4028,1,1,0 -994,54.4513,1,0,0 -995,54.4683,1,0,2 -996,54.4821,1,2,2 -997,54.5601,1,0,2 -998,54.5852,1,2,2 -999,54.6504,2,2,2 -1000,54.7021,1,2,2 -1001,54.7282,1,2,1 -1002,54.7325,1,2,2 -1003,54.8082,1,1,2 -1004,54.8417,1,2,2 -1005,55.0424,1,2,1 -1006,55.0656,1,0,1 -1007,55.1133,2,0,1 -1008,55.1711,2,0,0 -1009,55.1827,2,1,0 -1010,55.1833,1,1,0 -1011,55.2378,1,1,2 -1012,55.3369,1,2,2 -1013,55.3409,1,2,0 -1014,55.3535,1,1,0 -1015,55.3602,1,2,0 -1016,55.4003,1,2,2 -1017,55.4586,1,0,2 -1018,55.4904,1,2,2 -1019,55.6486,2,2,2 -1020,55.7091,2,1,2 -1021,55.8616,2,2,2 -1022,55.895,2,0,2 -1023,55.9184,2,2,2 -1024,55.9352,2,2,0 -1025,55.9803,0,2,0 -1026,56.0149,0,0,0 -1027,56.0514,0,0,1 -1028,56.1097,0,0,2 -1029,56.1498,0,2,2 -1030,56.1713,0,0,2 -1031,56.1778,0,1,2 -1032,56.327,0,1,0 -1033,56.4688,0,0,0 -1034,56.6018,2,0,0 -1035,56.6833,1,0,0 -1036,56.754,0,0,0 -1037,56.8233,0,2,0 -1038,56.8711,0,1,0 -1039,56.8764,0,1,1 -1040,56.8817,1,1,1 -1041,56.9164,1,1,2 -1042,56.9465,1,1,1 -1043,56.9787,1,2,1 -1044,57.0522,1,0,1 -1045,57.144,1,2,1 -1046,57.1676,0,2,1 -1047,57.2436,1,2,1 -1048,57.4194,1,0,1 -1049,57.4333,1,0,0 -1050,57.4458,1,0,1 -1051,57.4658,1,1,1 -1052,57.5619,1,1,2 -1053,57.5989,1,1,1 -1054,57.6595,1,1,2 -1055,57.7821,0,1,2 -1056,57.7886,0,2,2 -1057,57.8393,0,1,2 -1058,57.8511,0,2,2 -1059,57.8748,0,2,1 -1060,57.8827,1,2,1 -1061,57.8858,1,2,2 -1062,57.8984,1,0,2 -1063,57.9161,1,2,2 -1064,58.0479,1,2,0 -1065,58.0559,1,2,1 -1066,58.1276,1,2,2 -1067,58.1912,1,0,2 -1068,58.3022,1,0,1 -1069,58.4511,1,0,2 -1070,58.5193,2,0,2 -1071,58.5498,2,1,2 -1072,58.5675,0,1,2 -1073,58.6935,0,1,0 -1074,58.8336,0,2,0 -1075,59.0031,1,2,0 -1076,59.0568,1,0,0 -1077,59.1181,1,0,1 -1078,59.1816,1,0,2 -1079,59.2001,1,1,2 -1080,59.2787,1,1,1 -1081,59.5875,1,1,2 -1082,59.6261,1,1,0 -1083,59.6268,1,2,0 -1084,59.6474,1,2,2 -1085,59.6491,1,1,2 -1086,59.6836,1,1,1 -1087,59.6938,1,2,1 -1088,59.7043,1,2,0 -1089,59.7483,1,1,0 -1090,59.7874,1,1,2 -1091,59.8575,1,1,0 -1092,60.0145,1,2,0 -1093,60.0261,1,0,0 -1094,60.0688,1,2,0 -1095,60.0978,1,0,0 -1096,60.1422,1,0,1 -1097,60.1445,1,2,1 -1098,60.2295,1,2,0 -1099,60.2301,1,1,0 -1100,60.2861,1,1,1 -1101,60.3492,1,1,2 -1102,60.4328,1,1,1 -1103,60.4919,1,0,1 -1104,60.6452,1,2,1 -1105,60.6543,1,1,1 -1106,60.7234,1,1,2 -1107,60.7757,1,1,0 -1108,60.7874,1,2,0 -1109,60.8061,1,1,0 -1110,60.8543,1,0,0 -1111,60.9125,1,0,2 -1112,60.975,1,2,2 -1113,61.0191,1,1,2 -1114,61.025,1,1,0 
-1115,61.0868,1,0,0 -1116,61.0906,1,0,2 -1117,61.1139,2,0,2 -1118,61.1255,2,0,0 -1119,61.1752,2,2,0 -1120,61.1765,0,2,0 -1121,61.1912,2,2,0 -1122,61.2947,0,2,0 -1123,61.3013,2,2,0 -1124,61.4061,2,1,0 -1125,61.4336,2,0,0 -1126,61.466,2,0,2 -1127,61.485,2,2,2 -1128,61.5117,0,2,2 -1129,61.5313,2,2,2 -1130,61.6431,0,2,2 -1131,61.8245,1,2,2 -1132,62.0663,1,0,2 -1133,62.118,1,2,2 -1134,62.1497,1,0,2 -1135,62.2431,1,2,2 -1136,62.3672,1,1,2 -1137,62.3731,1,1,0 -1138,62.4261,1,2,0 -1139,62.4462,1,2,2 -1140,62.5013,1,1,2 -1141,62.5064,1,2,2 -1142,62.5692,0,2,2 -1143,62.6088,0,0,2 -1144,62.6144,0,2,2 -1145,62.6446,0,0,2 -1146,62.7838,0,2,2 -1147,62.815,0,1,2 -1148,62.8695,0,2,2 -1149,62.8742,0,1,2 -1150,62.8881,0,2,2 -1151,62.8895,1,2,2 -1152,62.891,1,1,2 -1153,62.9383,1,1,1 -1154,62.9421,2,1,1 -1155,62.9551,0,1,1 -1156,62.9838,0,1,0 -1157,63.0047,2,1,0 -1158,63.0913,2,2,0 -1159,63.1177,0,2,0 -1160,63.1179,2,2,0 -1161,63.298,2,2,2 -1162,63.3074,2,0,2 -1163,63.31,2,0,0 -1164,63.4633,1,0,0 -1165,63.6307,1,0,2 -1166,63.6379,1,0,1 -1167,63.6539,1,0,2 -1168,63.6775,1,2,2 -1169,63.7173,0,2,2 -1170,63.8565,1,2,2 -1171,63.8945,1,2,0 -1172,63.941,1,1,0 -1173,63.9628,0,1,0 -1174,63.9898,0,0,0 -1175,64.0071,0,0,2 -1176,64.053,0,0,1 -1177,64.0998,0,0,2 -1178,64.1664,0,0,1 -1179,64.1757,1,0,1 -1180,64.1792,1,1,1 -1181,64.1891,1,0,1 -1182,64.3983,1,1,1 -1183,64.4431,1,0,1 -1184,64.47,1,2,1 -1185,64.4914,1,2,0 -1186,64.523,1,1,0 -1187,64.5562,1,2,0 -1188,64.5827,1,0,0 -1189,64.5844,1,0,1 -1190,64.6006,1,0,0 -1191,64.9407,2,0,0 -1192,64.9974,2,1,0 -1193,65.0504,2,2,0 -1194,65.1846,2,2,2 -1195,65.33,1,2,2 -1196,65.3614,1,2,1 -1197,65.3623,2,2,1 -1198,65.4658,2,1,1 -1199,65.4681,1,1,1 -1200,65.6767,1,2,1 -1201,65.6902,1,1,1 -1202,65.7844,0,1,1 -1203,65.7868,0,1,2 -1204,65.8823,0,0,2 -1205,65.9254,1,0,2 -1206,65.9518,2,0,2 -1207,66.1833,1,0,2 -1208,66.2696,1,0,1 -1209,66.3522,1,2,1 -1210,66.4081,1,0,1 -1211,66.4536,1,1,1 -1212,66.5094,1,1,0 -1213,66.5724,1,2,0 -1214,66.5959,1,1,0 -1215,66.6136,1,2,0 -1216,66.8624,1,2,1 -1217,66.8874,1,2,0 -1218,66.9112,0,2,0 -1219,66.9434,1,2,0 -1220,66.9865,1,2,1 -1221,66.9927,0,2,1 -1222,67.0154,0,1,1 -1223,67.0561,2,1,1 -1224,67.059,2,1,0 -1225,67.1251,0,1,0 -1226,67.166,2,1,0 -1227,67.1871,2,1,1 -1228,67.2127,2,0,1 -1229,67.2443,2,1,1 -1230,67.3317,2,0,1 -1231,67.3319,2,2,1 -1232,67.4256,0,2,1 -1233,67.4604,1,2,1 -1234,67.6128,1,2,2 -1235,67.6369,0,2,2 -1236,67.6506,0,2,1 -1237,67.6706,0,2,0 -1238,67.7192,0,2,1 -1239,67.8187,0,2,0 -1240,67.8378,0,1,0 -1241,67.8427,0,1,2 -1242,67.9206,0,2,2 -1243,67.9582,0,1,2 -1244,68.1134,2,1,2 -1245,68.1213,0,1,2 -1246,68.2287,2,1,2 -1247,68.2687,2,2,2 -1248,68.2922,2,1,2 -1249,68.3399,2,1,1 -1250,68.3508,0,1,1 -1251,68.5625,0,1,2 -1252,68.6382,1,1,2 -1253,68.6824,1,0,2 -1254,68.6931,2,0,2 -1255,68.7135,0,0,2 -1256,68.7328,0,0,1 -1257,68.8458,0,2,1 -1258,68.9071,0,1,1 -1259,68.9285,0,2,1 -1260,68.9314,0,0,1 -1261,68.9319,0,2,1 -1262,68.9631,2,2,1 -1263,68.9868,2,1,1 -1264,68.9876,2,1,0 -1265,69.0074,2,0,0 -1266,69.0511,2,0,2 -1267,69.0677,2,1,2 -1268,69.0756,2,2,2 -1269,69.1248,2,0,2 -1270,69.1439,0,0,2 -1271,69.1497,2,0,2 -1272,69.2007,2,2,2 -1273,69.2284,2,2,1 -1274,69.2515,0,2,1 -1275,69.2547,0,0,1 -1276,69.2739,0,0,2 -1277,69.2832,0,0,0 -1278,69.2946,2,0,0 -1279,69.3776,2,0,2 -1280,69.3837,2,2,2 -1281,69.3984,2,2,0 -1282,69.4081,1,2,0 -1283,69.4366,1,2,1 -1284,69.5526,1,1,1 -1285,69.6874,1,1,2 -1286,69.703,1,1,1 -1287,69.7244,1,1,2 -1288,69.7497,1,1,1 -1289,69.8551,2,1,1 -1290,69.8718,0,1,1 -1291,69.9079,0,0,1 -1292,70.1237,0,0,0 
-1293,70.1311,0,2,0 -1294,70.1343,0,2,1 -1295,70.173,2,2,1 -1296,70.2074,2,2,0 -1297,70.2111,2,2,2 -1298,70.2229,2,0,2 -1299,70.2252,0,0,2 -1300,70.2273,2,0,2 -1301,70.2466,2,0,1 -1302,70.2897,2,0,0 -1303,70.291,2,0,1 -1304,70.3737,0,0,1 -1305,70.4019,2,0,1 -1306,70.4416,0,0,1 -1307,70.5093,0,1,1 -1308,70.5816,0,0,1 -1309,70.6166,0,0,2 -1310,70.6504,0,0,0 -1311,70.7002,0,1,0 -1312,70.7044,0,2,0 -1313,70.7638,0,1,0 -1314,70.833,2,1,0 -1315,70.8635,2,2,0 -1316,70.8789,0,2,0 -1317,70.9021,0,2,1 -1318,70.9126,0,1,1 -1319,70.9639,0,0,1 -1320,71.0243,2,0,1 -1321,71.0255,2,0,2 -1322,71.0661,2,1,2 -1323,71.2639,2,2,2 -1324,71.3872,0,2,2 -1325,71.4153,0,2,1 -1326,71.4335,0,2,0 -1327,71.5769,2,2,0 -1328,71.6025,0,2,0 -1329,71.6647,0,2,1 -1330,71.6716,0,2,0 -1331,71.7172,2,2,0 -1332,71.7296,0,2,0 -1333,71.7552,0,1,0 -1334,71.8533,2,1,0 -1335,71.8844,1,1,0 -1336,71.9072,1,0,0 -1337,72.0414,1,2,0 -1338,72.0973,1,1,0 -1339,72.1028,1,1,2 -1340,72.1127,1,2,2 -1341,72.1238,0,2,2 -1342,72.126,0,1,2 -1343,72.1608,2,1,2 -1344,72.2405,2,2,2 -1345,72.3169,1,2,2 -1346,72.347,1,1,2 -1347,72.3687,1,1,1 -1348,72.5125,0,1,1 -1349,72.6006,2,1,1 -1350,72.6467,2,0,1 -1351,72.677,2,0,2 -1352,72.7177,0,0,2 -1353,72.7537,0,1,2 -1354,72.9848,0,1,0 -1355,72.9945,0,1,2 -1356,73.0426,0,1,0 -1357,73.0788,2,1,0 -1358,73.0914,2,0,0 -1359,73.125,2,1,0 -1360,73.1545,2,0,0 -1361,73.3142,2,0,2 -1362,73.3237,2,2,2 -1363,73.341,2,2,0 -1364,73.3541,2,2,2 -1365,73.4379,1,2,2 -1366,73.4627,2,2,2 -1367,73.5002,2,2,1 -1368,73.5801,2,2,0 -1369,73.6162,2,2,2 -1370,73.7423,2,1,2 -1371,73.7758,2,1,0 -1372,73.7823,2,0,0 -1373,73.8852,2,0,1 -1374,73.9343,2,0,2 -1375,73.959,1,0,2 -1376,73.9628,1,1,2 -1377,73.9795,1,0,2 -1378,74.0485,1,2,2 -1379,74.1278,2,2,2 -1380,74.1932,2,2,1 -1381,74.2277,2,0,1 -1382,74.2403,2,2,1 -1383,74.3338,2,0,1 -1384,74.348,2,0,2 -1385,74.3845,2,0,1 -1386,74.4718,2,0,2 -1387,74.4874,2,2,2 -1388,74.5067,2,1,2 -1389,74.5132,0,1,2 -1390,74.5162,0,1,0 -1391,74.5909,2,1,0 -1392,74.6526,0,1,0 -1393,74.7129,2,1,0 -1394,74.7502,2,0,0 -1395,74.7741,1,0,0 -1396,74.7979,1,0,1 -1397,74.9251,1,0,2 -1398,74.9706,0,0,2 -1399,74.9895,1,0,2 -1400,75.0389,1,0,1 -1401,75.0464,1,2,1 -1402,75.2052,1,1,1 -1403,75.3646,1,1,2 -1404,75.4292,1,1,0 -1405,75.5615,0,1,0 -1406,75.5932,0,1,2 -1407,75.6371,0,1,0 -1408,75.6783,0,2,0 -1409,75.7239,1,2,0 -1410,75.7269,1,0,0 -1411,75.8568,1,2,0 -1412,76.0307,1,2,2 -1413,76.1863,1,1,2 -1414,76.3023,1,1,0 -1415,76.3416,1,2,0 -1416,76.3582,1,0,0 -1417,76.4189,1,0,1 -1418,76.422,1,1,1 -1419,76.4454,1,1,0 -1420,76.5259,1,2,0 -1421,76.5619,2,2,0 -1422,76.5771,1,2,0 -1423,76.5976,1,1,0 -1424,76.6947,1,1,2 -1425,76.7887,1,0,2 -1426,76.864,1,0,0 -1427,76.9518,1,2,0 -1428,76.9746,1,2,1 -1429,77.0618,0,2,1 -1430,77.078,2,2,1 -1431,77.0894,2,0,1 -1432,77.1235,2,2,1 -1433,77.143,2,2,2 -1434,77.1536,0,2,2 -1435,77.1875,0,2,0 -1436,77.2361,1,2,0 -1437,77.2684,1,1,0 -1438,77.273,1,1,1 -1439,77.4354,1,1,0 -1440,77.47,1,2,0 -1441,77.5096,1,2,2 -1442,77.6405,1,1,2 -1443,77.698,1,1,1 -1444,77.8847,1,1,2 -1445,77.9016,1,2,2 -1446,77.9883,1,1,2 -1447,78.0231,1,2,2 -1448,78.0391,0,2,2 -1449,78.1124,0,1,2 -1450,78.1536,0,1,0 -1451,78.1624,1,1,0 -1452,78.2059,1,2,0 -1453,78.2407,1,2,2 -1454,78.2956,0,2,2 -1455,78.4285,0,1,2 -1456,78.4916,0,0,2 -1457,78.5156,0,2,2 -1458,78.54,2,2,2 -1459,78.5535,1,2,2 -1460,78.5978,1,1,2 -1461,78.622,1,2,2 -1462,78.6396,1,2,1 -1463,78.7314,0,2,1 -1464,78.7337,0,0,1 -1465,78.7433,2,0,1 -1466,78.7624,2,0,2 -1467,78.7711,0,0,2 -1468,78.8588,0,1,2 -1469,78.8763,2,1,2 -1470,78.9382,2,2,2 
-1471,78.9939,2,1,2 -1472,79.0739,2,2,2 -1473,79.1912,2,0,2 -1474,79.2292,2,1,2 -1475,79.245,2,2,2 -1476,79.2779,1,2,2 -1477,79.3108,1,1,2 -1478,79.3303,1,2,2 -1479,79.4297,1,2,1 -1480,79.4488,1,2,0 -1481,79.4703,1,2,1 -1482,79.5834,1,1,1 -1483,79.608,1,2,1 -1484,79.6472,1,0,1 -1485,79.6723,1,1,1 -1486,79.9281,1,0,1 -1487,79.9525,1,2,1 -1488,79.9537,1,0,1 -1489,79.9748,0,0,1 -1490,80.0405,0,2,1 -1491,80.0744,1,2,1 -1492,80.0997,1,2,0 -1493,80.1552,2,2,0 -1494,80.1919,2,1,0 -1495,80.1944,0,1,0 -1496,80.2094,0,1,2 -1497,80.2907,0,1,0 -1498,80.2966,0,1,1 -1499,80.3072,1,1,1 -1500,80.3283,2,1,1 -1501,80.3622,2,0,1 -1502,80.4092,1,0,1 -1503,80.4514,1,2,1 -1504,80.4986,1,1,1 -1505,80.5212,1,0,1 -1506,80.5615,1,2,1 -1507,80.5697,1,2,2 -1508,80.6034,2,2,2 -1509,80.6184,1,2,2 -1510,80.6192,2,2,2 -1511,80.6204,2,0,2 -1512,80.6353,0,0,2 -1513,80.6483,0,0,1 -1514,80.6546,0,2,1 -1515,80.8419,2,2,1 -1516,80.8505,2,1,1 -1517,81.0409,2,1,0 -1518,81.1001,2,0,0 -1519,81.1104,0,0,0 -1520,81.3016,0,0,2 -1521,81.3639,0,2,2 -1522,81.4545,2,2,2 -1523,81.4746,0,2,2 -1524,81.5442,1,2,2 -1525,81.5589,1,1,2 -1526,81.5928,1,2,2 -1527,81.6212,1,2,1 -1528,81.8477,1,0,1 -1529,81.945,1,2,1 -1530,81.9992,0,2,1 -1531,82.0275,2,2,1 -1532,82.0426,0,2,1 -1533,82.0753,0,0,1 -1534,82.1103,2,0,1 -1535,82.2129,1,0,1 -1536,82.3542,1,1,1 -1537,82.3847,1,1,2 -1538,82.4053,1,1,0 -1539,82.4645,1,1,1 -1540,82.5793,1,2,1 -1541,82.6188,0,2,1 -1542,82.6404,0,0,1 -1543,82.69,2,0,1 -1544,82.7285,1,0,1 -1545,82.7627,1,2,1 -1546,82.9099,1,1,1 -1547,82.924,1,0,1 -1548,82.9388,1,0,2 -1549,82.9457,1,2,2 -1550,83.034,0,2,2 -1551,83.0344,0,2,0 -1552,83.0857,2,2,0 -1553,83.0903,0,2,0 -1554,83.1056,0,1,0 -1555,83.1871,1,1,0 -1556,83.3572,1,0,0 -1557,83.3867,2,0,0 -1558,83.4372,2,2,0 -1559,83.5193,2,1,0 -1560,83.5518,2,0,0 -1561,83.5726,2,0,1 -1562,83.626,1,0,1 -1563,83.642,1,0,2 -1564,83.6792,1,2,2 -1565,83.6894,1,1,2 -1566,83.6962,1,2,2 -1567,83.7851,1,2,0 -1568,83.7994,1,1,0 -1569,83.8917,1,0,0 -1570,83.9414,1,2,0 -1571,83.9695,1,1,0 -1572,84.017,1,1,1 -1573,84.0462,1,0,1 -1574,84.0902,1,1,1 -1575,84.1023,1,1,2 -1576,84.2031,0,1,2 -1577,84.2561,2,1,2 -1578,84.3242,1,1,2 -1579,84.3267,1,2,2 -1580,84.3318,1,2,1 -1581,84.3938,2,2,1 -1582,84.426,0,2,1 -1583,84.5476,0,2,2 -1584,84.5795,0,2,1 -1585,84.584,0,1,1 -1586,84.6349,0,1,2 -1587,84.6461,1,1,2 -1588,84.713,1,1,0 -1589,84.7134,1,2,0 -1590,84.7612,1,0,0 -1591,84.8494,1,2,0 -1592,84.9198,1,2,1 -1593,84.926,1,0,1 -1594,84.9783,1,2,1 -1595,84.9819,1,1,1 -1596,85.1614,2,1,1 -1597,85.2586,0,1,1 -1598,85.317,0,0,1 -1599,85.3222,0,0,2 -1600,85.3521,0,2,2 -1601,85.5275,0,0,2 -1602,85.5536,0,0,0 -1603,85.6113,2,0,0 -1604,85.6716,1,0,0 -1605,85.7768,0,0,0 -1606,85.7817,0,2,0 -1607,85.8483,1,2,0 -1608,86.0393,1,2,2 -1609,86.0648,1,2,1 -1610,86.104,1,0,1 -1611,86.1241,1,2,1 -1612,86.2204,1,1,1 -1613,86.2833,0,1,1 -1614,86.3256,0,0,1 -1615,86.3296,0,0,2 -1616,86.3632,0,1,2 -1617,86.5542,0,1,0 -1618,86.5556,2,1,0 -1619,86.6254,2,2,0 -1620,86.7892,2,2,2 -1621,86.8351,2,2,0 -1622,86.859,2,2,1 -1623,86.8702,2,2,2 -1624,86.9558,2,1,2 -1625,86.9738,2,2,2 -1626,87.0353,2,2,1 -1627,87.1105,2,2,2 -1628,87.1127,1,2,2 -1629,87.2116,1,0,2 -1630,87.2162,1,2,2 -1631,87.2288,1,1,2 -1632,87.2492,1,1,0 -1633,87.4093,1,2,0 -1634,87.4467,1,2,2 -1635,87.5657,2,2,2 -1636,87.6072,0,2,2 -1637,87.6164,0,2,0 -1638,87.6171,0,2,1 -1639,87.6511,2,2,1 -1640,87.7263,2,2,0 -1641,87.7701,2,0,0 -1642,87.7709,2,0,2 -1643,87.786,0,0,2 -1644,87.82,0,2,2 -1645,87.8296,1,2,2 -1646,87.8573,1,2,0 -1647,87.9113,1,1,0 -1648,87.9729,1,2,0 
-1649,88.2559,1,2,1 -1650,88.2805,1,1,1 -1651,88.425,1,1,2 -1652,88.5093,0,1,2 -1653,88.5617,1,1,2 -1654,88.7929,1,1,0 -1655,88.8167,1,0,0 -1656,88.8786,1,0,2 -1657,88.9475,1,0,1 -1658,89.1265,1,2,1 -1659,89.1705,0,2,1 -1660,89.235,2,2,1 -1661,89.2529,2,0,1 -1662,89.324,2,0,0 -1663,89.3449,2,0,2 -1664,89.3722,2,2,2 -1665,89.4361,0,2,2 -1666,89.5011,1,2,2 -1667,89.5424,1,0,2 -1668,89.663,1,1,2 -1669,89.8345,1,1,1 -1670,89.8352,1,0,1 -1671,89.8489,1,2,1 -1672,89.8494,1,0,1 -1673,89.851,1,0,0 -1674,90.0136,1,1,0 -1675,90.1333,1,1,1 -1676,90.2379,1,1,2 -1677,90.3825,1,0,2 -1678,90.3992,1,0,1 -1679,90.4015,2,0,1 -1680,90.4291,0,0,1 -1681,90.5112,2,0,1 -1682,90.5311,1,0,1 -1683,90.5844,1,1,1 -1684,90.6863,1,2,1 -1685,90.7048,1,0,1 -1686,90.7435,1,0,2 -1687,90.8132,1,2,2 -1688,90.823,1,0,2 -1689,91.1317,1,0,0 -1690,91.1575,1,0,1 -1691,91.2982,1,0,2 -1692,91.3033,1,0,1 -1693,91.3197,0,0,1 -1694,91.3318,2,0,1 -1695,91.4891,1,0,1 -1696,91.6641,1,0,2 -1697,91.6878,1,2,2 -1698,91.739,1,2,1 -1699,91.7726,1,2,2 -1700,91.842,1,0,2 -1701,91.8435,1,2,2 -1702,92.0026,2,2,2 -1703,92.0465,2,1,2 -1704,92.057,1,1,2 -1705,92.135,1,0,2 -1706,92.1429,0,0,2 -1707,92.1584,0,1,2 -1708,92.268,0,1,0 -1709,92.2848,2,1,0 -1710,92.3066,2,2,0 -1711,92.3335,2,0,0 -1712,92.4529,1,0,0 -1713,92.4653,1,0,2 -1714,92.4965,2,0,2 -1715,92.6314,0,0,2 -1716,92.6466,0,1,2 -1717,92.705,0,2,2 -1718,92.7835,1,2,2 -1719,92.8322,1,1,2 -1720,92.8604,2,1,2 -1721,92.8648,2,1,0 -1722,92.8886,1,1,0 -1723,92.9563,0,1,0 -1724,93.0185,2,1,0 -1725,93.0472,2,0,0 -1726,93.0519,1,0,0 -1727,93.0632,0,0,0 -1728,93.1065,1,0,0 -1729,93.2822,1,0,1 -1730,93.2832,1,0,2 -1731,93.411,1,0,1 -1732,93.4598,1,2,1 -1733,93.4983,1,2,0 -1734,93.5105,1,0,0 -1735,93.5982,1,0,2 -1736,93.6578,1,2,2 -1737,93.7847,1,1,2 -1738,93.8024,2,1,2 -1739,93.8416,2,1,1 -1740,93.8669,2,1,2 -1741,93.9362,2,2,2 -1742,94.1152,2,1,2 -1743,94.1643,0,1,2 -1744,94.2284,1,1,2 -1745,94.2336,1,1,1 -1746,94.333,1,1,2 -1747,94.4192,1,0,2 -1748,94.4331,1,0,1 -1749,94.4962,1,0,0 -1750,94.5353,1,0,2 -1751,94.5411,1,1,2 -1752,94.5555,1,1,0 -1753,94.5742,1,1,2 -1754,94.5876,1,1,1 -1755,94.613,1,1,2 -1756,94.6449,1,1,0 -1757,94.6502,1,0,0 -1758,94.6578,1,0,2 -1759,94.7383,1,2,2 -1760,94.8089,2,2,2 -1761,94.8365,2,2,1 -1762,94.8398,2,0,1 -1763,94.8654,2,0,0 -1764,94.884,0,0,0 -1765,94.887,2,0,0 -1766,94.934,2,0,2 -1767,94.98,1,0,2 -1768,95.016,0,0,2 -1769,95.0688,1,0,2 -1770,95.239,1,2,2 -1771,95.4006,1,1,2 -1772,95.4394,1,1,1 -1773,95.4625,1,2,1 -1774,95.485,1,2,2 -1775,95.4956,1,2,1 -1776,95.5109,1,2,2 -1777,95.5131,2,2,2 -1778,95.6887,1,2,2 -1779,95.717,1,0,2 -1780,95.8036,1,1,2 -1781,95.8356,2,1,2 -1782,95.8415,2,2,2 -1783,95.8573,0,2,2 -1784,95.8639,2,2,2 -1785,95.9058,0,2,2 -1786,95.9096,0,0,2 -1787,95.949,0,2,2 -1788,95.9575,0,1,2 -1789,96.1054,0,2,2 -1790,96.2514,0,1,2 -1791,96.2927,1,1,2 -1792,96.3744,1,1,0 -1793,96.3918,1,1,2 -1794,96.5096,1,1,1 -1795,96.5913,1,2,1 -1796,96.6428,1,2,0 -1797,96.7423,1,1,0 -1798,96.7469,1,0,0 -1799,96.8154,2,0,0 -1800,96.8339,0,0,0 -1801,96.837,0,0,1 -1802,96.8595,0,1,1 -1803,96.8881,0,1,0 -1804,96.9219,0,1,2 -1805,96.9881,0,1,0 -1806,96.9881,1,1,0 -1807,97.0005,1,0,0 -1808,97.0255,1,0,1 -1809,97.0302,1,2,1 -1810,97.0411,1,1,1 -1811,97.0667,1,1,0 -1812,97.0779,1,0,0 -1813,97.1854,1,0,2 -1814,97.2579,1,2,2 -1815,97.3316,1,2,1 -1816,97.4264,0,2,1 -1817,97.446,0,1,1 -1818,97.4573,0,2,1 -1819,97.4874,0,0,1 -1820,97.4917,2,0,1 -1821,97.5029,2,0,2 -1822,97.5248,1,0,2 -1823,97.6817,1,0,1 -1824,97.6897,2,0,1 -1825,97.8303,0,0,1 -1826,97.8513,0,0,2 -1827,97.8675,0,0,1 
-1828,97.8792,1,0,1 -1829,97.9157,1,1,1 -1830,98.0196,2,1,1 -1831,98.024,2,1,0 -1832,98.0294,2,1,2 -1833,98.0406,2,1,0 -1834,98.1331,2,0,0 -1835,98.1975,0,0,0 -1836,98.2551,1,0,0 -1837,98.2805,2,0,0 -1838,98.3159,2,0,1 -1839,98.3289,2,2,1 -1840,98.382,1,2,1 -1841,98.3909,1,2,2 -1842,98.5164,1,2,1 -1843,98.5526,2,2,1 -1844,98.5534,2,2,2 -1845,98.5998,2,1,2 -1846,98.6135,2,2,2 -1847,98.6542,1,2,2 -1848,98.7799,1,2,1 -1849,98.8027,0,2,1 -1850,98.8145,1,2,1 -1851,98.8459,1,1,1 -1852,98.9322,1,0,1 -1853,99.124,1,2,1 -1854,99.3102,1,0,1 -1855,99.4348,1,0,2 -1856,99.4377,1,2,2 -1857,99.4955,1,2,1 -1858,99.7188,1,0,1 -1859,99.9105,1,2,1 -1860,99.9363,1,2,0 -1861,99.9492,2,2,0 -1862,100.0,-1,-1,-1