diff --git a/Cargo.toml b/Cargo.toml
index 9941ed6..7214634 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -12,6 +12,7 @@
 thiserror = "*"
 rand = "*"
 bimap = "*"
 enum_dispatch = "*"
+statrs = "*"
 rand_chacha = "*"
 
 [dev-dependencies]
diff --git a/src/lib.rs b/src/lib.rs
index 65e4b11..ec12261 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -8,4 +8,5 @@
 pub mod network;
 pub mod ctbn;
 pub mod tools;
 pub mod parameter_learning;
+pub mod structure_learning;
diff --git a/src/parameter_learning.rs b/src/parameter_learning.rs
index 4fe3bdd..c4221cb 100644
--- a/src/parameter_learning.rs
+++ b/src/parameter_learning.rs
@@ -57,12 +57,12 @@ pub fn sufficient_statistics<T: network::Network>(
     let mut T: Array2<f64> = Array::zeros((parentset_domain.iter().product(), node_domain));
 
     //Compute the sufficient statistics
-    for trj in dataset.trajectories.iter() {
-        for idx in 0..(trj.time.len() - 1) {
-            let t1 = trj.time[idx];
-            let t2 = trj.time[idx + 1];
-            let ev1 = trj.events.row(idx);
-            let ev2 = trj.events.row(idx + 1);
+    for trj in dataset.get_trajectories().iter() {
+        for idx in 0..(trj.get_time().len() - 1) {
+            let t1 = trj.get_time()[idx];
+            let t2 = trj.get_time()[idx + 1];
+            let ev1 = trj.get_events().row(idx);
+            let ev2 = trj.get_events().row(idx + 1);
 
             let idx1 = vector_to_idx.dot(&ev1);
             T[[idx1, ev1[node]]] += t2 - t1;
diff --git a/src/structure_learning.rs b/src/structure_learning.rs
new file mode 100644
index 0000000..8ba91df
--- /dev/null
+++ b/src/structure_learning.rs
@@ -0,0 +1,10 @@
+pub mod score_function;
+pub mod score_based_algorithm;
+use crate::network;
+use crate::tools;
+
+pub trait StructureLearningAlgorithm {
+    fn fit_transform<T>(&self, net: T, dataset: &tools::Dataset) -> T
+    where
+        T: network::Network;
+}
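Note: `fit_transform` takes ownership of the network, relearns its structure from the dataset, and returns it. A minimal usage sketch against the concrete implementation added below (hill climbing with the BIC score; `net` and `dataset` are assumed to be already built, as in tests/structure_learning.rs):

```rust
use rustyCTBN::network::Network;
use rustyCTBN::structure_learning::score_based_algorithm::HillClimbing;
use rustyCTBN::structure_learning::score_function::BIC;
use rustyCTBN::structure_learning::StructureLearningAlgorithm;
use rustyCTBN::tools::Dataset;

//Relearn the structure of `net` from `dataset`. The input edges are
//discarded (initialize_adj_matrix) before the search starts.
fn relearn<T: Network>(net: T, dataset: &Dataset) -> T {
    let sl = HillClimbing::init(BIC::init(1, 1.0), None);
    sl.fit_transform(net, dataset)
}
```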
diff --git a/src/structure_learning/score_based_algorithm.rs b/src/structure_learning/score_based_algorithm.rs
new file mode 100644
index 0000000..e57c4c1
--- /dev/null
+++ b/src/structure_learning/score_based_algorithm.rs
@@ -0,0 +1,83 @@
+use crate::network;
+use crate::structure_learning::score_function::ScoreFunction;
+use crate::structure_learning::StructureLearningAlgorithm;
+use crate::tools;
+use std::collections::BTreeSet;
+
+pub struct HillClimbing<S: ScoreFunction> {
+    score_function: S,
+    max_parent_set: Option<usize>,
+}
+
+impl<S: ScoreFunction> HillClimbing<S> {
+    pub fn init(score_function: S, max_parent_set: Option<usize>) -> HillClimbing<S> {
+        HillClimbing {
+            score_function,
+            max_parent_set,
+        }
+    }
+}
+
+impl<S: ScoreFunction> StructureLearningAlgorithm for HillClimbing<S> {
+    fn fit_transform<T>(&self, net: T, dataset: &tools::Dataset) -> T
+    where
+        T: network::Network,
+    {
+        //Check the coherence between dataset and network
+        if net.get_number_of_nodes() != dataset.get_trajectories()[0].get_events().shape()[1] {
+            panic!("Dataset and Network must have the same number of variables.")
+        }
+
+        //Make the network mutable.
+        let mut net = net;
+        //Check if the max_parent_set constraint is present.
+        let max_parent_set = self.max_parent_set.unwrap_or(net.get_number_of_nodes());
+        //Reset the adj matrix
+        net.initialize_adj_matrix();
+        //Iterate over each node to learn its parent set.
+        for node in net.get_node_indices() {
+            //Initialize an empty parent set.
+            let mut parent_set: BTreeSet<usize> = BTreeSet::new();
+            //Compute the score for the empty parent set.
+            let mut current_score = self.score_function.call(&net, node, &parent_set, dataset);
+            //Set the old score to -\infty.
+            let mut old_score = f64::NEG_INFINITY;
+            //Iterate until convergence.
+            while current_score > old_score {
+                //Save the current_score.
+                old_score = current_score;
+                //Iterate over each candidate parent.
+                for parent in net.get_node_indices() {
+                    //Continue if the parent and the node are the same.
+                    if parent == node {
+                        continue;
+                    }
+                    //Try to remove parent from the parent_set.
+                    let is_removed = parent_set.remove(&parent);
+                    //If parent was not in the parent_set, add it.
+                    if !is_removed && parent_set.len() < max_parent_set {
+                        parent_set.insert(parent);
+                    }
+                    //Compute the score with the modified parent_set.
+                    let tmp_score = self.score_function.call(&net, node, &parent_set, dataset);
+                    //If tmp_score is worse than current_score, revert the change to the parent set.
+                    if tmp_score < current_score {
+                        if is_removed {
+                            parent_set.insert(parent);
+                        } else {
+                            parent_set.remove(&parent);
+                        }
+                    }
+                    //Otherwise save the computed score as current_score.
+                    else {
+                        current_score = tmp_score;
+                    }
+                }
+            }
+            //Apply the learned parent_set to the network struct.
+            parent_set.iter().for_each(|p| net.add_edge(*p, node));
+        }
+
+        return net;
+    }
+}
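For each node, the inner pass above toggles one candidate parent at a time (removing it if present, adding it if absent and the `max_parent_set` bound allows) and keeps the toggle only when the score improves; the search stops once a full pass yields no gain, so it converges to a local optimum. Construction mirrors the tests below (the hyperparameter values here are arbitrary):

```rust
use rustyCTBN::structure_learning::score_based_algorithm::HillClimbing;
use rustyCTBN::structure_learning::score_function::LogLikelihood;

fn main() {
    //alpha = 1, tau = 1.0 as in the tests; Some(2) caps every learned parent set at two nodes.
    let _sl = HillClimbing::init(LogLikelihood::init(1, 1.0), Some(2));
}
```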
diff --git a/src/structure_learning/score_function.rs b/src/structure_learning/score_function.rs
new file mode 100644
index 0000000..dba40e2
--- /dev/null
+++ b/src/structure_learning/score_function.rs
@@ -0,0 +1,136 @@
+use crate::network;
+use crate::parameter_learning;
+use crate::params;
+use crate::tools;
+use ndarray::prelude::*;
+use statrs::function::gamma;
+use std::collections::BTreeSet;
+
+pub trait ScoreFunction {
+    fn call<T>(
+        &self,
+        net: &T,
+        node: usize,
+        parent_set: &BTreeSet<usize>,
+        dataset: &tools::Dataset,
+    ) -> f64
+    where
+        T: network::Network;
+}
+
+pub struct LogLikelihood {
+    alpha: usize,
+    tau: f64,
+}
+
+impl LogLikelihood {
+    pub fn init(alpha: usize, tau: f64) -> LogLikelihood {
+        //Tau must be >=0.0
+        if tau < 0.0 {
+            panic!("tau must be >=0.0");
+        }
+        LogLikelihood { alpha, tau }
+    }
+
+    fn compute_score<T>(
+        &self,
+        net: &T,
+        node: usize,
+        parent_set: &BTreeSet<usize>,
+        dataset: &tools::Dataset,
+    ) -> (f64, Array3<usize>)
+    where
+        T: network::Network,
+    {
+        //Identify the type of node used
+        match &net.get_node(node).params {
+            params::Params::DiscreteStatesContinousTime(_params) => {
+                //Compute the sufficient statistics M (number of transitions) and T (residence
+                //time)
+                let (M, T) =
+                    parameter_learning::sufficient_statistics(net, dataset, node, parent_set);
+
+                //Scale alpha according to the number of parent configurations
+                let alpha = self.alpha as f64 / M.shape()[0] as f64;
+                //Scale tau according to the number of parent configurations
+                let tau = self.tau / M.shape()[0] as f64;
+
+                //Compute the log likelihood for q
+                let log_ll_q: f64 = M
+                    .sum_axis(Axis(2))
+                    .iter()
+                    .zip(T.iter())
+                    .map(|(m, t)| {
+                        gamma::ln_gamma(alpha + *m as f64 + 1.0)
+                            + (alpha + 1.0) * f64::ln(tau)
+                            - gamma::ln_gamma(alpha + 1.0)
+                            - (alpha + *m as f64 + 1.0) * f64::ln(tau + t)
+                    })
+                    .sum();
+
+                //Compute the log likelihood for theta
+                let log_ll_theta: f64 = M
+                    .outer_iter()
+                    .map(|x| {
+                        x.outer_iter()
+                            .map(|y| {
+                                gamma::ln_gamma(alpha)
+                                    - gamma::ln_gamma(alpha + y.sum() as f64)
+                                    + y.iter()
+                                        .map(|z| {
+                                            gamma::ln_gamma(alpha + *z as f64)
+                                                - gamma::ln_gamma(alpha)
+                                        })
+                                        .sum::<f64>()
+                            })
+                            .sum::<f64>()
+                    })
+                    .sum();
+                (log_ll_theta + log_ll_q, M)
+            }
+        }
+    }
+}
+
+impl ScoreFunction for LogLikelihood {
+    fn call<T>(
+        &self,
+        net: &T,
+        node: usize,
+        parent_set: &BTreeSet<usize>,
+        dataset: &tools::Dataset,
+    ) -> f64
+    where
+        T: network::Network,
+    {
+        self.compute_score(net, node, parent_set, dataset).0
+    }
+}
+
+pub struct BIC {
+    ll: LogLikelihood,
+}
+
+impl BIC {
+    pub fn init(alpha: usize, tau: f64) -> BIC {
+        BIC {
+            ll: LogLikelihood::init(alpha, tau),
+        }
+    }
+}
+
+impl ScoreFunction for BIC {
+    fn call<T>(
+        &self,
+        net: &T,
+        node: usize,
+        parent_set: &BTreeSet<usize>,
+        dataset: &tools::Dataset,
+    ) -> f64
+    where
+        T: network::Network,
+    {
+        //Compute the log-likelihood
+        let (ll, M) = self.ll.compute_score(net, node, parent_set, dataset);
+        //Compute the number of parameters
+        let n_parameters = M.shape()[0] * M.shape()[1] * (M.shape()[2] - 1);
+        //TODO: Optimize this
+        //Compute the sample size
+        let sample_size: usize = dataset
+            .get_trajectories()
+            .iter()
+            .map(|x| x.get_time().len() - 1)
+            .sum();
+        //Compute BIC
+        ll - f64::ln(sample_size as f64) / 2.0 * n_parameters as f64
+    }
+}
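For reference, the score computed by `compute_score` above is the marginal log-likelihood of a node given its parent set, read directly off the two closures. Here m and t are the entries of the sufficient statistics M and T, u ranges over parent configurations, x and x' over the states of the node, and alpha, tau are the hyperparameters after scaling by the number of parent configurations:

```latex
\log LL_q = \sum_{u,x}\Big[\ln\Gamma(\alpha + m_{u,x} + 1) + (\alpha + 1)\ln\tau
          - \ln\Gamma(\alpha + 1) - (\alpha + m_{u,x} + 1)\ln(\tau + t_{u,x})\Big]

\log LL_\theta = \sum_{u,x}\Big[\ln\Gamma(\alpha) - \ln\Gamma\Big(\alpha + \sum_{x'} m_{u,x,x'}\Big)
               + \sum_{x'}\big(\ln\Gamma(\alpha + m_{u,x,x'}) - \ln\Gamma(\alpha)\big)\Big]
```

where m_{u,x} = sum over x' of m_{u,x,x'}, and the returned score is log LL_theta + log LL_q. BIC then subtracts the penalty (ln N / 2) * k, with N the total number of transitions in the dataset and k = |U| * |X| * (|X| - 1) the number of free parameters.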
diff --git a/src/tools.rs b/src/tools.rs
index 2a38d34..7cf205b 100644
--- a/src/tools.rs
+++ b/src/tools.rs
@@ -3,16 +3,54 @@
 use crate::node;
 use crate::params;
 use crate::params::ParamsTrait;
 use ndarray::prelude::*;
-use rand_chacha::ChaCha8Rng;
 use rand_chacha::rand_core::SeedableRng;
+use rand_chacha::ChaCha8Rng;
 
 pub struct Trajectory {
-    pub time: Array1<f64>,
-    pub events: Array2<usize>,
+    time: Array1<f64>,
+    events: Array2<usize>,
+}
+
+impl Trajectory {
+    pub fn init(time: Array1<f64>, events: Array2<usize>) -> Trajectory {
+        //Events and time are two parts of the same trajectory. For this reason they must have the
+        //same number of samples.
+        if time.shape()[0] != events.shape()[0] {
+            panic!("time.shape[0] must be equal to events.shape[0]");
+        }
+        Trajectory { time, events }
+    }
+
+    pub fn get_time(&self) -> &Array1<f64> {
+        &self.time
+    }
+
+    pub fn get_events(&self) -> &Array2<usize> {
+        &self.events
+    }
 }
 
 pub struct Dataset {
-    pub trajectories: Vec<Trajectory>,
+    trajectories: Vec<Trajectory>,
+}
+
+impl Dataset {
+    pub fn init(trajectories: Vec<Trajectory>) -> Dataset {
+        //All the trajectories in the same dataset must represent the same process. For this reason
+        //each trajectory must represent the same number of variables.
+        if trajectories
+            .iter()
+            .any(|x| trajectories[0].get_events().shape()[1] != x.get_events().shape()[1])
+        {
+            panic!("All the trajectories must represent the same number of variables");
+        }
+        Dataset { trajectories }
+    }
+
+    pub fn get_trajectories(&self) -> &Vec<Trajectory> {
+        &self.trajectories
+    }
 }
 
 pub fn trajectory_generator<T: network::Network>(
@@ -21,25 +59,38 @@ pub fn trajectory_generator<T: network::Network>(
     net: &T,
     n_trajectories: u64,
     t_end: f64,
     seed: Option<u64>,
 ) -> Dataset {
-    let mut dataset = Dataset {
-        trajectories: Vec::new(),
+    //Temporary growing vector containing the generated trajectories.
+    let mut trajectories: Vec<Trajectory> = Vec::new();
+
+    //Random Generator object
+    let mut rng: ChaCha8Rng = match seed {
+        //If a seed is present use it to initialize the random generator.
+        Some(seed) => SeedableRng::seed_from_u64(seed),
+        //Otherwise create a new random generator using the method `from_entropy`
+        None => SeedableRng::from_entropy(),
     };
-
-    let seed = seed.unwrap_or_else(rand::random);
-
-    let mut rng = ChaCha8Rng::seed_from_u64(seed);
-
-    let node_idx: Vec<_> = net.get_node_indices().collect();
+
+    //Each iteration generates one trajectory.
     for _ in 0..n_trajectories {
+        //Current time of the sampling process
         let mut t = 0.0;
+        //History of all the moments in which something changed
         let mut time: Vec<f64> = Vec::new();
-        let mut events: Vec<Array1<usize>> = Vec::new();
-        let mut current_state: Vec<params::StateType> = node_idx
-            .iter()
-            .map(|x| net.get_node(*x).params.get_random_state_uniform(&mut rng))
+        //Configuration of the process variables at time t, initialized with a uniform
+        //distribution.
+        let mut current_state: Vec<params::StateType> = net
+            .get_node_indices()
+            .map(|x| net.get_node(x).params.get_random_state_uniform(&mut rng))
             .collect();
+        //History of all the configurations of the process variables.
+        let mut events: Vec<Array1<usize>> = Vec::new();
+        //Vector containing the time to the next transition for each variable.
         let mut next_transitions: Vec<Option<f64>> =
-            (0..node_idx.len()).map(|_| Option::None).collect();
+            net.get_node_indices().map(|_| Option::None).collect();
+
+        //Add the starting time for the trajectory.
+        time.push(t.clone());
+        //Add the starting configuration of the trajectory.
         events.push(
             current_state
                 .iter()
                 .map(|x| match x {
                     params::StateType::Discrete(state) => state.clone(),
                 })
                 .collect(),
         );
-        time.push(t.clone());
+        //Generate new samples until the ending time is reached.
         while t < t_end {
+            //Generate the next transition time for each uninitialized variable.
             for (idx, val) in next_transitions.iter_mut().enumerate() {
                 if let None = val {
                     *val = Some(
                     );
                 }
             }
-
+
+            //Get the variable with the smallest transition time.
            let next_node_transition = next_transitions
                .iter()
                .enumerate()
                .min_by(|x, y| x.1.unwrap().partial_cmp(&y.1.unwrap()).unwrap())
                .unwrap()
                .0;
+            //Check if the next transition takes place after the ending time.
             if next_transitions[next_node_transition].unwrap() > t_end {
                 break;
             }
+            //Get the time in which the next transition occurs.
             t = next_transitions[next_node_transition].unwrap().clone();
+            //Add the transition time to the time history.
             time.push(t.clone());
-
+
+            //Compute the new state of the transitioning variable.
             current_state[next_node_transition] = net
                 .get_node(next_node_transition)
                 .params
                 .get_random_state(
                     &mut rng,
                 )
                 .unwrap();
-
+
+            //Add the new state to events.
             events.push(Array::from_vec(
                 current_state
                     .iter()
                     .map(|x| match x {
                         params::StateType::Discrete(state) => state.clone(),
                     })
                     .collect(),
             ));
+            //Reset the next transition time for the transitioning node.
             next_transitions[next_node_transition] = None;
 
+            //Reset the next transition time for each child of the transitioning node.
             for child in net.get_children_set(next_node_transition) {
                 next_transitions[child] = None
             }
         }
-
+
+        //Add current_state as the last state.
         events.push(
             current_state
                 .iter()
                 .map(|x| match x {
                     params::StateType::Discrete(state) => state.clone(),
                 })
                 .collect(),
         );
+        //Add t_end as the last time.
         time.push(t_end.clone());
-
-        dataset.trajectories.push(Trajectory {
-            time: Array::from_vec(time),
-            events: Array2::from_shape_vec(
+
+        //Add the sampled trajectory to trajectories.
+        trajectories.push(Trajectory::init(
+            Array::from_vec(time),
+            Array2::from_shape_vec(
                 (events.len(), current_state.len()),
                 events.iter().flatten().cloned().collect(),
             )
             .unwrap(),
-        });
+        ));
     }
-    dataset
+    //Return a dataset object with the sampled trajectories.
+    Dataset::init(trajectories)
 }
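A quick sketch of the new encapsulated API; everything here appears verbatim in the tests, so only the values are illustrative:

```rust
use ndarray::{arr1, arr2};
use rustyCTBN::tools::{Dataset, Trajectory};

fn main() {
    //A three-sample trajectory of a single binary variable.
    let trj = Trajectory::init(arr1(&[0.0, 0.1, 0.3]), arr2(&[[0], [1], [1]]));
    let dataset = Dataset::init(vec![trj]);
    //The fields are now private; access goes through the getters.
    let first = &dataset.get_trajectories()[0];
    assert_eq!(first.get_time().len(), first.get_events().shape()[0]);
}
```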
diff --git a/tests/structure_learning.rs b/tests/structure_learning.rs
new file mode 100644
index 0000000..c3482cc
--- /dev/null
+++ b/tests/structure_learning.rs
@@ -0,0 +1,260 @@
+
+mod utils;
+use utils::*;
+
+use rustyCTBN::ctbn::*;
+use rustyCTBN::network::Network;
+use rustyCTBN::tools::*;
+use rustyCTBN::structure_learning::score_function::*;
+use rustyCTBN::structure_learning::score_based_algorithm::*;
+use rustyCTBN::structure_learning::StructureLearningAlgorithm;
+use ndarray::{arr1, arr2, arr3};
+use std::collections::BTreeSet;
+use rustyCTBN::params;
+
+#[macro_use]
+extern crate approx;
+
+#[test]
+fn simple_score_test() {
+    let mut net = CtbnNetwork::init();
+    let n1 = net
+        .add_node(generate_discrete_time_continous_node(String::from("n1"), 2))
+        .unwrap();
+
+    let trj = Trajectory::init(arr1(&[0.0, 0.1, 0.3]), arr2(&[[0], [1], [1]]));
+
+    let dataset = Dataset::init(vec![trj]);
+
+    let ll = LogLikelihood::init(1, 1.0);
+
+    assert_abs_diff_eq!(0.04257, ll.call(&net, n1, &BTreeSet::new(), &dataset), epsilon = 1e-3);
+}
+
+#[test]
+fn simple_bic() {
+    let mut net = CtbnNetwork::init();
+    let n1 = net
+        .add_node(generate_discrete_time_continous_node(String::from("n1"), 2))
+        .unwrap();
+
+    let trj = Trajectory::init(arr1(&[0.0, 0.1, 0.3]), arr2(&[[0], [1], [1]]));
+
+    let dataset = Dataset::init(vec![trj]);
+    let bic = BIC::init(1, 1.0);
+
+    assert_abs_diff_eq!(-0.65058, bic.call(&net, n1, &BTreeSet::new(), &dataset), epsilon = 1e-3);
+}
+
+fn check_compatibility_between_dataset_and_network<T: StructureLearningAlgorithm>(sl: T) {
+    let mut net = CtbnNetwork::init();
+    let n1 = net
+        .add_node(generate_discrete_time_continous_node(String::from("n1"), 3))
+        .unwrap();
+    let n2 = net
+        .add_node(generate_discrete_time_continous_node(String::from("n2"), 3))
+        .unwrap();
+    net.add_edge(n1, n2);
+
+    match &mut net.get_node_mut(n1).params {
+        params::Params::DiscreteStatesContinousTime(param) => {
+            assert_eq!(Ok(()), param.set_cim(arr3(&[[
+                [-3.0, 2.0, 1.0],
+                [1.5, -2.0, 0.5],
+                [0.4, 0.6, -1.0]]])));
+        }
+    }
+
+    match &mut net.get_node_mut(n2).params {
+        params::Params::DiscreteStatesContinousTime(param) => {
+            assert_eq!(Ok(()), param.set_cim(arr3(&[
+                [[-1.0, 0.5, 0.5], [3.0, -4.0, 1.0], [0.9, 0.1, -1.0]],
+                [[-6.0, 2.0, 4.0], [1.5, -2.0, 0.5], [3.0, 1.0, -4.0]],
+                [[-1.0, 0.1, 0.9], [2.0, -2.5, 0.5], [0.9, 0.1, -1.0]],
+            ])));
+        }
+    }
+
+    let data = trajectory_generator(&net, 100, 20.0, Some(6347747169756259));
+
+    let mut net = CtbnNetwork::init();
+    let _n1 = net
+        .add_node(generate_discrete_time_continous_node(String::from("n1"), 3))
+        .unwrap();
+    let net = sl.fit_transform(net, &data);
+}
+
+#[test]
+#[should_panic]
+pub fn check_compatibility_between_dataset_and_network_hill_climbing() {
+    let ll = LogLikelihood::init(1, 1.0);
+    let hl = HillClimbing::init(ll, None);
+    check_compatibility_between_dataset_and_network(hl);
+}
+
+fn learn_ternary_net_2_nodes<T: StructureLearningAlgorithm>(sl: T) {
+    let mut net = CtbnNetwork::init();
+    let n1 = net
+        .add_node(generate_discrete_time_continous_node(String::from("n1"), 3))
+        .unwrap();
+    let n2 = net
+        .add_node(generate_discrete_time_continous_node(String::from("n2"), 3))
+        .unwrap();
+    net.add_edge(n1, n2);
+
+    match &mut net.get_node_mut(n1).params {
+        params::Params::DiscreteStatesContinousTime(param) => {
+            assert_eq!(Ok(()), param.set_cim(arr3(&[[
+                [-3.0, 2.0, 1.0],
+                [1.5, -2.0, 0.5],
+                [0.4, 0.6, -1.0]]])));
+        }
+    }
+
+    match &mut net.get_node_mut(n2).params {
+        params::Params::DiscreteStatesContinousTime(param) => {
+            assert_eq!(Ok(()),
+                param.set_cim(arr3(&[
+                    [[-1.0, 0.5, 0.5], [3.0, -4.0, 1.0], [0.9, 0.1, -1.0]],
+                    [[-6.0, 2.0, 4.0], [1.5, -2.0, 0.5], [3.0, 1.0, -4.0]],
+                    [[-1.0, 0.1, 0.9], [2.0, -2.5, 0.5], [0.9, 0.1, -1.0]],
+                ])));
+        }
+    }
+
+    let data = trajectory_generator(&net, 100, 20.0, Some(6347747169756259));
+
+    let net = sl.fit_transform(net, &data);
+    assert_eq!(BTreeSet::from_iter(vec![n1]), net.get_parent_set(n2));
+    assert_eq!(BTreeSet::new(), net.get_parent_set(n1));
+}
+
+#[test]
+pub fn learn_ternary_net_2_nodes_hill_climbing_ll() {
+    let ll = LogLikelihood::init(1, 1.0);
+    let hl = HillClimbing::init(ll, None);
+    learn_ternary_net_2_nodes(hl);
+}
+
+#[test]
+pub fn learn_ternary_net_2_nodes_hill_climbing_bic() {
+    let bic = BIC::init(1, 1.0);
+    let hl = HillClimbing::init(bic, None);
+    learn_ternary_net_2_nodes(hl);
+}
+
+fn get_mixed_discrete_net_3_nodes_with_data() -> (CtbnNetwork, Dataset) {
+    let mut net = CtbnNetwork::init();
+    let n1 = net
+        .add_node(generate_discrete_time_continous_node(String::from("n1"), 3))
+        .unwrap();
+    let n2 = net
+        .add_node(generate_discrete_time_continous_node(String::from("n2"), 3))
+        .unwrap();
+    let n3 = net
+        .add_node(generate_discrete_time_continous_node(String::from("n3"), 4))
+        .unwrap();
+    net.add_edge(n1, n2);
+    net.add_edge(n1, n3);
+    net.add_edge(n2, n3);
+
+    match &mut net.get_node_mut(n1).params {
+        params::Params::DiscreteStatesContinousTime(param) => {
+            assert_eq!(Ok(()), param.set_cim(arr3(&[[
+                [-3.0, 2.0, 1.0],
+                [1.5, -2.0, 0.5],
+                [0.4, 0.6, -1.0]]])));
+        }
+    }
+
+    match &mut net.get_node_mut(n2).params {
+        params::Params::DiscreteStatesContinousTime(param) => {
+            assert_eq!(Ok(()), param.set_cim(arr3(&[
+                [[-1.0, 0.5, 0.5], [3.0, -4.0, 1.0], [0.9, 0.1, -1.0]],
+                [[-6.0, 2.0, 4.0], [1.5, -2.0, 0.5], [3.0, 1.0, -4.0]],
+                [[-1.0, 0.1, 0.9], [2.0, -2.5, 0.5], [0.9, 0.1, -1.0]],
+            ])));
+        }
+    }
+
+    match &mut net.get_node_mut(n3).params {
+        params::Params::DiscreteStatesContinousTime(param) => {
+            assert_eq!(Ok(()), param.set_cim(arr3(&[
+                [[-1.0, 0.5, 0.3, 0.2], [0.5, -4.0, 2.5, 1.0], [2.5, 0.5, -4.0, 1.0], [0.7, 0.2, 0.1, -1.0]],
+                [[-6.0, 2.0, 3.0, 1.0], [1.5, -3.0, 0.5, 1.0], [2.0, 1.3, -5.0, 1.7], [2.5, 0.5, 1.0, -4.0]],
+                [[-1.3, 0.3, 0.1, 0.9], [1.4, -4.0, 0.5, 2.1], [1.0, 1.5, -3.0, 0.5], [0.4, 0.3, 0.1, -0.8]],
+
+                [[-2.0, 1.0, 0.7, 0.3], [1.3, -5.9, 2.7, 1.9], [2.0, 1.5, -4.0, 0.5], [0.2, 0.7, 0.1, -1.0]],
+                [[-6.0, 1.0, 2.0, 3.0], [0.5, -3.0, 1.0, 1.5], [1.4, 2.1, -4.3, 0.8], [0.5, 1.0, 2.5, -4.0]],
+                [[-1.3, 0.9, 0.3, 0.1], [0.1, -1.3, 0.2, 1.0], [0.5, 1.0, -3.0, 1.5], [0.1, 0.4, 0.3, -0.8]],
+
+                [[-2.0, 1.0, 0.6, 0.4], [2.6, -7.1, 1.4, 3.1], [5.0, 1.0, -8.0, 2.0], [1.4, 0.4, 0.2, -2.0]],
+                [[-3.0, 1.0, 1.5, 0.5], [3.0, -6.0, 1.0, 2.0], [0.3, 0.5, -1.9, 1.1], [5.0, 1.0, 2.0, -8.0]],
+                [[-2.6, 0.6, 0.2, 1.8], [2.0, -6.0, 3.0, 1.0], [0.1, 0.5, -1.3, 0.7], [0.8, 0.6, 0.2, -1.6]],
+            ])));
+        }
+    }
+
+    let data = trajectory_generator(&net, 300, 30.0, Some(6347747169756259));
+    return (net, data);
+}
+
+fn learn_mixed_discrete_net_3_nodes<T: StructureLearningAlgorithm>(sl: T) {
+    let (net, data) = get_mixed_discrete_net_3_nodes_with_data();
+    let net = sl.fit_transform(net, &data);
+    assert_eq!(BTreeSet::new(), net.get_parent_set(0));
+    assert_eq!(BTreeSet::from_iter(vec![0]), net.get_parent_set(1));
+    assert_eq!(BTreeSet::from_iter(vec![0, 1]), net.get_parent_set(2));
+}
+
+#[test]
+pub fn learn_mixed_discrete_net_3_nodes_hill_climbing_ll() {
+    let ll = LogLikelihood::init(1, 1.0);
+    let hl = HillClimbing::init(ll, None);
+    learn_mixed_discrete_net_3_nodes(hl);
+}
+
+#[test]
+pub fn learn_mixed_discrete_net_3_nodes_hill_climbing_bic() {
+    let bic = BIC::init(1, 1.0);
+    let hl = HillClimbing::init(bic, None);
+    learn_mixed_discrete_net_3_nodes(hl);
+}
+
+fn learn_mixed_discrete_net_3_nodes_1_parent_constraint<T: StructureLearningAlgorithm>(sl: T) {
+    let (net, data) = get_mixed_discrete_net_3_nodes_with_data();
+    let net = sl.fit_transform(net, &data);
+    assert_eq!(BTreeSet::new(), net.get_parent_set(0));
+    assert_eq!(BTreeSet::from_iter(vec![0]), net.get_parent_set(1));
+    assert_eq!(BTreeSet::from_iter(vec![0]), net.get_parent_set(2));
+}
+
+#[test]
+pub fn learn_mixed_discrete_net_3_nodes_hill_climbing_ll_1_parent_constraint() {
+    let ll = LogLikelihood::init(1, 1.0);
+    let hl = HillClimbing::init(ll, Some(1));
+    learn_mixed_discrete_net_3_nodes_1_parent_constraint(hl);
+}
+
+#[test]
+pub fn learn_mixed_discrete_net_3_nodes_hill_climbing_bic_1_parent_constraint() {
+    let bic = BIC::init(1, 1.0);
+    let hl = HillClimbing::init(bic, Some(1));
+    learn_mixed_discrete_net_3_nodes_1_parent_constraint(hl);
+}
diff --git a/tests/tools.rs b/tests/tools.rs
index 76847ef..c341b8c 100644
--- a/tests/tools.rs
+++ b/tests/tools.rs
@@ -5,7 +5,7 @@ use rustyCTBN::ctbn::*;
 use rustyCTBN::node;
 use rustyCTBN::params;
 use std::collections::BTreeSet;
-use ndarray::arr3;
+use ndarray::{arr1, arr2, arr3};
 
 
@@ -38,8 +38,30 @@ fn run_sampling() {
     let data = trajectory_generator(&net, 4, 1.0, Some(6347747169756259),);
 
-    assert_eq!(4, data.trajectories.len());
-    assert_relative_eq!(1.0, data.trajectories[0].time[data.trajectories[0].time.len()-1]);
+    assert_eq!(4, data.get_trajectories().len());
+    assert_relative_eq!(1.0, data.get_trajectories()[0].get_time()[data.get_trajectories()[0].get_time().len() - 1]);
 }
 
+#[test]
+#[should_panic]
+fn trajectory_wrong_shape() {
+    let time = arr1(&[0.0, 0.2]);
+    let events = arr2(&[[0, 3]]);
+    Trajectory::init(time, events);
+}
+
+#[test]
+#[should_panic]
+fn dataset_wrong_shape() {
+    let time = arr1(&[0.0, 0.2]);
+    let events = arr2(&[[0, 3], [1, 2]]);
+    let t1 = Trajectory::init(time, events);
+
+    let time = arr1(&[0.0, 0.2]);
+    let events = arr2(&[[0, 3, 3], [1, 2, 3]]);
+    let t2 = Trajectory::init(time, events);
+    Dataset::init(vec![t1, t2]);
+}
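Taken together, the new modules compose into the generate-then-recover loop these tests exercise. A compact sketch of that flow; the parameterized `net` is assumed to be built beforehand, e.g. as in get_mixed_discrete_net_3_nodes_with_data above:

```rust
use rustyCTBN::ctbn::CtbnNetwork;
use rustyCTBN::network::Network;
use rustyCTBN::structure_learning::score_based_algorithm::HillClimbing;
use rustyCTBN::structure_learning::score_function::BIC;
use rustyCTBN::structure_learning::StructureLearningAlgorithm;
use rustyCTBN::tools::trajectory_generator;
use std::collections::BTreeSet;

//`net` is a parameterized ground-truth network whose structure we try to recover.
fn recover_structure(net: CtbnNetwork) {
    //1. Sample a dataset from the known network.
    let data = trajectory_generator(&net, 300, 30.0, Some(6347747169756259));
    //2. Relearn the structure with BIC-scored hill climbing, at most one parent per node.
    let learned = HillClimbing::init(BIC::init(1, 1.0), Some(1)).fit_transform(net, &data);
    //3. Inspect the recovered parent sets.
    assert_eq!(BTreeSet::from_iter(vec![0]), learned.get_parent_set(1));
}
```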