diff --git a/Cargo.lock b/Cargo.lock index b6010d8..83c3713 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -125,9 +125,9 @@ checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" [[package]] name = "genetic-rs" -version = "1.3.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcda85cc7ffaa6c34c6a603de4b47a81c399b26fa825e4866de57f3c5b184b53" +checksum = "91ff84315b42aa07d69d5d0544403f4d56791824b4e0e10420ca6c3fcaff193d" dependencies = [ "genetic-rs-common", "genetic-rs-macros", @@ -135,9 +135,9 @@ dependencies = [ [[package]] name = "genetic-rs-common" -version = "1.3.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9824e028a96d1b962aa2c300457ff3fc012066f12872db58e65475f57fa41ee" +checksum = "93606635ba093802487db62ca9b3ffd9762946fd2fa2fc1461fc6f1342f8a6cd" dependencies = [ "itertools", "rand", @@ -146,9 +146,9 @@ dependencies = [ [[package]] name = "genetic-rs-macros" -version = "1.3.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41691333bc965d711879ec409130ec42c5dbb28e8cdd79947785fc6420853d6d" +checksum = "909a36b32f9d8361c56ef2a76dc905484698e32f4fe6ed55c36dde182be81f84" dependencies = [ "darling", "genetic-rs-common", diff --git a/Cargo.toml b/Cargo.toml index 94fa28e..0d849de 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -42,7 +42,7 @@ opt-level = 3 [dependencies] atomic_float = "1.1.0" bitflags = "2.11.0" -genetic-rs = { version = "1.3.0", features = ["rayon"] } +genetic-rs = { version = "1.4.1", features = ["rayon"] } lazy_static = "1.5.0" rayon = "1.11.0" replace_with = "0.1.8" diff --git a/src/neuralnet.rs b/src/neuralnet.rs index 5dffd9b..2b11f91 100644 --- a/src/neuralnet.rs +++ b/src/neuralnet.rs @@ -811,6 +811,35 @@ impl NeuralNetwork { *w += amount; }); } + + /// Gets a set of all connections in the neural network. 
+ /// Used for things like calculating divergence between neural networks during speciation. + pub fn edges_set(&self) -> HashSet<Connection> { + let mut edges = HashSet::new(); + + for (i, n) in self.input_layer.iter().enumerate() { + let from = NeuronLocation::Input(i); + for &to in n.outputs.keys() { + edges.insert(Connection { from, to }); + } + } + + for (i, n) in self.hidden_layers.iter().enumerate() { + let from = NeuronLocation::Hidden(i); + for &to in n.outputs.keys() { + edges.insert(Connection { from, to }); + } + } + + for (i, n) in self.output_layer.iter().enumerate() { + let from = NeuronLocation::Output(i); + for &to in n.outputs.keys() { + edges.insert(Connection { from, to }); + } + } + + edges + } } impl Index<NeuronLocation> for NeuralNetwork { @@ -931,6 +960,7 @@ } /// The settings used for [`NeuralNetwork`] reproduction. +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Debug, Clone, PartialEq)] pub struct ReproductionSettings { /// The mutation settings to use during reproduction. @@ -1042,6 +1072,51 @@ fn output_exists(loc: NeuronLocation, hidden_len: usize, output_len: usize) -> b } } +/// The weights for calculating divergence between two neural networks. +#[derive(Debug, Clone, PartialEq)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +pub struct DivergenceWeights { + /// The weight for the symmetric difference of edges. + pub edge: f32, + + /// The weight for the difference in the number of hidden neurons. + pub node: f32, +} + +impl Default for DivergenceWeights { + fn default() -> Self { + Self { + edge: 1.0, + node: 1.0, + } + } +} + +impl Speciated for NeuralNetwork { + type Context = DivergenceWeights; + + /// Divergence based on the weighted symmetric difference of edges plus the weighted difference in hidden-neuron count.
+ fn divergence(&self, other: &Self, ctx: &Self::Context) -> f32 { + let self_edges = self.edges_set(); + let other_edges = other.edges_set(); + // let total_edges = self_edges.union(&other_edges).count() as f32; + + let edge_diff = self_edges.symmetric_difference(&other_edges).count() as f32; + + let edge_term = ctx.edge * edge_diff; // / total_edges.max(1.0); + + let node_diff = self.hidden_layers.len().abs_diff(other.hidden_layers.len()) as f32; + let node_term = ctx.node * node_diff; + // / self + // .hidden_layers + // .len() + // .max(other.hidden_layers.len()) + // .max(1) as f32; + + edge_term + node_term + } +} + /// A helper struct for operations on connections between neurons. /// It does not contain information about the weight. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]