Skip to content
12 changes: 6 additions & 6 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ opt-level = 3
[dependencies]
atomic_float = "1.1.0"
bitflags = "2.11.0"
genetic-rs = { version = "1.4.1", features = ["rayon"] }
lazy_static = "1.5.0"
rayon = "1.11.0"
replace_with = "0.1.8"
Expand Down
75 changes: 75 additions & 0 deletions src/neuralnet.rs
Original file line number Diff line number Diff line change
Expand Up @@ -811,6 +811,35 @@ impl<const I: usize, const O: usize> NeuralNetwork<I, O> {
*w += amount;
});
}

/// Gets a set of all connections in the neural network.
///
/// The set contains one [`Connection`] per outgoing edge of every input,
/// hidden, and output neuron (weights are not included — see [`Connection`]).
/// Used for things like calculating divergence between neural networks
/// during speciation.
pub fn edges_set(&self) -> HashSet<Connection> {
    // Preallocate for the total outgoing-edge count so the set never rehashes
    // while we fill it.
    let capacity = self.input_layer.iter().map(|n| n.outputs.len()).sum::<usize>()
        + self.hidden_layers.iter().map(|n| n.outputs.len()).sum::<usize>()
        + self.output_layer.iter().map(|n| n.outputs.len()).sum::<usize>();

    let mut edges = HashSet::with_capacity(capacity);

    for (i, n) in self.input_layer.iter().enumerate() {
        let from = NeuronLocation::Input(i);
        edges.extend(n.outputs.keys().map(|&to| Connection { from, to }));
    }

    for (i, n) in self.hidden_layers.iter().enumerate() {
        let from = NeuronLocation::Hidden(i);
        edges.extend(n.outputs.keys().map(|&to| Connection { from, to }));
    }

    for (i, n) in self.output_layer.iter().enumerate() {
        let from = NeuronLocation::Output(i);
        edges.extend(n.outputs.keys().map(|&to| Connection { from, to }));
    }

    edges
}
}

impl<const I: usize, const O: usize> Index<NeuronLocation> for NeuralNetwork<I, O> {
Expand Down Expand Up @@ -931,6 +960,7 @@ impl<const I: usize, const O: usize> RandomlyMutable for NeuralNetwork<I, O> {
}

/// The settings used for [`NeuralNetwork`] reproduction.
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[derive(Debug, Clone, PartialEq)]
pub struct ReproductionSettings {
/// The mutation settings to use during reproduction.
Expand Down Expand Up @@ -1042,6 +1072,51 @@ fn output_exists(loc: NeuronLocation, hidden_len: usize, output_len: usize) -> b
}
}

/// The weights for calculating divergence between two neural networks.
///
/// Both weights default to `1.0`; raise a weight to make its term count
/// more heavily toward the total divergence.
#[derive(Debug, Clone, Copy, PartialEq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct DivergenceWeights {
    /// The weight applied to the symmetric difference of the two
    /// networks' edge sets.
    pub edge: f32,

    /// The weight applied to the difference in the number of hidden neurons.
    pub node: f32,
}

impl Default for DivergenceWeights {
    /// Weighs both terms equally at `1.0`.
    fn default() -> Self {
        Self {
            edge: 1.0,
            node: 1.0,
        }
    }
}

impl<const I: usize, const O: usize> Speciated for NeuralNetwork<I, O> {
    type Context = DivergenceWeights;

    /// Calculates how far apart two networks are, for speciation.
    ///
    /// The result is a weighted sum of two raw counts:
    /// - the number of edges present in exactly one of the two networks
    ///   (the symmetric difference of their edge sets), scaled by
    ///   [`DivergenceWeights::edge`], and
    /// - the absolute difference in hidden-neuron counts, scaled by
    ///   [`DivergenceWeights::node`].
    ///
    /// Neither term is normalized, so divergence grows with network size.
    fn divergence(&self, other: &Self, ctx: &Self::Context) -> f32 {
        let self_edges = self.edges_set();
        let other_edges = other.edges_set();

        // Edges that exist in one network but not the other.
        let edge_diff = self_edges.symmetric_difference(&other_edges).count() as f32;
        let edge_term = ctx.edge * edge_diff;

        // Difference in topology size, measured by hidden-neuron count.
        let node_diff = self.hidden_layers.len().abs_diff(other.hidden_layers.len()) as f32;
        let node_term = ctx.node * node_diff;

        edge_term + node_term
    }
}

/// A helper struct for operations on connections between neurons.
/// It does not contain information about the weight.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
Expand Down
Loading