I'm currently trying to implement a genetic algorithm to do unsupervised machine learning for Snake (you know, the old cellphone game).
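The evolutionary loop I have in mind looks roughly like this (just a sketch of the plan, no TensorFlow yet; play_snake and mutate are hypothetical stand-ins for the game simulation and the mutation operator):

// `Genome` holds the flattened weights and biases of one network.
struct Genome {
    params: Vec<f32>,
    fitness: f32,
}

fn evolve<P, M>(
    mut population: Vec<Genome>,
    generations: usize,
    play_snake: P,
    mutate: M,
) -> Vec<Genome>
where
    P: Fn(&[f32]) -> f32,  // plays one game of Snake, returns the score
    M: Fn(&mut Vec<f32>),  // perturbs the parameters in place
{
    for _ in 0..generations {
        // Evaluate: every genome plays a game; its score is its fitness.
        for g in population.iter_mut() {
            g.fitness = play_snake(&g.params);
        }
        // Select: keep the better half as parents.
        population.sort_by(|a, b| b.fitness.total_cmp(&a.fitness));
        let survivors = population.len() / 2;
        population.truncate(survivors);
        // Reproduce: refill the population with mutated copies of the parents.
        for i in 0..survivors {
            let mut child = population[i].params.clone();
            mutate(&mut child);
            population.push(Genome { params: child, fitness: 0.0 });
        }
    }
    population
}

Now, my first question: is the following a correct way to build the network with the tensorflow crate?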
use tensorflow::{
    ops, DataType, Operation, Output, Scope, Session, SessionOptions, SessionRunArgs, Shape,
    Status, Tensor, Variable,
};

/// Wrapper around a layer's bias Variable.
struct Bias {
    variable: Variable,
}

/// Wrapper around a layer's weight Variable.
struct Weight {
    variable: Variable,
}

/// Builds one fully connected layer: output = activation(input · w + b).
fn layer<O1: Into<Output>>(
    input: O1,
    input_size: u64,
    output_size: u64,
    activation: &dyn Fn(Output, &mut Scope) -> Result<Output, Status>,
    scope: &mut Scope,
) -> Result<(Weight, Bias, Output), Status> {
    // Give every layer its own sub-scope so op names stay unique.
    let mut scope = scope.new_sub_scope("layer");
    let scope = &mut scope;
    // Shape of the weight matrix, as a 1-D i64 tensor for RandomStandardNormal.
    let w_shape = ops::constant(&[input_size as i64, output_size as i64][..], scope)?;
    // Weight matrix, initialized from a standard normal distribution.
    let w = Variable::builder()
        .initial_value(
            ops::RandomStandardNormal::new()
                .dtype(DataType::Float)
                .build(w_shape.into(), scope)?,
        )
        .data_type(DataType::Float)
        .shape(Shape::from(&[input_size, output_size][..]))
        .build(&mut scope.with_op_name("w"))?;
    // Bias vector, initialized to zeros (Tensor::new zero-fills).
    let b = Variable::builder()
        .const_initial_value(Tensor::<f32>::new(&[output_size]))
        .build(&mut scope.with_op_name("b"))?;
    Ok((
        Weight {
            variable: w.clone(),
        },
        Bias {
            variable: b.clone(),
        },
        // output = activation(input · w + b)
        activation(
            ops::add(
                ops::mat_mul(input.into(), w.output().clone(), scope)?.into(),
                b.output().clone(),
                scope,
            )?
            .into(),
            scope,
        )?,
    ))
}
// In another function, the network is assembled and the variables are initialized:
let mut scope = Scope::new_root_scope();
// Input layer: a 1 × 32 placeholder.
let input: Operation = ops::Placeholder::new()
    .dtype(DataType::Float)
    .shape(Shape::from(&[1u64, 32][..]))
    .build(&mut scope.with_op_name("input"))?;
// First hidden layer: 32 -> 20, ReLU.
let (weight_layer_1, bias_layer_1, layer1) = layer(
    input.clone(),
    32,
    20,
    &|x, scope| Ok(ops::relu(x, scope)?.into()),
    &mut scope,
)?;
// Second hidden layer: 20 -> 12, ReLU.
let (weight_layer_2, bias_layer_2, layer2) = layer(
    layer1.clone(),
    20,
    12,
    &|x, scope| Ok(ops::relu(x, scope)?.into()),
    &mut scope,
)?;
// Output layer: 12 -> 4, sigmoid.
let (weight_layer_out, bias_layer_out, layer_output) = layer(
    layer2.clone(),
    12,
    4,
    &|x, scope| Ok(ops::sigmoid(x, scope)?.into()),
    &mut scope,
)?;
// Create the session and run every variable's initializer once.
let options = SessionOptions::new();
let g = scope.graph_mut();
let session = Session::new(&options, &g)?;
let mut run_args = SessionRunArgs::new();
run_args.add_target(&weight_layer_1.variable.initializer());
run_args.add_target(&weight_layer_2.variable.initializer());
run_args.add_target(&weight_layer_out.variable.initializer());
run_args.add_target(&bias_layer_1.variable.initializer());
run_args.add_target(&bias_layer_2.variable.initializer());
run_args.add_target(&bias_layer_out.variable.initializer());
session.run(&mut run_args)?;
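If I understand the API correctly, running the network on one observation should then look like this (sketch; in practice `observation` would be filled from the game state):

// Forward pass: feed the input placeholder, fetch the output layer.
let observation = Tensor::<f32>::new(&[1, 32]); // the 32 input features
let mut run_args = SessionRunArgs::new();
run_args.add_feed(&input, 0, &observation);
let output_token = run_args.request_fetch(&layer_output.operation, layer_output.index);
session.run(&mut run_args)?;
let output_tensor: Tensor<f32> = run_args.fetch(output_token)?;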
My second question only makes sense if the answer to the first one is yes, so I'm going to hope that it is.

What is the right way to update the weight and bias Variables of the hidden layers? Here is my current attempt:
// This is a method of a struct that holds the session, the layers,
// and the network output (self.output = layer_output).
fn update_variable(&mut self) -> Result<[f32; 4], Box<dyn Error>> {
    // Candidate values for the variable (all zeros, just to test).
    let mut input_tensor = Tensor::<f32>::new(&[1, 20]);
    for index in 0..20 {
        input_tensor[index] = 0.0;
    }
    let mut run_args = SessionRunArgs::new();
    // This is the part I'm unsure about: feeding the variable's
    // initializer op in the hope that it overwrites the weights.
    run_args.add_feed(
        &self.weight_layer_1.variable.initializer(),
        1,
        &input_tensor,
    );
    // Fetch the network's output to check the effect.
    let result_token = run_args.request_fetch(&self.output.operation, self.output.index);
    self.session.run(&mut run_args)?;
    let result_tensor: Tensor<f32> = run_args.fetch::<f32>(result_token)?;
    Ok([
        result_tensor.get(&[0, 0]),
        result_tensor.get(&[0, 1]),
        result_tensor.get(&[0, 2]),
        result_tensor.get(&[0, 3]),
    ])
}
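Would the right pattern instead be to create a dedicated placeholder plus an assign op for each variable while building the graph, and then feed and run those? Something like this (untested sketch; w1_value and w1_assign are names I made up):

// One placeholder + one assign op per variable, created at
// graph-construction time with the same `scope` as above.
let w1_value = ops::Placeholder::new()
    .dtype(DataType::Float)
    .shape(Shape::from(&[32u64, 20][..]))
    .build(&mut scope.with_op_name("w1_value"))?;
let w1_assign = ops::assign(
    weight_layer_1.variable.output().clone(),
    w1_value.clone(),
    &mut scope,
)?;

// Later, whenever the genetic algorithm produces new weights:
let new_w1 = Tensor::<f32>::new(&[32, 20]); // would be filled by the GA
let mut run_args = SessionRunArgs::new();
run_args.add_feed(&w1_value, 0, &new_w1);
run_args.add_target(&w1_assign);
session.run(&mut run_args)?;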
I've tried a lot of things, but I'm still lost... Can you help me?
Maxime.