Re: CNN and updating perceptron

Adam Crume

May 28, 2020, 12:03:56 AM
to Maxime Girard, Rust for TensorFlow
It mostly looks fine, except that

run_args.add_feed(
    &self.weight_layer_1.variable.initializer(),
    1,
    &input_tensor,
);


won't assign a value to the variable.  You need to do something like:

let w1: &Variable = ...;
let w1_placeholder = ops::Placeholder::new()
    .dtype(w1.data_type())
    .shape(w1.shape())
    .build(&mut scope.with_op_name("w1_placeholder"))?;
let w1_assign = ops::assign(w1.output().clone(), w1_placeholder.clone().into(), scope)?;


and

let new_value: &Tensor = ...;
let mut run_args = SessionRunArgs::new();
run_args.add_feed(&w1_placeholder, 0, new_value);
run_args.add_target(&w1_assign);
session.run(&mut run_args)?;
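
If you want to double-check that the assign took effect, you can fetch the variable back in a follow-up run. A quick sketch along the lines of your existing fetch code (it assumes w1.output() gives you the same operation/index pair you already use for fetching):

// Read the variable back after the assign has run.
let mut check_args = SessionRunArgs::new();
let token = check_args.request_fetch(&w1.output().operation, w1.output().index);
session.run(&mut check_args)?;
// The fetched tensor should now equal new_value.
let current: Tensor<f32> = check_args.fetch(token)?;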



A couple of other thoughts:
- You might want to create a NoOp with control dependencies on the variable initializers, so you can initialize all variables to default values by running a single op (see the sketch after these notes).
- The loop setting input_tensor to zero isn't necessary because tensors default to zero.
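
A minimal sketch of that NoOp idea, assuming your version of the tensorflow crate exposes Scope::with_control_dependencies and a generated ops::no_op helper (check both names against your crate version; init_all, w1, and b1 are hypothetical placeholders for your own variables):

// Group every initializer behind one op via control dependencies.
let init_all = ops::no_op(&mut scope
    .with_control_dependencies(&[
        w1.initializer().clone(),
        b1.initializer().clone(),
        // ...one entry per variable...
    ])
    .with_op_name("init_all"))?;

// A single target now initializes everything:
let mut run_args = SessionRunArgs::new();
run_args.add_target(&init_all);
session.run(&mut run_args)?;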


On Mon, May 18, 2020 at 3:29 PM Maxime Girard <maxime.g...@gmail.com> wrote:
Hey everyone!

I'm currently trying to implement a genetic algorithm in order to do unsupervised machine learning for a Snake agent (you know, the old cellphone game).

I'm just wondering whether I'm doing things right, since I read this topic: https://groups.google.com/a/tensorflow.org/forum/#!topic/rust/PvhWZn1LGqE

Actually, I didn't write my NN with that method. Following the xor.rs example in the repository (i.e. https://github.com/tensorflow/rust/blob/master/examples/xor.rs), I build each layer with the layer function and pass in the activation operation to apply, like a relu, tanh, or sigmoid for my neurons.

So my code looks like this:


struct Bias {
    variable: Variable,
}

struct Weight {
    variable: Variable,
}

fn layer<O1: Into<Output>>(
    input: O1,
    input_size: u64,
    output_size: u64,
    activation: &dyn Fn(Output, &mut Scope) -> Result<Output, Status>,
    scope: &mut Scope,
) -> Result<(Weight, Bias, Output), Status> {
    let mut scope = scope.new_sub_scope("layer");
    let scope = &mut scope;
    let w_shape = ops::constant(&[input_size as i64, output_size as i64][..], scope)?;
    let w = Variable::builder()
        .initial_value(
            ops::RandomStandardNormal::new()
                .dtype(DataType::Float)
                .build(w_shape.into(), scope)?,
        )
        .data_type(DataType::Float)
        .shape(Shape::from(&[input_size, output_size][..]))
        .build(&mut scope.with_op_name("w"))?;
    let b = Variable::builder()
        .const_initial_value(Tensor::<f32>::new(&[output_size]))
        .build(&mut scope.with_op_name("b"))?;
    Ok((
        Weight {
            variable: w.clone(),
        },
        Bias {
            variable: b.clone(),
        },
        activation(
            ops::add(
                ops::mat_mul(input.into(), w.output().clone(), scope)?.into(),
                b.output().clone(),
                scope,
            )?
            .into(),
            scope,
        )?,
    ))
}


// In another scope...
let mut scope = Scope::new_root_scope();
// Input layer:
let input: Operation = ops::Placeholder::new()
    .dtype(DataType::Float)
    .shape(Shape::from(&[1u64, 32][..]))
    .build(&mut scope.with_op_name("input"))?;
// Hidden layer.
let (weight_layer_1, bias_layer_1, layer1) = layer(
    input.clone(),
    32,
    20,
    &|x, scope| Ok(ops::relu(x, scope)?.into()),
    &mut scope,
)?;
// Hidden layer.
let (weight_layer_2, bias_layer_2, layer2) = layer(
    layer1.clone(),
    20,
    12,
    &|x, scope| Ok(ops::relu(x, scope)?.into()),
    &mut scope,
)?;
// Output layer.
let (weight_layer_out, bias_layer_out, layer_output) = layer(
    layer2.clone(),
    12,
    4,
    &|x, scope| Ok(ops::sigmoid(x, scope)?.into()),
    &mut scope,
)?;

// Initialize variables:
let options = SessionOptions::new();
let g = scope.graph_mut();
let session = Session::new(&options, &g)?;
let mut run_args = SessionRunArgs::new();
run_args.add_target(&weight_layer_1.variable.initializer());
run_args.add_target(&weight_layer_2.variable.initializer());
run_args.add_target(&weight_layer_out.variable.initializer());
run_args.add_target(&bias_layer_1.variable.initializer());
run_args.add_target(&bias_layer_2.variable.initializer());
run_args.add_target(&bias_layer_out.variable.initializer());

session.run(&mut run_args)?;


My first question is: am I doing things right?

My second question only makes sense if the answer to the first one is yes, so I'm going to hope that it is.

What is the right way to update the bias and weight Variables of the hidden layers?

I'm trying something like this, without success:


    fn update_variable(&mut self) -> Result<[f32; 4], Box<dyn Error>> {
        let mut input_tensor = Tensor::<f32>::new(&[1, 20]);
        let mut run_args = SessionRunArgs::new();
        for index in 0..20 {
            input_tensor[index] = 0.0;
        }
        run_args.add_feed(
            &self.weight_layer_1.variable.initializer(),
            1,
            &input_tensor,
        );

        let result_token = run_args.request_fetch(&self.output.operation, self.output.index);
        self.session.run(&mut run_args)?;

        let result_tensor: Tensor<f32> = run_args.fetch::<f32>(result_token)?;
        Ok([
            result_tensor.get(&[0, 0]),
            result_tensor.get(&[0, 1]),
            result_tensor.get(&[0, 2]),
            result_tensor.get(&[0, 3]),
        ])
    }

I've tried a lot of things but I'm still lost... Can you help me?

Thx!

++

Maxime.


Maxime Girard

Jun 8, 2020, 6:03:31 PM
to Rust for TensorFlow, maxime.g...@gmail.com
I just saw your answer two days ago.

Thanks a lot, Adam! It works!

You unblocked me :)

++

Maxime.
