Commit e2e9c36

nhirschey authored and Esther2013 committed
use open type for F# readme example
This is the proper way to mimic the C# `using static` code.
1 parent: 143714d
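
For background (not part of the commit itself): F# 5.0's `open type` brings a type's static members into scope unqualified, which is the same effect `using static` has in C#. A minimal sketch using `System.Math` as a stand-in:

```fsharp
// C# equivalent:  using static System.Math;  // then Sqrt(2.0), PI, ...
open type System.Math

let root = Sqrt 2.0        // static method callable without the Math. prefix
let area r = PI * r * r    // static constants come into scope as well
printfn "sqrt 2 = %f, unit-circle area = %f" root (area 1.0)
```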

File tree

1 file changed: +5 −5 lines

README.md

Lines changed: 5 additions & 5 deletions
```diff
@@ -169,12 +169,12 @@ Linear Regression in `Eager` mode:
 #r "nuget: SciSharp.TensorFlow.Redist"
 #r "nuget: NumSharp"
 
-open System
 open NumSharp
 open Tensorflow
-open Tensorflow.Keras
+open type Tensorflow.Binding
+open type Tensorflow.KerasApi
 
-let tf = Binding.New<tensorflow>()
+let tf = New<tensorflow>()
 tf.enable_eager_execution()
 
 // Parameters
@@ -194,7 +194,7 @@ let n_samples = train_X.shape.[0]
 // We can set a fixed init value in order to demo
 let W = tf.Variable(-0.06f,name = "weight")
 let b = tf.Variable(-0.73f, name = "bias")
-let optimizer = KerasApi.keras.optimizers.SGD(learning_rate)
+let optimizer = keras.optimizers.SGD(learning_rate)
 
 // Run training for the given number of steps.
 for step = 1 to (training_steps + 1) do
@@ -210,7 +210,7 @@ for step = 1 to (training_steps + 1) do
     let gradients = g.gradient(loss,struct (W,b))
 
     // Update W and b following gradients.
-    optimizer.apply_gradients(Binding.zip(gradients, struct (W,b)))
+    optimizer.apply_gradients(zip(gradients, struct (W,b)))
 
     if (step % display_step) = 0 then
         let pred = W * train_X + b
```
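
For readers skimming the hunks, here is the affected part of the README's F# snippet as it reads after this commit, assembled from the diff above (the `learning_rate` value is a placeholder; the README defines it in its "Parameters" section):

```fsharp
#r "nuget: SciSharp.TensorFlow.Redist"
#r "nuget: NumSharp"

open NumSharp
open Tensorflow
open type Tensorflow.Binding   // exposes New<_>, zip, ... unqualified
open type Tensorflow.KerasApi  // exposes keras unqualified

let tf = New<tensorflow>()     // was: Binding.New<tensorflow>()
tf.enable_eager_execution()

let learning_rate = 0.01f      // placeholder value for illustration
let optimizer = keras.optimizers.SGD(learning_rate)  // was: KerasApi.keras.optimizers.SGD(...)
```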
