Commit cbe7c33

EnricoMimi authored and migueldeicaza committed
fixing broken merges to master (migueldeicaza#460)
- adding AdamOptimizer (migueldeicaza#435) conflicted with fixing unit tests (migueldeicaza#448)
- adding CreateTensorFromImageFileAlt function (migueldeicaza#421) was missing the System.Drawing reference
1 parent: b54bc4c · commit: cbe7c33

File tree

4 files changed (+15, -4 lines)

Examples/ExampleCommon/ExampleCommon.csproj
Lines changed: 1 addition & 0 deletions

@@ -36,6 +36,7 @@
   <ItemGroup>
     <Reference Include="System" />
     <Reference Include="System.Core" />
+    <Reference Include="System.Drawing" />
     <Reference Include="System.Xml.Linq" />
     <Reference Include="System.Data.DataSetExtensions" />
     <Reference Include="Microsoft.CSharp" />
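ExampleCommon is an old-style (non-SDK) csproj, so framework assemblies must be referenced explicitly; the new image code below uses System.Drawing types and would fail to compile (CS0246, type or namespace not found) without the reference added above. A minimal check of the dependency, assuming nothing beyond the BCL:

    using System.Drawing;   // resolved via the <Reference Include="System.Drawing" /> above

    class ReferenceCheck
    {
        static void Main ()
        {
            // Compiles only when the project references System.Drawing.
            using (var bmp = new Bitmap (2, 2)) {
                bmp.SetPixel (0, 0, Color.Red);
                System.Console.WriteLine (bmp.GetPixel (0, 0));
            }
        }
    }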

Examples/ExampleCommon/ImageUtil.cs
Lines changed: 3 additions & 1 deletion

@@ -1,4 +1,6 @@
-using System.IO;
+using System.Drawing;
+using System.Drawing.Imaging;
+using System.IO;
 using TensorFlow;

 namespace ExampleCommon
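The commit message ties these usings to the new CreateTensorFromImageFileAlt (migueldeicaza#421), whose body is not part of this diff. Purely as a hypothetical sketch of what a System.Drawing-based loader could look like — the tensor shape, the per-pixel packing, and the TFTensor construction are all assumptions, not the actual migueldeicaza#421 code:

    using System.Drawing;
    using TensorFlow;

    namespace ExampleCommon
    {
        public static class ImageUtilSketch
        {
            // Hypothetical: decode an image with GDI+ and pack it into a
            // [1, height, width, 3] float tensor. The real method may differ.
            public static TFTensor CreateTensorFromImageFileAlt (string file)
            {
                using (var bmp = new Bitmap (file)) {
                    var data = new float [1, bmp.Height, bmp.Width, 3];
                    for (int y = 0; y < bmp.Height; y++)
                        for (int x = 0; x < bmp.Width; x++) {
                            var px = bmp.GetPixel (x, y);  // simple but slow; LockBits would be faster
                            data [0, y, x, 0] = px.R;
                            data [0, y, x, 1] = px.G;
                            data [0, y, x, 2] = px.B;
                        }
                    // Assumes TFTensor's Array-based constructor infers the shape.
                    return new TFTensor (data);
                }
            }
        }
    }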

TensorFlowSharp/Optimizer.cs
Lines changed: 1 addition & 1 deletion

@@ -368,7 +368,7 @@ public override TFOperation[] ApplyGradient((TFOutput gradient, Variable variabl
 			for (int i = 0; i < gradientsAndVariables.Length; i++)
 			{
 				var gv = gradientsAndVariables[i];
-				var lr = _graph.Cast(LearningRate.Read, gv.gradient.OutputType);
+				var lr = _graph.Cast(LearningRate, gv.gradient.OutputType);
 				var one = _graph.Const(1f);

 				var t = _graph.Cast(Iterations.Read, _beta1.OutputType);
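The one-line fix points at the nature of the broken merge: in the merged AdamOptimizer, LearningRate is evidently already a TFOutput (note that _beta1.OutputType is consumed the same way two lines below), so the Variable-style LearningRate.Read left over from the pre-merge shape of migueldeicaza#435 did not compile. The surrounding Iterations cast and Const(1f) are consistent with Adam's bias-corrected step size, lr_t = lr * sqrt(1 - beta2^t) / (1 - beta1^t). A hypothetical continuation of the hunk under that reading — the op methods exist on TensorFlowSharp's generated TFGraph API, but _beta2 and every name not visible in the diff are assumed:

    // Hypothetical: bias-corrected learning rate,
    // lr_t = lr * sqrt(1 - beta2^t) / (1 - beta1^t)
    var beta1Power = _graph.Pow (_beta1, t);
    var beta2Power = _graph.Pow (_beta2, t);   // _beta2 assumed, mirroring _beta1
    var lr_t = _graph.Mul (lr,
        _graph.Div (_graph.Sqrt (_graph.Sub (one, beta2Power)),
                    _graph.Sub (one, beta1Power)));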

tests/TensorFlowSharp.Tests.CSharp/OptimizerTests.cs
Lines changed: 10 additions & 2 deletions

@@ -854,6 +854,10 @@ public void LinearRegresionTrainingWithAdamOptimizerTest()
 			var sgd = new AdamOptimizer(graph, learning_rate);
 			var updateOps = sgd.Minimize(cost);

+			var iter = sgd.Iterations.ReadAfter (updateOps);
+			var readW = W.ReadAfter (updateOps);
+			var readb = b.ReadAfter (updateOps);
+
 			using (var sesssion = new TFSession(graph))
 			{
 				sesssion.GetRunner().AddTarget(graph.GetGlobalVariablesInitializer()).Run();

@@ -866,7 +870,7 @@ public void LinearRegresionTrainingWithAdamOptimizerTest()
 					var tensors = sesssion.GetRunner()
 						.AddInput(X, new TFTensor(train_x[j]))
 						.AddInput(Y, new TFTensor(train_y[j]))
-						.AddTarget(updateOps).Fetch(cost, W.Read, b.Read, pred).Run();
+						.AddTarget(updateOps).Fetch(cost, readW, readb, pred).Run();
 					var output = $"loss: {tensors[0].GetValue():F4}, W: {tensors[1].GetValue():F4}, b: {tensors[2].GetValue():F4}";
 					Assert.Equal(expectedLines[i * n_samples + j], output);
 				}

@@ -910,6 +914,10 @@ public void LinearRegresionTrainingWithAdamOptimizerDecayTest()
 			var sgd = new AdamOptimizer(graph, learning_rate, decay: 0.5f);
 			var updateOps = sgd.Minimize(cost);

+			var iter = sgd.Iterations.ReadAfter (updateOps);
+			var readW = W.ReadAfter (updateOps);
+			var readb = b.ReadAfter (updateOps);
+
 			using (var sesssion = new TFSession(graph))
 			{
 				sesssion.GetRunner().AddTarget(graph.GetGlobalVariablesInitializer()).Run();

@@ -922,7 +930,7 @@ public void LinearRegresionTrainingWithAdamOptimizerDecayTest()
 					var tensors = sesssion.GetRunner()
 						.AddInput(X, new TFTensor(train_x[j]))
 						.AddInput(Y, new TFTensor(train_y[j]))
-						.AddTarget(updateOps).Fetch(cost, W.Read, b.Read, pred).Run();
+						.AddTarget(updateOps).Fetch(cost, readW, readb, pred).Run();
 					var output = $"loss: {tensors[0].GetValue():F4}, W: {tensors[1].GetValue():F4}, b: {tensors[2].GetValue():F4}";
 					Assert.Equal(expectedLines[i * n_samples + j], output);
 				}
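Why ReadAfter rather than W.Read here: when a variable read is fetched in the same Run call that targets the update ops, TensorFlow makes no ordering guarantee between the read and the optimizer's assignments, so W.Read could observe the value before or after the Adam step and the asserted strings become flaky. ReadAfter presumably gates the read on the update ops via control dependencies, making the fetch deterministic. A condensed sketch of the pattern, assuming only the API visible in this diff (x, y, and session stand in for the test's locals):

    var updateOps = sgd.Minimize (cost);

    // Reads that are guaranteed to run after the optimizer step.
    var readW = W.ReadAfter (updateOps);
    var readb = b.ReadAfter (updateOps);

    var tensors = session.GetRunner ()
        .AddInput (X, new TFTensor (x))
        .AddInput (Y, new TFTensor (y))
        .AddTarget (updateOps)          // runs the Adam update
        .Fetch (cost, readW, readb)     // fetches post-update values
        .Run ();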
