Skip to content

Commit 3c3613b

Browse files
authored
Merge pull request dotnet#4 from Oceania2018/imageclassificationapi
fix crash issue when testing TF.NET.
2 parents 1231587 + 8fe76a0 commit 3c3613b

File tree

3 files changed

+16
-16
lines changed

3 files changed

+16
-16
lines changed

src/Microsoft.ML.Dnn/DnnTransform.cs

Lines changed: 14 additions & 14 deletions
Original file line number | Diff line number | Diff line change
@@ -653,7 +653,7 @@ private void UpdateModelOnDisk(string modelDir, DnnEstimator.Options options)
653653
Tensor prediction = null;
654654
Tensor bottleneckTensor = evalGraph.OperationByName(BottleneckOperationName);
655655

656-
with(evalGraph.as_default(), graph =>
656+
tf_with(evalGraph.as_default(), graph =>
657657
{
658658
var (_, _, groundTruthInput, finalTensor) = AddFinalRetrainOps(classCount, options, bottleneckTensor, false);
659659
tf.train.Saver().restore(evalSess, Path.Combine(Directory.GetCurrentDirectory(), CheckpointPath));
@@ -668,15 +668,15 @@ private void UpdateModelOnDisk(string modelDir, DnnEstimator.Options options)
668668
Tensor evaluationStep = null;
669669
Tensor correctPrediction = null;
670670

671-
with(tf.name_scope("accuracy"), scope =>
671+
tf_with(tf.name_scope("accuracy"), scope =>
672672
{
673-
with(tf.name_scope("correct_prediction"), delegate
673+
tf_with(tf.name_scope("correct_prediction"), delegate
674674
{
675675
Prediction = tf.argmax(resultTensor, 1);
676676
correctPrediction = tf.equal(Prediction, groundTruthTensor);
677677
});
678678

679-
with(tf.name_scope("accuracy"), delegate
679+
tf_with(tf.name_scope("accuracy"), delegate
680680
{
681681
evaluationStep = tf.reduce_mean(tf.cast(correctPrediction, tf.float32));
682682
});
@@ -697,12 +697,12 @@ private void UpdateTransferLearningModelOnDisk(DnnEstimator.Options options, int
697697

698698
private void VariableSummaries(RefVariable var)
699699
{
700-
with(tf.name_scope("summaries"), delegate
700+
tf_with(tf.name_scope("summaries"), delegate
701701
{
702702
var mean = tf.reduce_mean(var);
703703
tf.summary.scalar("mean", mean);
704704
Tensor stddev = null;
705-
with(tf.name_scope("stddev"), delegate
705+
tf_with(tf.name_scope("stddev"), delegate
706706
{
707707
stddev = tf.sqrt(tf.reduce_mean(tf.square(var - mean)));
708708
});
@@ -717,31 +717,31 @@ private void VariableSummaries(RefVariable var)
717717
DnnEstimator.Options options, Tensor bottleneckTensor, bool isTraining)
718718
{
719719
var (batch_size, bottleneck_tensor_size) = (bottleneckTensor.TensorShape.Dimensions[0], bottleneckTensor.TensorShape.Dimensions[1]);
720-
with(tf.name_scope("input"), scope =>
720+
tf_with(tf.name_scope("input"), scope =>
721721
{
722722
LabelTensor = tf.placeholder(tf.int64, new TensorShape(batch_size), name: options.LabelColumn);
723723
});
724724

725725
string layerName = "final_retrain_ops";
726726
Tensor logits = null;
727-
with(tf.name_scope(layerName), scope =>
727+
tf_with(tf.name_scope(layerName), scope =>
728728
{
729729
RefVariable layerWeights = null;
730-
with(tf.name_scope("weights"), delegate
730+
tf_with(tf.name_scope("weights"), delegate
731731
{
732732
var initialValue = tf.truncated_normal(new int[] { bottleneck_tensor_size, classCount }, stddev: 0.001f);
733733
layerWeights = tf.Variable(initialValue, name: "final_weights");
734734
VariableSummaries(layerWeights);
735735
});
736736

737737
RefVariable layerBiases = null;
738-
with(tf.name_scope("biases"), delegate
738+
tf_with(tf.name_scope("biases"), delegate
739739
{
740740
layerBiases = tf.Variable(tf.zeros(classCount), name: "final_biases");
741741
VariableSummaries(layerBiases);
742742
});
743743

744-
with(tf.name_scope("Wx_plus_b"), delegate
744+
tf_with(tf.name_scope("Wx_plus_b"), delegate
745745
{
746746
logits = tf.matmul(bottleneckTensor, layerWeights) + layerBiases;
747747
tf.summary.histogram("pre_activations", logits);
@@ -755,15 +755,15 @@ private void VariableSummaries(RefVariable var)
755755
return (null, null, LabelTensor, SoftMaxTensor);
756756

757757
Tensor crossEntropyMean = null;
758-
with(tf.name_scope("cross_entropy"), delegate
758+
tf_with(tf.name_scope("cross_entropy"), delegate
759759
{
760760
crossEntropyMean = tf.losses.sparse_softmax_cross_entropy(
761761
labels: LabelTensor, logits: logits);
762762
});
763763

764764
tf.summary.scalar("cross_entropy", crossEntropyMean);
765765

766-
with(tf.name_scope("train"), delegate
766+
tf_with(tf.name_scope("train"), delegate
767767
{
768768
var optimizer = tf.train.GradientDescentOptimizer(options.LearningRate);
769769
TrainStep = optimizer.minimize(crossEntropyMean);
@@ -775,7 +775,7 @@ private void VariableSummaries(RefVariable var)
775775
private void AddTransferLearningLayer(DnnEstimator.Options options, int classCount)
776776
{
777777
BottleneckTensor = Graph.OperationByName(BottleneckOperationName);
778-
with(Graph.as_default(), delegate
778+
tf_with(Graph.as_default(), delegate
779779
{
780780
(TrainStep, CrossEntropy, LabelTensor, SoftMaxTensor) =
781781
AddFinalRetrainOps(classCount, options, BottleneckTensor, true);

src/Microsoft.ML.Dnn/DnnUtils.cs

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -92,7 +92,7 @@ internal static Session LoadTFSession(IExceptionContext ectx, byte[] modelBytes,
9292

9393
internal static Graph LoadMetaGraph(string path)
9494
{
95-
return with(tf.Graph().as_default(), graph =>
95+
return tf_with(tf.Graph().as_default(), graph =>
9696
{
9797
tf.train.import_meta_graph(path);
9898
return graph;

src/Microsoft.ML.Dnn/Microsoft.ML.Dnn.csproj

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -15,7 +15,7 @@
1515
<ItemGroup>
1616
<PackageReference Include="System.IO.FileSystem.AccessControl" Version="$(SystemIOFileSystemAccessControl)" />
1717
<PackageReference Include="System.Security.Principal.Windows" Version="$(SystemSecurityPrincipalWindows)" />
18-
<PackageReference Include="TensorFlow.NET" Version="0.10.7.3" />
18+
<PackageReference Include="TensorFlow.NET" Version="0.10.8" />
1919
</ItemGroup>
2020

2121
<ItemGroup>

0 commit comments

Comments
 (0)