@@ -653,7 +653,7 @@ private void UpdateModelOnDisk(string modelDir, DnnEstimator.Options options)
             Tensor prediction = null;
             Tensor bottleneckTensor = evalGraph.OperationByName(BottleneckOperationName);
 
-            with(evalGraph.as_default(), graph =>
+            tf_with(evalGraph.as_default(), graph =>
             {
                 var (_, _, groundTruthInput, finalTensor) = AddFinalRetrainOps(classCount, options, bottleneckTensor, false);
                 tf.train.Saver().restore(evalSess, Path.Combine(Directory.GetCurrentDirectory(), CheckpointPath));
@@ -668,15 +668,15 @@ private void UpdateModelOnDisk(string modelDir, DnnEstimator.Options options)
             Tensor evaluationStep = null;
             Tensor correctPrediction = null;
 
-            with(tf.name_scope("accuracy"), scope =>
+            tf_with(tf.name_scope("accuracy"), scope =>
             {
-                with(tf.name_scope("correct_prediction"), delegate
+                tf_with(tf.name_scope("correct_prediction"), delegate
                 {
                     Prediction = tf.argmax(resultTensor, 1);
                     correctPrediction = tf.equal(Prediction, groundTruthTensor);
                 });
 
-                with(tf.name_scope("accuracy"), delegate
+                tf_with(tf.name_scope("accuracy"), delegate
                 {
                     evaluationStep = tf.reduce_mean(tf.cast(correctPrediction, tf.float32));
                 });
@@ -697,12 +697,12 @@ private void UpdateTransferLearningModelOnDisk(DnnEstimator.Options options, int
 
         private void VariableSummaries(RefVariable var)
         {
-            with(tf.name_scope("summaries"), delegate
+            tf_with(tf.name_scope("summaries"), delegate
            {
                 var mean = tf.reduce_mean(var);
                 tf.summary.scalar("mean", mean);
                 Tensor stddev = null;
-                with(tf.name_scope("stddev"), delegate
+                tf_with(tf.name_scope("stddev"), delegate
                 {
                     stddev = tf.sqrt(tf.reduce_mean(tf.square(var - mean)));
                 });
@@ -717,31 +717,31 @@ private void VariableSummaries(RefVariable var)
             DnnEstimator.Options options, Tensor bottleneckTensor, bool isTraining)
         {
             var (batch_size, bottleneck_tensor_size) = (bottleneckTensor.TensorShape.Dimensions[0], bottleneckTensor.TensorShape.Dimensions[1]);
-            with(tf.name_scope("input"), scope =>
+            tf_with(tf.name_scope("input"), scope =>
             {
                 LabelTensor = tf.placeholder(tf.int64, new TensorShape(batch_size), name: options.LabelColumn);
             });
 
             string layerName = "final_retrain_ops";
             Tensor logits = null;
-            with(tf.name_scope(layerName), scope =>
+            tf_with(tf.name_scope(layerName), scope =>
             {
                 RefVariable layerWeights = null;
-                with(tf.name_scope("weights"), delegate
+                tf_with(tf.name_scope("weights"), delegate
                 {
                     var initialValue = tf.truncated_normal(new int[] { bottleneck_tensor_size, classCount }, stddev: 0.001f);
                     layerWeights = tf.Variable(initialValue, name: "final_weights");
                     VariableSummaries(layerWeights);
                 });
 
                 RefVariable layerBiases = null;
-                with(tf.name_scope("biases"), delegate
+                tf_with(tf.name_scope("biases"), delegate
                 {
                     layerBiases = tf.Variable(tf.zeros(classCount), name: "final_biases");
                     VariableSummaries(layerBiases);
                 });
 
-                with(tf.name_scope("Wx_plus_b"), delegate
+                tf_with(tf.name_scope("Wx_plus_b"), delegate
                 {
                     logits = tf.matmul(bottleneckTensor, layerWeights) + layerBiases;
                     tf.summary.histogram("pre_activations", logits);
@@ -755,15 +755,15 @@ private void VariableSummaries(RefVariable var)
                 return (null, null, LabelTensor, SoftMaxTensor);
 
             Tensor crossEntropyMean = null;
-            with(tf.name_scope("cross_entropy"), delegate
+            tf_with(tf.name_scope("cross_entropy"), delegate
             {
                 crossEntropyMean = tf.losses.sparse_softmax_cross_entropy(
                     labels: LabelTensor, logits: logits);
             });
 
             tf.summary.scalar("cross_entropy", crossEntropyMean);
 
-            with(tf.name_scope("train"), delegate
+            tf_with(tf.name_scope("train"), delegate
             {
                 var optimizer = tf.train.GradientDescentOptimizer(options.LearningRate);
                 TrainStep = optimizer.minimize(crossEntropyMean);
@@ -775,7 +775,7 @@ private void VariableSummaries(RefVariable var)
         private void AddTransferLearningLayer(DnnEstimator.Options options, int classCount)
         {
             BottleneckTensor = Graph.OperationByName(BottleneckOperationName);
-            with(Graph.as_default(), delegate
+            tf_with(Graph.as_default(), delegate
            {
                 (TrainStep, CrossEntropy, LabelTensor, SoftMaxTensor) =
                     AddFinalRetrainOps(classCount, options, BottleneckTensor, true);
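
Note on the rename above: `tf_with` is TensorFlow.NET's replacement for the former global `with` helper, which reads like a language keyword at call sites. The helper enters a graph or name-scope context, runs the supplied body, and guarantees the context is exited afterwards. Below is a minimal sketch of that pattern, not the library's actual implementation; the `IContextManager` interface and member names here are illustrative stand-ins (TensorFlow.NET's real type uses a Python-style enter/exit pair):

    using System;

    // Illustrative stand-in for the library's context interface.
    public interface IContextManager : IDisposable
    {
        void Enter();
        void Exit();
    }

    public static class Binding
    {
        // Runs `body` inside the context `ctx` and always leaves the context,
        // even if the body throws -- the C# analogue of Python's `with` block.
        public static void tf_with<T>(T ctx, Action<T> body) where T : IContextManager
        {
            ctx.Enter();
            try
            {
                body(ctx);
            }
            finally
            {
                ctx.Exit();
                ctx.Dispose();
            }
        }
    }

Under that reading, a call site such as `tf_with(tf.name_scope("accuracy"), scope => { ... })` pushes the name scope on entry and pops it on exit, so every op created in the body gets the `accuracy/` prefix regardless of how the body returns.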