@@ -103,7 +103,7 @@ public static IDataTransform Create(IHostEnvironment env, Arguments args, IDataView view)
             var uniqueSourceNames = NgramExtractionUtils.GenerateUniqueSourceNames(h, args.Column, view.Schema);
             Contracts.Assert(uniqueSourceNames.Length == args.Column.Length);

-            var tokenizeColumns = new WordTokenizeTransform.ColumnInfo[args.Column.Length];
+            var tokenizeColumns = new List<WordTokenizeTransform.ColumnInfo>();
             var extractorCols = new NgramHashExtractorTransform.Column[args.Column.Length];
             var colCount = args.Column.Length;
             List<string> tmpColNames = new List<string>();
@@ -114,7 +114,7 @@ public static IDataTransform Create(IHostEnvironment env, Arguments args, IDataView view)
                 var curTmpNames = new string[srcCount];
                 Contracts.Assert(uniqueSourceNames[iinfo].Length == args.Column[iinfo].Source.Length);
                 for (int isrc = 0; isrc < srcCount; isrc++)
-                    tokenizeColumns[iinfo] = new WordTokenizeTransform.ColumnInfo(args.Column[iinfo].Source[isrc], curTmpNames[isrc] = uniqueSourceNames[iinfo][isrc]);
+                    tokenizeColumns.Add(new WordTokenizeTransform.ColumnInfo(args.Column[iinfo].Source[isrc], curTmpNames[isrc] = uniqueSourceNames[iinfo][isrc]));

                 tmpColNames.AddRange(curTmpNames);
                 extractorCols[iinfo] =
@@ -133,7 +133,7 @@ public static IDataTransform Create(IHostEnvironment env, Arguments args, IDataView view)
                 };
             }

-            view = new WordTokenizingEstimator(env, tokenizeColumns).Fit(view).Transform(view);
+            view = new WordTokenizingEstimator(env, tokenizeColumns.ToArray()).Fit(view).Transform(view);

            var featurizeArgs =
                new NgramHashExtractorTransform.Arguments