33// See the LICENSE file in the project root for more information.
44
55using System ;
6- using System . Linq ;
7- using System . Threading ;
8- using System . Threading . Tasks ;
96using System . Collections . Generic ;
107using Microsoft . ML . Runtime . Data ;
118using Microsoft . ML . Runtime . Internal . Utilities ;
129using Microsoft . ML . Runtime . Training ;
13- using Microsoft . ML . Runtime . FastTree . Internal ;
1410
1511namespace Microsoft . ML . Runtime . LightGBM
1612{
@@ -49,7 +45,13 @@ private sealed class CategoricalMetaData
4945
5046 protected readonly IHost Host ;
5147 protected readonly LightGbmArguments Args ;
52- protected readonly Dictionary < string , string > Options ;
48+
49+ /// <summary>
50+ /// Stores arguments as objects so they can be converted to culture-invariant strings at the end,
51+ /// keeping the code culture agnostic. When retrieving a value from this dictionary as a string,
52+ /// convert it to an invariant string via string.Format(CultureInfo.InvariantCulture, "{0}", Options[key]).
53+ /// </summary>
54+ protected readonly Dictionary < string , object > Options ;
5355 protected readonly IParallel ParallelTraining ;
5456
5557 // Store _featureCount and _trainedEnsemble to construct predictor.
@@ -159,9 +161,9 @@ protected virtual void GetDefaultParameters(IChannel ch, int numRow, bool hasCat
159161 double learningRate = Args . LearningRate ?? DefaultLearningRate ( numRow , hasCategarical , totalCats ) ;
160162 int numLeaves = Args . NumLeaves ?? DefaultNumLeaves ( numRow , hasCategarical , totalCats ) ;
161163 int minDataPerLeaf = Args . MinDataPerLeaf ?? DefaultMinDataPerLeaf ( numRow , numLeaves , 1 ) ;
162- Options [ "learning_rate" ] = learningRate . ToString ( ) ;
163- Options [ "num_leaves" ] = numLeaves . ToString ( ) ;
164- Options [ "min_data_per_leaf" ] = minDataPerLeaf . ToString ( ) ;
164+ Options [ "learning_rate" ] = learningRate ;
165+ Options [ "num_leaves" ] = numLeaves ;
166+ Options [ "min_data_per_leaf" ] = minDataPerLeaf ;
165167 if ( ! hiddenMsg )
166168 {
167169 if ( ! Args . LearningRate . HasValue )
@@ -192,7 +194,7 @@ private static List<int> GetCategoricalBoundires(int[] categoricalFeatures, int
192194 {
193195 if ( j < categoricalFeatures . Length && curFidx == categoricalFeatures [ j ] )
194196 {
195- if ( curFidx > catBoundaries . Last ( ) )
197+ if ( curFidx > catBoundaries [ catBoundaries . Count - 1 ] )
196198 catBoundaries . Add ( curFidx ) ;
197199 if ( categoricalFeatures [ j + 1 ] - categoricalFeatures [ j ] >= 0 )
198200 {
@@ -219,7 +221,7 @@ private static List<int> GetCategoricalBoundires(int[] categoricalFeatures, int
219221 private static List < string > ConstructCategoricalFeatureMetaData ( int [ ] categoricalFeatures , int rawNumCol , ref CategoricalMetaData catMetaData )
220222 {
221223 List < int > catBoundaries = GetCategoricalBoundires ( categoricalFeatures , rawNumCol ) ;
222- catMetaData . NumCol = catBoundaries . Count ( ) - 1 ;
224+ catMetaData . NumCol = catBoundaries . Count - 1 ;
223225 catMetaData . CategoricalBoudaries = catBoundaries . ToArray ( ) ;
224226 catMetaData . IsCategoricalFeature = new bool [ catMetaData . NumCol ] ;
225227 catMetaData . OnehotIndices = new int [ rawNumCol ] ;
@@ -279,7 +281,7 @@ private CategoricalMetaData GetCategoricalMetaData(IChannel ch, RoleMappedData t
279281 {
280282 var catIndices = ConstructCategoricalFeatureMetaData ( categoricalFeatures , rawNumCol , ref catMetaData ) ;
281283 // Set categorical features
282- Options [ "categorical_feature" ] = String . Join ( "," , catIndices ) ;
284+ Options [ "categorical_feature" ] = string . Join ( "," , catIndices ) ;
283285 }
284286 return catMetaData ;
285287 }
@@ -527,13 +529,13 @@ private void GetFeatureValueSparse(IChannel ch, FloatLabelCursor cursor,
527529 ++ nhot ;
528530 var prob = rand . NextSingle ( ) ;
529531 if ( prob < 1.0f / nhot )
530- values [ values . Count ( ) - 1 ] = fv ;
532+ values [ values . Count - 1 ] = fv ;
531533 }
532534 lastIdx = newColIdx ;
533535 }
534536 indices = featureIndices . ToArray ( ) ;
535537 featureValues = values . ToArray ( ) ;
536- cnt = featureIndices . Count ( ) ;
538+ cnt = featureIndices . Count ;
537539 }
538540 else
539541 {
0 commit comments