diff --git a/.editorconfig b/.editorconfig index b567338281..6f99ba5dff 100644 --- a/.editorconfig +++ b/.editorconfig @@ -1,33 +1,306 @@ +# EditorConfig is awesome: https://EditorConfig.org + +# top-most EditorConfig file root = true -[*.cs] -# Sort using directives with System.* appearing first +# Don't use tabs for indentation. +[*] +indent_style = space +# (Please don't specify an indent_size here; that has too many unintended consequences.) + +# Code files +[*.{cs,csx,vb,vbx}] +indent_size = 4 +insert_final_newline = true +charset = utf-8-bom + +# XML project files +[*.{csproj,vbproj,vcxproj,vcxproj.filters,proj,projitems,shproj}] +indent_size = 2 + +# XML config files +[*.{props,targets,ruleset,config,nuspec,resx,vsixmanifest,vsct}] +indent_size = 2 + +# JSON files +[*.json] +indent_size = 2 + +# Powershell files +[*.ps1] +indent_size = 2 + +# Shell script files +[*.sh] +end_of_line = lf +indent_size = 2 + +# Dotnet code style settings: +[*.{cs,vb}] + +# IDE0055: Fix formatting +dotnet_diagnostic.IDE0055.severity = warning + +# Sort using and Import directives with System.* appearing first dotnet_sort_system_directives_first = true +dotnet_separate_import_directive_groups = false +# Avoid "this." and "Me." 
if not necessary +dotnet_style_qualification_for_field = false:refactoring +dotnet_style_qualification_for_property = false:refactoring +dotnet_style_qualification_for_method = false:refactoring +dotnet_style_qualification_for_event = false:refactoring -# VSTHRD002: Avoid problematic synchronous waits -dotnet_diagnostic.VSTHRD002.severity = none +# Use language keywords instead of framework type names for type references +dotnet_style_predefined_type_for_locals_parameters_members = true:suggestion +dotnet_style_predefined_type_for_member_access = true:suggestion -[test/**/*.cs] +# Suggest more modern language features when available +dotnet_style_object_initializer = true:suggestion +dotnet_style_collection_initializer = true:suggestion +dotnet_style_coalesce_expression = true:suggestion +dotnet_style_null_propagation = true:suggestion +dotnet_style_explicit_tuple_names = true:suggestion -# MSML_GeneralName: This name should be PascalCased -dotnet_diagnostic.MSML_GeneralName.severity = none +# Whitespace options +dotnet_style_allow_multiple_blank_lines_experimental = false -# MSML_NoBestFriendInternal: Cross-assembly internal access requires referenced item to have Microsoft.ML.BestFriendAttribute attribute. 
-dotnet_diagnostic.MSML_NoBestFriendInternal.severity = none +# Non-private static fields are PascalCase +dotnet_naming_rule.non_private_static_fields_should_be_pascal_case.severity = suggestion +dotnet_naming_rule.non_private_static_fields_should_be_pascal_case.symbols = non_private_static_fields +dotnet_naming_rule.non_private_static_fields_should_be_pascal_case.style = non_private_static_field_style -# MSML_NoInstanceInitializers: No initializers on instance fields or properties -dotnet_diagnostic.MSML_NoInstanceInitializers.severity = none +dotnet_naming_symbols.non_private_static_fields.applicable_kinds = field +dotnet_naming_symbols.non_private_static_fields.applicable_accessibilities = public, protected, internal, protected_internal, private_protected +dotnet_naming_symbols.non_private_static_fields.required_modifiers = static -[test/Microsoft.ML.CodeAnalyzer.Tests/**.cs] -# BaseTestClass does not apply for analyzer testing. -# MSML_ExtendBaseTestClass: Test classes should be derived from BaseTestClass -dotnet_diagnostic.MSML_ExtendBaseTestClass.severity = none +dotnet_naming_style.non_private_static_field_style.capitalization = pascal_case -# The MSML_RelaxTestNaming suppressor for VSTHRD200 is not active for CodeAnalyzer.Tests, so we disable it altogether. 
-# VSTHRD200: Use "Async" suffix for async methods -dotnet_diagnostic.VSTHRD200.severity = none +# Non-private readonly fields are PascalCase +dotnet_naming_rule.non_private_readonly_fields_should_be_pascal_case.severity = suggestion +dotnet_naming_rule.non_private_readonly_fields_should_be_pascal_case.symbols = non_private_readonly_fields +dotnet_naming_rule.non_private_readonly_fields_should_be_pascal_case.style = non_private_readonly_field_style -# Xml project files -[*.{csproj}] -indent_size = 2 -charset = utf-8 +dotnet_naming_symbols.non_private_readonly_fields.applicable_kinds = field +dotnet_naming_symbols.non_private_readonly_fields.applicable_accessibilities = public, protected, internal, protected_internal, private_protected +dotnet_naming_symbols.non_private_readonly_fields.required_modifiers = readonly + +dotnet_naming_style.non_private_readonly_field_style.capitalization = pascal_case + +# Constants are PascalCase +dotnet_naming_rule.constants_should_be_pascal_case.severity = suggestion +dotnet_naming_rule.constants_should_be_pascal_case.symbols = constants +dotnet_naming_rule.constants_should_be_pascal_case.style = constant_style + +dotnet_naming_symbols.constants.applicable_kinds = field, local +dotnet_naming_symbols.constants.required_modifiers = const + +dotnet_naming_style.constant_style.capitalization = pascal_case + +# Static fields are camelCase and start with s_ +dotnet_naming_rule.static_fields_should_be_camel_case.severity = suggestion +dotnet_naming_rule.static_fields_should_be_camel_case.symbols = static_fields +dotnet_naming_rule.static_fields_should_be_camel_case.style = static_field_style + +dotnet_naming_symbols.static_fields.applicable_kinds = field +dotnet_naming_symbols.static_fields.required_modifiers = static + +dotnet_naming_style.static_field_style.capitalization = camel_case +dotnet_naming_style.static_field_style.required_prefix = s_ + +# Instance fields are camelCase and start with _ 
+dotnet_naming_rule.instance_fields_should_be_camel_case.severity = suggestion +dotnet_naming_rule.instance_fields_should_be_camel_case.symbols = instance_fields +dotnet_naming_rule.instance_fields_should_be_camel_case.style = instance_field_style + +dotnet_naming_symbols.instance_fields.applicable_kinds = field + +dotnet_naming_style.instance_field_style.capitalization = camel_case +dotnet_naming_style.instance_field_style.required_prefix = _ + +# Locals and parameters are camelCase +dotnet_naming_rule.locals_should_be_camel_case.severity = suggestion +dotnet_naming_rule.locals_should_be_camel_case.symbols = locals_and_parameters +dotnet_naming_rule.locals_should_be_camel_case.style = camel_case_style + +dotnet_naming_symbols.locals_and_parameters.applicable_kinds = parameter, local + +dotnet_naming_style.camel_case_style.capitalization = camel_case + +# Local functions are PascalCase +dotnet_naming_rule.local_functions_should_be_pascal_case.severity = suggestion +dotnet_naming_rule.local_functions_should_be_pascal_case.symbols = local_functions +dotnet_naming_rule.local_functions_should_be_pascal_case.style = local_function_style + +dotnet_naming_symbols.local_functions.applicable_kinds = local_function + +dotnet_naming_style.local_function_style.capitalization = pascal_case + +# By default, name items with PascalCase +dotnet_naming_rule.members_should_be_pascal_case.severity = suggestion +dotnet_naming_rule.members_should_be_pascal_case.symbols = all_members +dotnet_naming_rule.members_should_be_pascal_case.style = pascal_case_style + +dotnet_naming_symbols.all_members.applicable_kinds = * + +dotnet_naming_style.pascal_case_style.capitalization = pascal_case + +# error RS2008: Enable analyzer release tracking for the analyzer project containing rule '{0}' +dotnet_diagnostic.RS2008.severity = none + +# IDE0073: File header +dotnet_diagnostic.IDE0073.severity = warning +file_header_template = Licensed to the .NET Foundation under one or more agreements.\nThe .NET 
Foundation licenses this file to you under the MIT license.\nSee the LICENSE file in the project root for more information. + +# IDE0035: Remove unreachable code +dotnet_diagnostic.IDE0035.severity = warning + +# IDE0036: Order modifiers +dotnet_diagnostic.IDE0036.severity = warning + +# IDE0043: Format string contains invalid placeholder +dotnet_diagnostic.IDE0043.severity = warning + +# IDE0044: Make field readonly +dotnet_diagnostic.IDE0044.severity = warning + +# RS0016: Only enable if API files are present +dotnet_public_api_analyzer.require_api_files = true + +# CSharp code style settings: +[*.cs] +# Newline settings +csharp_new_line_before_open_brace = all +csharp_new_line_before_else = true +csharp_new_line_before_catch = true +csharp_new_line_before_finally = true +csharp_new_line_before_members_in_object_initializers = true +csharp_new_line_before_members_in_anonymous_types = true +csharp_new_line_between_query_expression_clauses = true + +# Indentation preferences +csharp_indent_block_contents = true +csharp_indent_braces = false +csharp_indent_case_contents = true +csharp_indent_case_contents_when_block = true +csharp_indent_switch_labels = true +csharp_indent_labels = flush_left + +# Whitespace options +csharp_style_allow_embedded_statements_on_same_line_experimental = false +csharp_style_allow_blank_lines_between_consecutive_braces_experimental = false +csharp_style_allow_blank_line_after_colon_in_constructor_initializer_experimental = false + +# Prefer "var" everywhere +csharp_style_var_for_built_in_types = true:suggestion +csharp_style_var_when_type_is_apparent = true:suggestion +csharp_style_var_elsewhere = true:suggestion + +# Prefer method-like constructs to have a block body +csharp_style_expression_bodied_methods = false:none +csharp_style_expression_bodied_constructors = false:none +csharp_style_expression_bodied_operators = false:none + +# Prefer property-like constructs to have an expression-body +csharp_style_expression_bodied_properties = 
true:none +csharp_style_expression_bodied_indexers = true:none +csharp_style_expression_bodied_accessors = true:none + +# Suggest more modern language features when available +csharp_style_pattern_matching_over_is_with_cast_check = true:suggestion +csharp_style_pattern_matching_over_as_with_null_check = true:suggestion +csharp_style_inlined_variable_declaration = true:suggestion +csharp_style_throw_expression = true:suggestion +csharp_style_conditional_delegate_call = true:suggestion + +# Space preferences +csharp_space_after_cast = false +csharp_space_after_colon_in_inheritance_clause = true +csharp_space_after_comma = true +csharp_space_after_dot = false +csharp_space_after_keywords_in_control_flow_statements = true +csharp_space_after_semicolon_in_for_statement = true +csharp_space_around_binary_operators = before_and_after +csharp_space_around_declaration_statements = do_not_ignore +csharp_space_before_colon_in_inheritance_clause = true +csharp_space_before_comma = false +csharp_space_before_dot = false +csharp_space_before_open_square_brackets = false +csharp_space_before_semicolon_in_for_statement = false +csharp_space_between_empty_square_brackets = false +csharp_space_between_method_call_empty_parameter_list_parentheses = false +csharp_space_between_method_call_name_and_opening_parenthesis = false +csharp_space_between_method_call_parameter_list_parentheses = false +csharp_space_between_method_declaration_empty_parameter_list_parentheses = false +csharp_space_between_method_declaration_name_and_open_parenthesis = false +csharp_space_between_method_declaration_parameter_list_parentheses = false +csharp_space_between_parentheses = false +csharp_space_between_square_brackets = false + +# Blocks are allowed +csharp_prefer_braces = true:silent +csharp_preserve_single_line_blocks = true +csharp_preserve_single_line_statements = true + +# Currently only enabled for C# due to crash in VB analyzer. 
VB can be enabled once +# https://github.com/dotnet/roslyn/pull/54259 has been published. +dotnet_style_allow_statement_immediately_after_block_experimental = false + +[src/CodeStyle/**.{cs,vb}] +# warning RS0005: Do not use generic CodeAction.Create to create CodeAction +dotnet_diagnostic.RS0005.severity = none + +[src/{Analyzers,CodeStyle,Features,Workspaces,EditorFeatures,VisualStudio}/**/*.{cs,vb}] + +# IDE0011: Add braces +csharp_prefer_braces = when_multiline:warning +# NOTE: We need the below severity entry for Add Braces due to https://github.com/dotnet/roslyn/issues/44201 +dotnet_diagnostic.IDE0011.severity = warning + +# IDE0040: Add accessibility modifiers +dotnet_diagnostic.IDE0040.severity = warning + +# CONSIDER: Are IDE0051 and IDE0052 too noisy to be warnings for IDE editing scenarios? Should they be made build-only warnings? +# IDE0051: Remove unused private member +dotnet_diagnostic.IDE0051.severity = warning + +# IDE0052: Remove unread private member +dotnet_diagnostic.IDE0052.severity = warning + +# IDE0059: Unnecessary assignment to a value +dotnet_diagnostic.IDE0059.severity = warning + +# IDE0060: Remove unused parameter +dotnet_diagnostic.IDE0060.severity = warning + +# CA1012: Abstract types should not have public constructors +dotnet_diagnostic.CA1012.severity = warning + +# CA1822: Make member static +dotnet_diagnostic.CA1822.severity = warning + +# Prefer "var" everywhere +dotnet_diagnostic.IDE0007.severity = warning +csharp_style_var_for_built_in_types = true:warning +csharp_style_var_when_type_is_apparent = true:warning +csharp_style_var_elsewhere = true:warning + +# dotnet_style_allow_multiple_blank_lines_experimental +dotnet_diagnostic.IDE2000.severity = warning + +# csharp_style_allow_embedded_statements_on_same_line_experimental +dotnet_diagnostic.IDE2001.severity = warning + +# csharp_style_allow_blank_lines_between_consecutive_braces_experimental +dotnet_diagnostic.IDE2002.severity = warning + +# 
dotnet_style_allow_statement_immediately_after_block_experimental +dotnet_diagnostic.IDE2003.severity = warning + +# csharp_style_allow_blank_line_after_colon_in_constructor_initializer_experimental +dotnet_diagnostic.IDE2004.severity = warning + +[src/{VisualStudio}/**/*.{cs,vb}] +# CA1822: Make member static +# Not enforced as a build 'warning' for 'VisualStudio' layer due to large number of false positives from https://github.com/dotnet/roslyn-analyzers/issues/3857 and https://github.com/dotnet/roslyn-analyzers/issues/3858 +# Additionally, there is a risk of accidentally breaking an internal API that partners rely on through IVT. +dotnet_diagnostic.CA1822.severity = suggestion \ No newline at end of file diff --git a/docs/samples/Microsoft.ML.AutoML.Samples/DataStructures/MovieRatingPrediction.cs b/docs/samples/Microsoft.ML.AutoML.Samples/DataStructures/MovieRatingPrediction.cs index 6c4d6cdd2c..98950aed76 100644 --- a/docs/samples/Microsoft.ML.AutoML.Samples/DataStructures/MovieRatingPrediction.cs +++ b/docs/samples/Microsoft.ML.AutoML.Samples/DataStructures/MovieRatingPrediction.cs @@ -11,4 +11,4 @@ public class MovieRatingPrediction [ColumnName("Score")] public float Rating; } -} \ No newline at end of file +} diff --git a/docs/samples/Microsoft.ML.AutoML.Samples/MulticlassClassificationExperiment.cs b/docs/samples/Microsoft.ML.AutoML.Samples/MulticlassClassificationExperiment.cs index 4fb2ec1073..7d829f8623 100644 --- a/docs/samples/Microsoft.ML.AutoML.Samples/MulticlassClassificationExperiment.cs +++ b/docs/samples/Microsoft.ML.AutoML.Samples/MulticlassClassificationExperiment.cs @@ -59,7 +59,7 @@ public static void Run() Console.WriteLine("Press any key to continue..."); Console.ReadKey(); } - + private static void PrintMetrics(MulticlassClassificationMetrics metrics) { Console.WriteLine($"LogLoss: {metrics.LogLoss}"); diff --git a/docs/samples/Microsoft.ML.AutoML.Samples/RankingExperiment.cs b/docs/samples/Microsoft.ML.AutoML.Samples/RankingExperiment.cs 
index 8d04914fb4..42abd9cb37 100644 --- a/docs/samples/Microsoft.ML.AutoML.Samples/RankingExperiment.cs +++ b/docs/samples/Microsoft.ML.AutoML.Samples/RankingExperiment.cs @@ -69,7 +69,7 @@ public static void Run() testPage = new SearchData { GroupId = "2", - Features = 2, + Features = 2, Label = 9 }; prediction = predictionEngine.Predict(testPage); diff --git a/docs/samples/Microsoft.ML.AutoML.Samples/RecommendationExperiment.cs b/docs/samples/Microsoft.ML.AutoML.Samples/RecommendationExperiment.cs index 95f453cc30..7d4b75b962 100644 --- a/docs/samples/Microsoft.ML.AutoML.Samples/RecommendationExperiment.cs +++ b/docs/samples/Microsoft.ML.AutoML.Samples/RecommendationExperiment.cs @@ -89,4 +89,4 @@ private static void PrintMetrics(RegressionMetrics metrics) Console.WriteLine($"RSquared: {metrics.RSquared}"); } } -} \ No newline at end of file +} diff --git a/docs/samples/Microsoft.ML.AutoML.Samples/RegressionExperiment.cs b/docs/samples/Microsoft.ML.AutoML.Samples/RegressionExperiment.cs index 5c6d3b3195..bffca6fc0f 100644 --- a/docs/samples/Microsoft.ML.AutoML.Samples/RegressionExperiment.cs +++ b/docs/samples/Microsoft.ML.AutoML.Samples/RegressionExperiment.cs @@ -13,7 +13,7 @@ public static class RegressionExperiment private static string ModelPath = @"\TaxiFareModel.zip"; private static string LabelColumnName = "FareAmount"; private static uint ExperimentTime = 60; - + public static void Run() { MLContext mlContext = new MLContext(); diff --git a/docs/samples/Microsoft.ML.Samples.GPU/Program.cs b/docs/samples/Microsoft.ML.Samples.GPU/Program.cs index 77fc8e1b9f..f64d51f275 100644 --- a/docs/samples/Microsoft.ML.Samples.GPU/Program.cs +++ b/docs/samples/Microsoft.ML.Samples.GPU/Program.cs @@ -27,4 +27,4 @@ internal static void RunAll() Console.WriteLine("Number of samples that ran without any exception: " + samples); } } -} \ No newline at end of file +} diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/CrossValidationSplit.cs 
b/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/CrossValidationSplit.cs index cbe68ba117..0901e61d8b 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/CrossValidationSplit.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/CrossValidationSplit.cs @@ -28,7 +28,7 @@ public static void Example() // that column to cross validation it would be used to break data into // certain chunks. var folds = mlContext.Data - .CrossValidationSplit(dataview, numberOfFolds:3, + .CrossValidationSplit(dataview, numberOfFolds: 3, samplingKeyColumnName: "Group"); var trainSet = mlContext.Data @@ -153,7 +153,7 @@ public static void Example() .CreateEnumerable(folds[2].TrainSet, reuseRowObject: false); - testSet = mlContext.Data.CreateEnumerable(folds[2].TestSet, + testSet = mlContext.Data.CreateEnumerable(folds[2].TestSet, reuseRowObject: false); PrintPreviewRows(trainSet, testSet); @@ -172,7 +172,7 @@ public static void Example() // [Group, 1], [Features, 0.4421779] } - private static IEnumerable GenerateRandomDataPoints(int count, + private static IEnumerable GenerateRandomDataPoints(int count, int seed = 0) { @@ -199,7 +199,7 @@ private class DataPoint } // print helper - private static void PrintPreviewRows(IEnumerable trainSet, + private static void PrintPreviewRows(IEnumerable trainSet, IEnumerable testSet) { diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/DataViewEnumerable.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/DataViewEnumerable.cs index aaefc85597..0aa418cfcf 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/DataViewEnumerable.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/DataViewEnumerable.cs @@ -17,30 +17,30 @@ public static void Example() // Get a small dataset as an IEnumerable. IEnumerable enumerableOfData = - GetSampleTemperatureData(5); + GetSampleTemperatureData(5); // Load dataset into an IDataView. 
IDataView data = mlContext.Data.LoadFromEnumerable(enumerableOfData); // We can now examine the records in the IDataView. We first create an - // enumerable of rows in the IDataView. + // enumerable of rows in the IDataView. var rowEnumerable = mlContext.Data - .CreateEnumerable(data, - reuseRowObject: true); + .CreateEnumerable(data, + reuseRowObject: true); // SampleTemperatureDataWithLatitude has the definition of a Latitude - // column of type float. We can use the parameter ignoreMissingColumns - // to true to ignore any missing columns in the IDataView. The produced - // enumerable will have the Latitude field set to the default for the - // data type, in this case 0. + // column of type float. We can use the parameter ignoreMissingColumns + // to true to ignore any missing columns in the IDataView. The produced + // enumerable will have the Latitude field set to the default for the + // data type, in this case 0. var rowEnumerableIgnoreMissing = mlContext.Data - .CreateEnumerable(data, - reuseRowObject: true, ignoreMissingColumns: true); + .CreateEnumerable(data, + reuseRowObject: true, ignoreMissingColumns: true); Console.WriteLine($"Date\tTemperature"); foreach (var row in rowEnumerable) Console.WriteLine( - $"{row.Date.ToString("d")}\t{row.Temperature}"); + $"{row.Date.ToString("d")}\t{row.Temperature}"); // Expected output: // Date Temperature @@ -52,8 +52,8 @@ public static void Example() Console.WriteLine($"Date\tTemperature\tLatitude"); foreach (var row in rowEnumerableIgnoreMissing) - Console.WriteLine($"{row.Date.ToString("d")}\t{row.Temperature}" - + $"\t{row.Latitude}"); + Console.WriteLine($"{row.Date.ToString("d")}\t{row.Temperature}" + + $"\t{row.Latitude}"); // Expected output: // Date Temperature Latitude @@ -69,21 +69,21 @@ private class SampleTemperatureData public DateTime Date { get; set; } public float Temperature { get; set; } } - - private class SampleTemperatureDataWithLatitude - { - public float Latitude { get; set; } - public 
DateTime Date { get; set; } - public float Temperature { get; set; } - } - + + private class SampleTemperatureDataWithLatitude + { + public float Latitude { get; set; } + public DateTime Date { get; set; } + public float Temperature { get; set; } + } + /// /// Get a fake temperature dataset. /// /// The number of examples to return. /// An enumerable of . private static IEnumerable GetSampleTemperatureData( - int exampleCount) + int exampleCount) { var rng = new Random(1234321); @@ -94,8 +94,12 @@ private class SampleTemperatureDataWithLatitude { date = date.AddDays(1); temperature += rng.Next(-5, 5); - yield return new SampleTemperatureData { Date = date, Temperature = - temperature }; + yield return new SampleTemperatureData + { + Date = date, + Temperature = + temperature + }; } } diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/FilterRowsByColumn.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/FilterRowsByColumn.cs index af6df27200..cbbf83677e 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/FilterRowsByColumn.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/FilterRowsByColumn.cs @@ -12,7 +12,7 @@ public static void Example() { // Create a new context for ML.NET operations. It can be used for // exception tracking and logging, as a catalog of available - // operations and as the source of randomness. + // operations and as the source of randomness. var mlContext = new MLContext(); // Get a small dataset as an IEnumerable. @@ -24,7 +24,7 @@ public static void Example() foreach (var row in enumerableOfData) { Console.WriteLine( - $"{row.Date.ToString("d")}\t{row.Temperature}"); + $"{row.Date.ToString("d")}\t{row.Temperature}"); } Console.WriteLine(); @@ -42,22 +42,22 @@ public static void Example() // 1/11/2012 29 // Filter the data by the values of the temperature. The lower bound is - // inclusive, the upper exclusive. + // inclusive, the upper exclusive. 
var filteredData = mlContext.Data - .FilterRowsByColumn(data, columnName: "Temperature", - lowerBound: 34, upperBound: 37); + .FilterRowsByColumn(data, columnName: "Temperature", + lowerBound: 34, upperBound: 37); // Look at the filtered data and observe that values outside [34,37) - // have been dropped. + // have been dropped. var enumerable = mlContext.Data - .CreateEnumerable(filteredData, - reuseRowObject: true); + .CreateEnumerable(filteredData, + reuseRowObject: true); Console.WriteLine($"Date\tTemperature"); foreach (var row in enumerable) { Console.WriteLine( - $"{row.Date.ToString("d")}\t{row.Temperature}"); + $"{row.Date.ToString("d")}\t{row.Temperature}"); } @@ -76,14 +76,14 @@ private class SampleTemperatureData public DateTime Date { get; set; } public float Temperature { get; set; } } - + /// /// Get a fake temperature dataset. /// /// The number of examples to return. /// An enumerable of . private static IEnumerable GetSampleTemperatureData( - int exampleCount) + int exampleCount) { var rng = new Random(1234321); @@ -94,8 +94,12 @@ private class SampleTemperatureData { date = date.AddDays(1); temperature += rng.Next(-5, 5); - yield return new SampleTemperatureData { Date = date, Temperature = - temperature }; + yield return new SampleTemperatureData + { + Date = date, + Temperature = + temperature + }; } } diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/FilterRowsByCustomPredicate.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/FilterRowsByCustomPredicate.cs index 3e552e52b4..2d502be346 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/FilterRowsByCustomPredicate.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/FilterRowsByCustomPredicate.cs @@ -35,7 +35,7 @@ public static void Example() Console.WriteLine(); // Expected output: - + // Name // Joey // Chandler diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/FilterRowsByKeyColumnFraction.cs 
b/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/FilterRowsByKeyColumnFraction.cs index 6b95bdf05e..c036e547a7 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/FilterRowsByKeyColumnFraction.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/FilterRowsByKeyColumnFraction.cs @@ -99,7 +99,7 @@ public static void Example() // Look at the data and observe that values above 2 have been filtered // out var filteredMiddleEnumerable = mlContext.Data - .CreateEnumerable(filteredMiddleData, + .CreateEnumerable(filteredMiddleData, reuseRowObject: true); Console.WriteLine($"Age"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/LoadingText.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/LoadingText.cs index 4f0a32dc9f..d3a8a27e98 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/LoadingText.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/LoadingText.cs @@ -65,7 +65,7 @@ public static void Example() // Load all files using path wildcard. - var multipleFilesWildcardData = + var multipleFilesWildcardData = loader.Load(Path.Combine(dataDirectoryName, "Data_*.csv")); PrintRowCount(multipleFilesWildcardData); @@ -110,7 +110,7 @@ public static void Example() // which is of type Vector. PrintRowCount(dataWithInferredLength); - + // Expected Output: // 10 diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/ShuffleRows.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/ShuffleRows.cs index 73ef6657e1..67e011d859 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/ShuffleRows.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/ShuffleRows.cs @@ -12,7 +12,7 @@ public static void Example() { // Create a new context for ML.NET operations. It can be used for // exception tracking and logging, as a catalog of available operations - // and as the source of randomness. + // and as the source of randomness. 
var mlContext = new MLContext(); // Get a small dataset as an IEnumerable. @@ -24,7 +24,7 @@ public static void Example() foreach (var row in enumerableOfData) { Console.WriteLine($"{row.Date.ToString("d")}" + - $"\t{row.Temperature}"); + $"\t{row.Temperature}"); } Console.WriteLine(); // Expected output: @@ -39,16 +39,16 @@ public static void Example() var shuffledData = mlContext.Data.ShuffleRows(data, seed: 123); // Look at the shuffled data and observe that the rows are in a - // randomized order. + // randomized order. var enumerable = mlContext.Data - .CreateEnumerable(shuffledData, - reuseRowObject: true); + .CreateEnumerable(shuffledData, + reuseRowObject: true); Console.WriteLine($"Date\tTemperature"); foreach (var row in enumerable) { Console.WriteLine($"{row.Date.ToString("d")}" + - $"\t{row.Temperature}"); + $"\t{row.Temperature}"); } // Expected output: // Date Temperature @@ -64,14 +64,14 @@ private class SampleTemperatureData public DateTime Date { get; set; } public float Temperature { get; set; } } - + /// /// Get a fake temperature dataset. /// /// The number of examples to return. /// An enumerable of . private static IEnumerable GetSampleTemperatureData( - int exampleCount) + int exampleCount) { var rng = new Random(1234321); @@ -82,8 +82,12 @@ private class SampleTemperatureData { date = date.AddDays(1); temperature += rng.Next(-5, 5); - yield return new SampleTemperatureData { Date = date, Temperature = - temperature }; + yield return new SampleTemperatureData + { + Date = date, + Temperature = + temperature + }; } } diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/SkipRows.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/SkipRows.cs index 006082f238..1ebaa50d36 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/SkipRows.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/SkipRows.cs @@ -11,7 +11,7 @@ public static void Example() { // Create a new context for ML.NET operations. 
It can be used for // exception tracking and logging, as a catalog of available operations - // and as the source of randomness. + // and as the source of randomness. var mlContext = new MLContext(); // Get a small dataset as an IEnumerable. @@ -23,7 +23,7 @@ public static void Example() foreach (var row in enumerableOfData) { Console.WriteLine($"{row.Date.ToString("d")}" + - $"\t{row.Temperature}"); + $"\t{row.Temperature}"); } Console.WriteLine(); // Expected output: @@ -43,16 +43,16 @@ public static void Example() var filteredData = mlContext.Data.SkipRows(data, 5); // Look at the filtered data and observe that the first 5 rows have been - // dropped + // dropped var enumerable = mlContext.Data - .CreateEnumerable(filteredData, - reuseRowObject: true); + .CreateEnumerable(filteredData, + reuseRowObject: true); Console.WriteLine($"Date\tTemperature"); foreach (var row in enumerable) { Console.WriteLine($"{row.Date.ToString("d")}" + - $"\t{row.Temperature}"); + $"\t{row.Temperature}"); } // Expected output: // Date Temperature @@ -68,14 +68,14 @@ private class SampleTemperatureData public DateTime Date { get; set; } public float Temperature { get; set; } } - + /// /// Get a fake temperature dataset. /// /// The number of examples to return. /// An enumerable of . 
private static IEnumerable GetSampleTemperatureData( - int exampleCount) + int exampleCount) { var rng = new Random(1234321); @@ -86,8 +86,12 @@ private class SampleTemperatureData { date = date.AddDays(1); temperature += rng.Next(-5, 5); - yield return new SampleTemperatureData { Date = date, Temperature = - temperature }; + yield return new SampleTemperatureData + { + Date = date, + Temperature = + temperature + }; } } diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/TakeRows.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/TakeRows.cs index 5ad6f13f7f..6c469d79d6 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/TakeRows.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/TakeRows.cs @@ -24,7 +24,7 @@ public static void Example() foreach (var row in enumerableOfData) { Console.WriteLine($"{row.Date.ToString("d")}" + - $"\t{row.Temperature}"); + $"\t{row.Temperature}"); } Console.WriteLine(); // Expected output: @@ -44,16 +44,16 @@ public static void Example() var filteredData = mlContext.Data.TakeRows(data, 5); // Look at the filtered data and observe that only the first 5 rows are - // in the resulting dataset. + // in the resulting dataset. var enumerable = mlContext.Data - .CreateEnumerable(filteredData, - reuseRowObject: true); + .CreateEnumerable(filteredData, + reuseRowObject: true); Console.WriteLine($"Date\tTemperature"); foreach (var row in enumerable) { Console.WriteLine($"{row.Date.ToString("d")}" + - $"\t{row.Temperature}"); + $"\t{row.Temperature}"); } // Expected output: // Date Temperature @@ -69,14 +69,14 @@ private class SampleTemperatureData public DateTime Date { get; set; } public float Temperature { get; set; } } - + /// /// Get a fake temperature dataset. /// /// The number of examples to return. /// An enumerable of . 
private static IEnumerable GetSampleTemperatureData( - int exampleCount) + int exampleCount) { var rng = new Random(1234321); @@ -87,8 +87,12 @@ private class SampleTemperatureData { date = date.AddDays(1); temperature += rng.Next(-5, 5); - yield return new SampleTemperatureData { Date = date, Temperature = - temperature }; + yield return new SampleTemperatureData + { + Date = date, + Temperature = + temperature + }; } } diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/TrainTestSplit.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/TrainTestSplit.cs index e4fc26296b..5c9986f1e8 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/TrainTestSplit.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/DataOperations/TrainTestSplit.cs @@ -35,7 +35,7 @@ public static void Example() .CreateEnumerable(split.TrainSet, reuseRowObject: false); var testSet = mlContext.Data - .CreateEnumerable(split.TestSet,reuseRowObject: false); + .CreateEnumerable(split.TestSet, reuseRowObject: false); PrintPreviewRows(trainSet, testSet); @@ -56,10 +56,10 @@ public static void Example() // Example of a split without specifying a sampling key column. 
split = mlContext.Data.TrainTestSplit(dataview, testFraction: 0.2); trainSet = mlContext.Data - .CreateEnumerable(split.TrainSet,reuseRowObject: false); + .CreateEnumerable(split.TrainSet, reuseRowObject: false); testSet = mlContext.Data - .CreateEnumerable(split.TestSet,reuseRowObject: false); + .CreateEnumerable(split.TestSet, reuseRowObject: false); PrintPreviewRows(trainSet, testSet); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/TensorFlow/ImageClassification.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/TensorFlow/ImageClassification.cs index f3201f3389..e84bad2c5c 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/TensorFlow/ImageClassification.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/TensorFlow/ImageClassification.cs @@ -1,4 +1,4 @@ -using System; +using System; using System.IO; using System.Linq; using System.Net; @@ -71,7 +71,7 @@ public static void Example() //---------- } - private const int imageHeight = 224; + private const int imageHeight = 224; private const int imageWidth = 224; private const int numChannels = 3; private const int inputSize = imageHeight * imageWidth * numChannels; @@ -95,7 +95,7 @@ public static TensorData[] GetTensorData() // This can be any numerical data. Assume image pixel values. 
var image1 = Enumerable.Range(0, inputSize).Select( x => (float)x / inputSize).ToArray(); - + var image2 = Enumerable.Range(0, inputSize).Select( x => (float)(x + 10000) / inputSize).ToArray(); return new TensorData[] { new TensorData() { input = image1 }, diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/TensorFlow/TextClassification.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/TensorFlow/TextClassification.cs index 2dbadbc3f2..5eb4fdd2ef 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/TensorFlow/TextClassification.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/TensorFlow/TextClassification.cs @@ -1,4 +1,4 @@ -using System; +using System; using System.IO; using Microsoft.ML; using Microsoft.ML.Data; @@ -87,7 +87,7 @@ public static void Example() // 5. Retreives the 'Prediction' from TensorFlow and put it into // ML.NET Pipeline - Action ResizeFeaturesAction = + Action ResizeFeaturesAction = (i, j) => { j.Sentiment_Text = i.Sentiment_Text; @@ -123,9 +123,9 @@ public static void Example() var prediction = engine.Predict(data[0]); Console.WriteLine("Number of classes: {0}", prediction.Prediction - .Length); + .Length); Console.WriteLine("Is sentiment/review positive? {0}", prediction - .Prediction[1] > 0.5 ? "Yes." : "No."); + .Prediction[1] > 0.5 ? "Yes." : "No."); Console.WriteLine("Prediction Confidence: {0}", prediction.Prediction[1] .ToString("0.00")); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/TextTransform.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/TextTransform.cs index db8f7c4961..79dd300fca 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/TextTransform.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/TextTransform.cs @@ -42,7 +42,7 @@ public static void Example() // pipeline uses the default settings to featurize. 
string defaultColumnName = "DefaultTextFeatures"; var default_pipeline = ml.Transforms.Text - .FeaturizeText(defaultColumnName , "SentimentText"); + .FeaturizeText(defaultColumnName, "SentimentText"); // Another pipeline, that customizes the advanced settings of the // FeaturizeText transformer. @@ -51,17 +51,19 @@ public static void Example() .FeaturizeText(customizedColumnName, new TextFeaturizingEstimator.Options - { - KeepPunctuations = false, - KeepNumbers = false, - OutputTokensColumnName = "OutputTokens", - StopWordsRemoverOptions = - new StopWordsRemovingEstimator.Options() { - Language = TextFeaturizingEstimator.Language.English }, + { + KeepPunctuations = false, + KeepNumbers = false, + OutputTokensColumnName = "OutputTokens", + StopWordsRemoverOptions = + new StopWordsRemovingEstimator.Options() + { + Language = TextFeaturizingEstimator.Language.English + }, // supports English, French, German, Dutch, Italian, Spanish, // Japanese - }, "SentimentText"); + }, "SentimentText"); // The transformed data for both pipelines. var transformedData_default = default_pipeline.Fit(trainData) diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/AveragedPerceptron.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/AveragedPerceptron.cs index fd24a32b39..02ebc2fd0c 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/AveragedPerceptron.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/AveragedPerceptron.cs @@ -33,7 +33,7 @@ public static void Example() // Create testing data. Use different random seed to make it different // from training data. var testData = mlContext.Data - .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123)); + .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. 
var transformedTestData = model.Transform(testData); @@ -45,7 +45,7 @@ public static void Example() // Print 5 predictions. foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -54,13 +54,13 @@ public static void Example() // Label: True, Prediction: True // Label: True, Prediction: False // Label: False, Prediction: False - + // Evaluate the overall metrics. var metrics = mlContext.BinaryClassification .EvaluateNonCalibrated(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Accuracy: 0.72 // AUC: 0.79 @@ -82,7 +82,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); @@ -99,7 +99,7 @@ public static void Example() Features = Enumerable.Repeat(label, 50) .Select(x => x ? randomFloat() : randomFloat() + 0.1f).ToArray() - + }; } } @@ -128,7 +128,7 @@ private static void PrintMetrics(BinaryClassificationMetrics metrics) Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); - Console.WriteLine($"Negative Precision: " + + Console.WriteLine($"Negative Precision: " + $"{metrics.NegativePrecision:F2}"); Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/AveragedPerceptronWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/AveragedPerceptronWithOptions.cs index 8f31f474e4..5c811da27f 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/AveragedPerceptronWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/AveragedPerceptronWithOptions.cs @@ -44,7 +44,7 @@ public static void Example() // Create 
testing data. Use different random seed to make it different // from training data. var testData = mlContext.Data - .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123)); + .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. var transformedTestData = model.Transform(testData); @@ -56,7 +56,7 @@ public static void Example() // Print 5 predictions. foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -65,13 +65,13 @@ public static void Example() // Label: True, Prediction: True // Label: True, Prediction: True // Label: False, Prediction: False - + // Evaluate the overall metrics. var metrics = mlContext.BinaryClassification .EvaluateNonCalibrated(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Accuracy: 0.89 // AUC: 0.96 @@ -93,7 +93,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); @@ -110,7 +110,7 @@ public static void Example() Features = Enumerable.Repeat(label, 50) .Select(x => x ? 
randomFloat() : randomFloat() + 0.1f).ToArray() - + }; } } @@ -139,7 +139,7 @@ private static void PrintMetrics(BinaryClassificationMetrics metrics) Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); - Console.WriteLine($"Negative Precision: " + + Console.WriteLine($"Negative Precision: " + $"{metrics.NegativePrecision:F2}"); Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/FactorizationMachine.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/FactorizationMachine.cs index 4e8bec4f4e..e8efb8d883 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/FactorizationMachine.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/FactorizationMachine.cs @@ -41,7 +41,7 @@ public static void Example() // Create testing data. Use different random seed to make it different // from training data. var testData = mlContext.Data - .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123)); + .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. var transformedTestData = model.Transform(testData); @@ -53,7 +53,7 @@ public static void Example() // Print 5 predictions. foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -62,13 +62,13 @@ public static void Example() // Label: True, Prediction: False // Label: True, Prediction: False // Label: False, Prediction: False - + // Evaluate the overall metrics. 
var metrics = mlContext.BinaryClassification .Evaluate(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Accuracy: 0.55 // AUC: 0.54 @@ -90,7 +90,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); @@ -107,7 +107,7 @@ public static void Example() Features = Enumerable.Repeat(label, 50) .Select(x => x ? randomFloat() : randomFloat() + 0.1f).ToArray() - + }; } } @@ -136,7 +136,7 @@ private static void PrintMetrics(BinaryClassificationMetrics metrics) Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); - Console.WriteLine($"Negative Precision: " + + Console.WriteLine($"Negative Precision: " + $"{metrics.NegativePrecision:F2}"); Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/FastForest.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/FastForest.cs index f33feb7063..f174a676f7 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/FastForest.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/FastForest.cs @@ -36,7 +36,7 @@ public static void Example() // Create testing data. Use different random seed to make it different // from training data. var testData = mlContext.Data - .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123)); + .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. var transformedTestData = model.Transform(testData); @@ -48,7 +48,7 @@ public static void Example() // Print 5 predictions. 
foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -57,13 +57,13 @@ public static void Example() // Label: True, Prediction: True // Label: True, Prediction: True // Label: False, Prediction: False - + // Evaluate the overall metrics. var metrics = mlContext.BinaryClassification .EvaluateNonCalibrated(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Accuracy: 0.74 // AUC: 0.83 @@ -85,7 +85,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); @@ -102,7 +102,7 @@ public static void Example() Features = Enumerable.Repeat(label, 50) .Select(x => x ? randomFloat() : randomFloat() + 0.03f).ToArray() - + }; } } @@ -131,7 +131,7 @@ private static void PrintMetrics(BinaryClassificationMetrics metrics) Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); - Console.WriteLine($"Negative Precision: " + + Console.WriteLine($"Negative Precision: " + $"{metrics.NegativePrecision:F2}"); Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/FastForestWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/FastForestWithOptions.cs index c5bf4d5366..0bb450c7bf 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/FastForestWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/FastForestWithOptions.cs @@ -48,7 +48,7 @@ public static void Example() // Create testing data. Use different random seed to make it different // from training data. 
var testData = mlContext.Data - .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123)); + .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. var transformedTestData = model.Transform(testData); @@ -60,7 +60,7 @@ public static void Example() // Print 5 predictions. foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -69,13 +69,13 @@ public static void Example() // Label: True, Prediction: True // Label: True, Prediction: True // Label: False, Prediction: True - + // Evaluate the overall metrics. var metrics = mlContext.BinaryClassification .EvaluateNonCalibrated(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Accuracy: 0.73 // AUC: 0.81 @@ -97,7 +97,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); @@ -114,7 +114,7 @@ public static void Example() Features = Enumerable.Repeat(label, 50) .Select(x => x ? 
randomFloat() : randomFloat() + 0.03f).ToArray() - + }; } } @@ -143,7 +143,7 @@ private static void PrintMetrics(BinaryClassificationMetrics metrics) Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); - Console.WriteLine($"Negative Precision: " + + Console.WriteLine($"Negative Precision: " + $"{metrics.NegativePrecision:F2}"); Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/FastTree.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/FastTree.cs index f50b8f9732..9188a2cb52 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/FastTree.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/FastTree.cs @@ -36,7 +36,7 @@ public static void Example() // Create testing data. Use different random seed to make it different // from training data. var testData = mlContext.Data - .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123)); + .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. var transformedTestData = model.Transform(testData); @@ -48,7 +48,7 @@ public static void Example() // Print 5 predictions. foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -57,13 +57,13 @@ public static void Example() // Label: True, Prediction: True // Label: True, Prediction: True // Label: False, Prediction: False - + // Evaluate the overall metrics. 
var metrics = mlContext.BinaryClassification .Evaluate(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Accuracy: 0.81 // AUC: 0.91 @@ -88,7 +88,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); @@ -105,7 +105,7 @@ public static void Example() Features = Enumerable.Repeat(label, 50) .Select(x => x ? randomFloat() : randomFloat() + 0.03f).ToArray() - + }; } } @@ -134,7 +134,7 @@ private static void PrintMetrics(BinaryClassificationMetrics metrics) Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); - Console.WriteLine($"Negative Precision: " + + Console.WriteLine($"Negative Precision: " + $"{metrics.NegativePrecision:F2}"); Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/FastTreeWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/FastTreeWithOptions.cs index 87e894d903..eb65055de6 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/FastTreeWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/FastTreeWithOptions.cs @@ -48,7 +48,7 @@ public static void Example() // Create testing data. Use different random seed to make it different // from training data. var testData = mlContext.Data - .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123)); + .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. var transformedTestData = model.Transform(testData); @@ -60,7 +60,7 @@ public static void Example() // Print 5 predictions. 
foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -69,13 +69,13 @@ public static void Example() // Label: True, Prediction: True // Label: True, Prediction: True // Label: False, Prediction: False - + // Evaluate the overall metrics. var metrics = mlContext.BinaryClassification .Evaluate(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Accuracy: 0.78 // AUC: 0.88 @@ -100,7 +100,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); @@ -117,7 +117,7 @@ public static void Example() Features = Enumerable.Repeat(label, 50) .Select(x => x ? randomFloat() : randomFloat() + 0.03f).ToArray() - + }; } } @@ -146,7 +146,7 @@ private static void PrintMetrics(BinaryClassificationMetrics metrics) Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); - Console.WriteLine($"Negative Precision: " + + Console.WriteLine($"Negative Precision: " + $"{metrics.NegativePrecision:F2}"); Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/FieldAwareFactorizationMachine.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/FieldAwareFactorizationMachine.cs index af62175204..a01ab05074 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/FieldAwareFactorizationMachine.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/FieldAwareFactorizationMachine.cs @@ -39,7 +39,7 @@ public static void Example() new[] {nameof(DataPoint.Field0), nameof(DataPoint.Field1), nameof(DataPoint.Field2) }, // Specify binary label's column name. 
- nameof(DataPoint.Label) ); + nameof(DataPoint.Label)); // Train the model. var model = pipeline.Fit(trainingData); @@ -81,7 +81,7 @@ public static void Example() .CreatePredictionEngine(model); // Make some predictions. - foreach(var dataPoint in data.Take(5)) + foreach (var dataPoint in data.Take(5)) { var result = engine.Predict(dataPoint); Console.WriteLine($"Actual label: {dataPoint.Label}, " @@ -195,11 +195,11 @@ public class Result Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); - Console.WriteLine($"Negative Precision: " + + Console.WriteLine($"Negative Precision: " + $"{metrics.NegativePrecision:F2}"); Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); - Console.WriteLine($"Positive Precision: " + + Console.WriteLine($"Positive Precision: " + $"{metrics.PositivePrecision:F2}"); Console.WriteLine($"Positive Recall: {metrics.PositiveRecall:F2}"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/FieldAwareFactorizationMachineWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/FieldAwareFactorizationMachineWithOptions.cs index d0377015cf..bb4c45553c 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/FieldAwareFactorizationMachineWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/FieldAwareFactorizationMachineWithOptions.cs @@ -31,7 +31,7 @@ public static void Example() var options = new FieldAwareFactorizationMachineTrainer.Options { FeatureColumnName = nameof(DataPoint.Field0), - ExtraFeatureColumns = + ExtraFeatureColumns = new[] { nameof(DataPoint.Field1), nameof(DataPoint.Field2) }, LabelColumnName = nameof(DataPoint.Label), @@ -92,7 +92,7 @@ public static void Example() .CreatePredictionEngine(model); // Make some predictions. 
- foreach(var dataPoint in data.Take(5)) + foreach (var dataPoint in data.Take(5)) { var result = engine.Predict(dataPoint); Console.WriteLine($"Actual label: {dataPoint.Label}, " @@ -206,11 +206,11 @@ public class Result Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); - Console.WriteLine($"Negative Precision: " + + Console.WriteLine($"Negative Precision: " + $"{metrics.NegativePrecision:F2}"); Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); - Console.WriteLine($"Positive Precision: " + + Console.WriteLine($"Positive Precision: " + $"{metrics.PositivePrecision:F2}"); Console.WriteLine($"Positive Recall: {metrics.PositiveRecall:F2}"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/Gam.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/Gam.cs index d4ce855e3c..368e18debd 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/Gam.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/Gam.cs @@ -16,7 +16,7 @@ public static void Example() // exception tracking and logging, as a catalog of available operations // and as the source of randomness. var mlContext = new MLContext(); - + // Create the dataset. var samples = GenerateData(); @@ -43,7 +43,7 @@ public static void Example() // a technique called pruning to tune the model to the validation set // after training to improve generalization. var model = trainer.Fit(trainSet, validSet); - + // Extract the model parameters. var gam = model.Model.SubModel; @@ -147,7 +147,8 @@ private class Data for (int i = 0; i < numExamples; i++) { // Generate random, uncoupled features. - var data = new Data { + var data = new Data + { Features = new float[2] { centeredFloat(), centeredFloat() } }; // Compute the label from the shape functions and add noise. 
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/GamWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/GamWithOptions.cs index cdacd51b93..6e85040bb5 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/GamWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/GamWithOptions.cs @@ -40,7 +40,8 @@ public static void Example() // the default to slow down the gradient descent, and double the number // of iterations to compensate. var trainer = mlContext.BinaryClassification.Trainers.Gam( - new GamBinaryTrainer.Options { + new GamBinaryTrainer.Options + { NumberOfIterations = 19000, MaximumBinCountPerFeature = 16, LearningRate = 0.001 diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LbfgsLogisticRegression.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LbfgsLogisticRegression.cs index 5aece5a264..015f938bab 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LbfgsLogisticRegression.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LbfgsLogisticRegression.cs @@ -33,7 +33,7 @@ public static void Example() // Create testing data. Use different random seed to make it different // from training data. var testData = mlContext.Data - .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123)); + .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. var transformedTestData = model.Transform(testData); @@ -45,7 +45,7 @@ public static void Example() // Print 5 predictions. 
foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -54,13 +54,13 @@ public static void Example() // Label: True, Prediction: True // Label: True, Prediction: True // Label: False, Prediction: False - + // Evaluate the overall metrics. var metrics = mlContext.BinaryClassification .Evaluate(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Accuracy: 0.88 // AUC: 0.96 @@ -85,7 +85,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); @@ -102,7 +102,7 @@ public static void Example() Features = Enumerable.Repeat(label, 50) .Select(x => x ? randomFloat() : randomFloat() + 0.1f).ToArray() - + }; } } @@ -131,7 +131,7 @@ private static void PrintMetrics(BinaryClassificationMetrics metrics) Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); - Console.WriteLine($"Negative Precision: " + + Console.WriteLine($"Negative Precision: " + $"{metrics.NegativePrecision:F2}"); Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LbfgsLogisticRegressionWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LbfgsLogisticRegressionWithOptions.cs index fe2fcb14ab..5a53dd2077 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LbfgsLogisticRegressionWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LbfgsLogisticRegressionWithOptions.cs @@ -42,7 +42,7 @@ public static void Example() // Create testing data. Use different random seed to make it different // from training data. 
var testData = mlContext.Data - .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123)); + .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. var transformedTestData = model.Transform(testData); @@ -54,7 +54,7 @@ public static void Example() // Print 5 predictions. foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -63,13 +63,13 @@ public static void Example() // Label: True, Prediction: True // Label: True, Prediction: True // Label: False, Prediction: False - + // Evaluate the overall metrics. var metrics = mlContext.BinaryClassification .Evaluate(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Accuracy: 0.87 // AUC: 0.96 @@ -94,7 +94,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); @@ -111,7 +111,7 @@ public static void Example() Features = Enumerable.Repeat(label, 50) .Select(x => x ? 
randomFloat() : randomFloat() + 0.1f).ToArray() - + }; } } @@ -140,7 +140,7 @@ private static void PrintMetrics(BinaryClassificationMetrics metrics) Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); - Console.WriteLine($"Negative Precision: " + + Console.WriteLine($"Negative Precision: " + $"{metrics.NegativePrecision:F2}"); Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LdSvm.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LdSvm.cs index f2091e99f9..3f0cc12c4a 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LdSvm.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LdSvm.cs @@ -33,7 +33,7 @@ public static void Example() // Create testing data. Use different random seed to make it different // from training data. var testData = mlContext.Data - .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123)); + .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. var transformedTestData = model.Transform(testData); @@ -45,7 +45,7 @@ public static void Example() // Print 5 predictions. foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -54,13 +54,13 @@ public static void Example() // Label: True, Prediction: True // Label: True, Prediction: True // Label: False, Prediction: False - + // Evaluate the overall metrics. 
var metrics = mlContext.BinaryClassification .EvaluateNonCalibrated(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Accuracy: 0.82 // AUC: 0.85 @@ -82,7 +82,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); @@ -99,7 +99,7 @@ public static void Example() Features = Enumerable.Repeat(label, 50) .Select(x => x ? randomFloat() : randomFloat() + 0.1f).ToArray() - + }; } } @@ -128,7 +128,7 @@ private static void PrintMetrics(BinaryClassificationMetrics metrics) Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); - Console.WriteLine($"Negative Precision: " + + Console.WriteLine($"Negative Precision: " + $"{metrics.NegativePrecision:F2}"); Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LdSvmWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LdSvmWithOptions.cs index 83234021d8..96934987ea 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LdSvmWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LdSvmWithOptions.cs @@ -42,7 +42,7 @@ public static void Example() // Create testing data. Use different random seed to make it different // from training data. var testData = mlContext.Data - .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123)); + .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. var transformedTestData = model.Transform(testData); @@ -54,7 +54,7 @@ public static void Example() // Print 5 predictions. 
foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -63,13 +63,13 @@ public static void Example() // Label: True, Prediction: True // Label: True, Prediction: True // Label: False, Prediction: False - + // Evaluate the overall metrics. var metrics = mlContext.BinaryClassification .EvaluateNonCalibrated(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Accuracy: 0.80 // AUC: 0.89 @@ -91,7 +91,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); @@ -108,7 +108,7 @@ public static void Example() Features = Enumerable.Repeat(label, 50) .Select(x => x ? randomFloat() : randomFloat() + 0.1f).ToArray() - + }; } } @@ -137,7 +137,7 @@ private static void PrintMetrics(BinaryClassificationMetrics metrics) Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); - Console.WriteLine($"Negative Precision: " + + Console.WriteLine($"Negative Precision: " + $"{metrics.NegativePrecision:F2}"); Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LightGbm.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LightGbm.cs index c89a4b2b2c..fa73f96cb5 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LightGbm.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LightGbm.cs @@ -36,7 +36,7 @@ public static void Example() // Create testing data. Use different random seed to make it different // from training data. 
var testData = mlContext.Data - .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123)); + .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. var transformedTestData = model.Transform(testData); @@ -48,7 +48,7 @@ public static void Example() // Print 5 predictions. foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -57,13 +57,13 @@ public static void Example() // Label: True, Prediction: True // Label: True, Prediction: True // Label: False, Prediction: False - + // Evaluate the overall metrics. var metrics = mlContext.BinaryClassification .Evaluate(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Accuracy: 0.77 // AUC: 0.85 @@ -85,7 +85,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); @@ -102,7 +102,7 @@ public static void Example() Features = Enumerable.Repeat(label, 50) .Select(x => x ? 
randomFloat() : randomFloat() + 0.03f).ToArray() - + }; } } @@ -131,7 +131,7 @@ private static void PrintMetrics(BinaryClassificationMetrics metrics) Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); - Console.WriteLine($"Negative Precision: " + + Console.WriteLine($"Negative Precision: " + $"{metrics.NegativePrecision:F2}"); Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LightGbmWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LightGbmWithOptions.cs index 5df0a59ee5..ea7834075a 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LightGbmWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LightGbmWithOptions.cs @@ -47,7 +47,7 @@ public static void Example() // Create testing data. Use different random seed to make it different // from training data. var testData = mlContext.Data - .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123)); + .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. var transformedTestData = model.Transform(testData); @@ -59,7 +59,7 @@ public static void Example() // Print 5 predictions. foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -68,13 +68,13 @@ public static void Example() // Label: True, Prediction: True // Label: True, Prediction: True // Label: False, Prediction: False - + // Evaluate the overall metrics. 
var metrics = mlContext.BinaryClassification .Evaluate(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Accuracy: 0.71 // AUC: 0.76 @@ -96,7 +96,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); @@ -113,7 +113,7 @@ public static void Example() Features = Enumerable.Repeat(label, 50) .Select(x => x ? randomFloat() : randomFloat() + 0.03f).ToArray() - + }; } } @@ -142,7 +142,7 @@ private static void PrintMetrics(BinaryClassificationMetrics metrics) Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); - Console.WriteLine($"Negative Precision: " + + Console.WriteLine($"Negative Precision: " + $"{metrics.NegativePrecision:F2}"); Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LinearSvm.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LinearSvm.cs index f47279c331..2371b0ef4e 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LinearSvm.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LinearSvm.cs @@ -33,7 +33,7 @@ public static void Example() // Create testing data. Use different random seed to make it different // from training data. var testData = mlContext.Data - .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123)); + .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. var transformedTestData = model.Transform(testData); @@ -45,7 +45,7 @@ public static void Example() // Print 5 predictions. 
foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -54,13 +54,13 @@ public static void Example() // Label: True, Prediction: True // Label: True, Prediction: True // Label: False, Prediction: True - + // Evaluate the overall metrics. var metrics = mlContext.BinaryClassification .EvaluateNonCalibrated(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Accuracy: 0.73 // AUC: 0.83 @@ -82,7 +82,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); @@ -99,7 +99,7 @@ public static void Example() Features = Enumerable.Repeat(label, 50) .Select(x => x ? randomFloat() : randomFloat() + 0.1f).ToArray() - + }; } } @@ -128,7 +128,7 @@ private static void PrintMetrics(BinaryClassificationMetrics metrics) Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); - Console.WriteLine($"Negative Precision: " + + Console.WriteLine($"Negative Precision: " + $"{metrics.NegativePrecision:F2}"); Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LinearSvmWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LinearSvmWithOptions.cs index 857ac93439..0088dd6613 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LinearSvmWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LinearSvmWithOptions.cs @@ -42,7 +42,7 @@ public static void Example() // Create testing data. Use different random seed to make it different // from training data. 
var testData = mlContext.Data - .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123)); + .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. var transformedTestData = model.Transform(testData); @@ -54,7 +54,7 @@ public static void Example() // Print 5 predictions. foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -63,13 +63,13 @@ public static void Example() // Label: True, Prediction: True // Label: True, Prediction: True // Label: False, Prediction: False - + // Evaluate the overall metrics. var metrics = mlContext.BinaryClassification .EvaluateNonCalibrated(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Accuracy: 0.85 // AUC: 0.95 @@ -91,7 +91,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); @@ -108,7 +108,7 @@ public static void Example() Features = Enumerable.Repeat(label, 50) .Select(x => x ? 
randomFloat() : randomFloat() + 0.1f).ToArray() - + }; } } @@ -137,7 +137,7 @@ private static void PrintMetrics(BinaryClassificationMetrics metrics) Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); - Console.WriteLine($"Negative Precision: " + + Console.WriteLine($"Negative Precision: " + $"{metrics.NegativePrecision:F2}"); Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/PermutationFeatureImportance.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/PermutationFeatureImportance.cs index f8db811758..7a9d18f564 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/PermutationFeatureImportance.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/PermutationFeatureImportance.cs @@ -12,7 +12,7 @@ public static void Example() // Create a new context for ML.NET operations. It can be used for // exception tracking and logging, as a catalog of available operations // and as the source of randomness. - var mlContext = new MLContext(seed:1); + var mlContext = new MLContext(seed: 1); // Create sample data. var samples = GenerateData(); @@ -48,7 +48,7 @@ public static void Example() // Now let's look at which features are most important to the model // overall. Get the feature indices sorted by their impact on AUC. var sortedIndices = permutationMetrics - .Select((metrics, index) => new { index, metrics.AreaUnderRocCurve}) + .Select((metrics, index) => new { index, metrics.AreaUnderRocCurve }) .OrderByDescending( feature => Math.Abs(feature.AreaUnderRocCurve.Mean)) .Select(feature => feature.index); @@ -107,7 +107,7 @@ private class Data }; // Create a noisy label. 
- var value = (float)(bias + weight1 * data.Feature1 + weight2 * + var value = (float)(bias + weight1 * data.Feature1 + weight2 * data.Feature2 + rng.NextDouble() - 0.5); data.Label = Sigmoid(value) > 0.5; diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/PermutationFeatureImportanceLoadFromDisk.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/PermutationFeatureImportanceLoadFromDisk.cs index d0c41484d4..b631f1079b 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/PermutationFeatureImportanceLoadFromDisk.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/PermutationFeatureImportanceLoadFromDisk.cs @@ -103,4 +103,4 @@ private class Data private static double Sigmoid(double x) => 1.0 / (1.0 + Math.Exp(-1 * x)); } -} \ No newline at end of file +} diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/PriorTrainer.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/PriorTrainer.cs index be54597f13..e2c0da3f32 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/PriorTrainer.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/PriorTrainer.cs @@ -33,7 +33,7 @@ public static void Example() // Create testing data. Use different random seed to make it different // from training data. var testData = mlContext.Data - .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123)); + .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. var transformedTestData = model.Transform(testData); @@ -45,7 +45,7 @@ public static void Example() // Print 5 predictions. 
foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -54,13 +54,13 @@ public static void Example() // Label: True, Prediction: True // Label: True, Prediction: True // Label: False, Prediction: True - + // Evaluate the overall metrics. var metrics = mlContext.BinaryClassification .Evaluate(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Accuracy: 0.68 // AUC: 0.50 (this is expected for Prior trainer) @@ -82,7 +82,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); @@ -99,7 +99,7 @@ public static void Example() Features = Enumerable.Repeat(label, 50) .Select(x => x ? randomFloat() : randomFloat() + 0.3f).ToArray() - + }; } } @@ -128,7 +128,7 @@ private static void PrintMetrics(BinaryClassificationMetrics metrics) Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); - Console.WriteLine($"Negative Precision: " + + Console.WriteLine($"Negative Precision: " + $"{metrics.NegativePrecision:F2}"); Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SdcaLogisticRegression.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SdcaLogisticRegression.cs index 521c6e671f..0a6e238610 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SdcaLogisticRegression.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SdcaLogisticRegression.cs @@ -41,7 +41,7 @@ public static void Example() // Create testing data. Use different random seed to make it different // from training data. 
var testData = mlContext.Data - .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123)); + .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. var transformedTestData = model.Transform(testData); @@ -53,7 +53,7 @@ public static void Example() // Print 5 predictions. foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -62,13 +62,13 @@ public static void Example() // Label: True, Prediction: True // Label: True, Prediction: True // Label: False, Prediction: True - + // Evaluate the overall metrics. var metrics = mlContext.BinaryClassification .Evaluate(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Accuracy: 0.63 // AUC: 0.70 @@ -90,7 +90,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); @@ -107,7 +107,7 @@ public static void Example() Features = Enumerable.Repeat(label, 50) .Select(x => x ? 
randomFloat() : randomFloat() + 0.03f).ToArray() - + }; } } @@ -136,7 +136,7 @@ private static void PrintMetrics(BinaryClassificationMetrics metrics) Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); - Console.WriteLine($"Negative Precision: " + + Console.WriteLine($"Negative Precision: " + $"{metrics.NegativePrecision:F2}"); Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SdcaLogisticRegressionWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SdcaLogisticRegressionWithOptions.cs index 9ef70a2193..c6402fa585 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SdcaLogisticRegressionWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SdcaLogisticRegressionWithOptions.cs @@ -53,7 +53,7 @@ public static void Example() // Create testing data. Use different random seed to make it different // from training data. var testData = mlContext.Data - .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123)); + .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. var transformedTestData = model.Transform(testData); @@ -65,7 +65,7 @@ public static void Example() // Print 5 predictions. foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -74,13 +74,13 @@ public static void Example() // Label: True, Prediction: True // Label: True, Prediction: True // Label: False, Prediction: True - + // Evaluate the overall metrics. 
var metrics = mlContext.BinaryClassification .Evaluate(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Accuracy: 0.60 // AUC: 0.67 @@ -102,7 +102,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); @@ -119,7 +119,7 @@ public static void Example() Features = Enumerable.Repeat(label, 50) .Select(x => x ? randomFloat() : randomFloat() + 0.03f).ToArray() - + }; } } @@ -148,7 +148,7 @@ private static void PrintMetrics(BinaryClassificationMetrics metrics) Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); - Console.WriteLine($"Negative Precision: " + + Console.WriteLine($"Negative Precision: " + $"{metrics.NegativePrecision:F2}"); Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SdcaNonCalibrated.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SdcaNonCalibrated.cs index 73107bb4d7..88acaae6d9 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SdcaNonCalibrated.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SdcaNonCalibrated.cs @@ -41,7 +41,7 @@ public static void Example() // Create testing data. Use different random seed to make it different // from training data. var testData = mlContext.Data - .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123)); + .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. var transformedTestData = model.Transform(testData); @@ -53,7 +53,7 @@ public static void Example() // Print 5 predictions. 
foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -62,13 +62,13 @@ public static void Example() // Label: True, Prediction: True // Label: True, Prediction: True // Label: False, Prediction: True - + // Evaluate the overall metrics. var metrics = mlContext.BinaryClassification .EvaluateNonCalibrated(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Accuracy: 0.65 // AUC: 0.69 @@ -89,7 +89,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); @@ -106,7 +106,7 @@ public static void Example() Features = Enumerable.Repeat(label, 50) .Select(x => x ? randomFloat() : randomFloat() + 0.03f).ToArray() - + }; } } @@ -135,7 +135,7 @@ private static void PrintMetrics(BinaryClassificationMetrics metrics) Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); - Console.WriteLine($"Negative Precision: " + + Console.WriteLine($"Negative Precision: " + $"{metrics.NegativePrecision:F2}"); Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SdcaNonCalibratedWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SdcaNonCalibratedWithOptions.cs index f08d65eed1..7e5c409b2c 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SdcaNonCalibratedWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SdcaNonCalibratedWithOptions.cs @@ -55,7 +55,7 @@ public static void Example() // Create testing data. Use different random seed to make it different // from training data. 
var testData = mlContext.Data - .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123)); + .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. var transformedTestData = model.Transform(testData); @@ -67,7 +67,7 @@ public static void Example() // Print 5 predictions. foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -76,13 +76,13 @@ public static void Example() // Label: True, Prediction: True // Label: True, Prediction: True // Label: False, Prediction: True - + // Evaluate the overall metrics. var metrics = mlContext.BinaryClassification .EvaluateNonCalibrated(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Accuracy: 0.61 // AUC: 0.67 @@ -104,7 +104,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); @@ -121,7 +121,7 @@ public static void Example() Features = Enumerable.Repeat(label, 50) .Select(x => x ? 
randomFloat() : randomFloat() + 0.03f).ToArray() - + }; } } @@ -150,7 +150,7 @@ private static void PrintMetrics(BinaryClassificationMetrics metrics) Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); - Console.WriteLine($"Negative Precision: " + + Console.WriteLine($"Negative Precision: " + $"{metrics.NegativePrecision:F2}"); Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SgdCalibrated.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SgdCalibrated.cs index dcc28ad25c..900e15a292 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SgdCalibrated.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SgdCalibrated.cs @@ -33,7 +33,7 @@ public static void Example() // Create testing data. Use different random seed to make it different // from training data. var testData = mlContext.Data - .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123)); + .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. var transformedTestData = model.Transform(testData); @@ -45,7 +45,7 @@ public static void Example() // Print 5 predictions. foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -54,13 +54,13 @@ public static void Example() // Label: True, Prediction: True // Label: True, Prediction: True // Label: False, Prediction: False - + // Evaluate the overall metrics. 
var metrics = mlContext.BinaryClassification .Evaluate(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Accuracy: 0.60 // AUC: 0.63 @@ -82,7 +82,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); @@ -99,7 +99,7 @@ public static void Example() Features = Enumerable.Repeat(label, 50) .Select(x => x ? randomFloat() : randomFloat() + 0.03f).ToArray() - + }; } } @@ -128,7 +128,7 @@ private static void PrintMetrics(BinaryClassificationMetrics metrics) Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); - Console.WriteLine($"Negative Precision: " + + Console.WriteLine($"Negative Precision: " + $"{metrics.NegativePrecision:F2}"); Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SgdCalibratedWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SgdCalibratedWithOptions.cs index eea72de60a..3ace486e71 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SgdCalibratedWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SgdCalibratedWithOptions.cs @@ -45,7 +45,7 @@ public static void Example() // Create testing data. Use different random seed to make it different // from training data. var testData = mlContext.Data - .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123)); + .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. var transformedTestData = model.Transform(testData); @@ -57,7 +57,7 @@ public static void Example() // Print 5 predictions. 
foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -66,13 +66,13 @@ public static void Example() // Label: True, Prediction: True // Label: True, Prediction: True // Label: False, Prediction: False - + // Evaluate the overall metrics. var metrics = mlContext.BinaryClassification .Evaluate(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Accuracy: 0.60 // AUC: 0.65 @@ -94,7 +94,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); @@ -111,7 +111,7 @@ public static void Example() Features = Enumerable.Repeat(label, 50) .Select(x => x ? randomFloat() : randomFloat() + 0.03f).ToArray() - + }; } } @@ -140,7 +140,7 @@ private static void PrintMetrics(BinaryClassificationMetrics metrics) Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); - Console.WriteLine($"Negative Precision: " + + Console.WriteLine($"Negative Precision: " + $"{metrics.NegativePrecision:F2}"); Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SgdNonCalibrated.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SgdNonCalibrated.cs index e55f8d2d65..c159a5bd44 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SgdNonCalibrated.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SgdNonCalibrated.cs @@ -33,7 +33,7 @@ public static void Example() // Create testing data. Use different random seed to make it different // from training data. 
var testData = mlContext.Data - .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123)); + .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. var transformedTestData = model.Transform(testData); @@ -45,7 +45,7 @@ public static void Example() // Print 5 predictions. foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -54,13 +54,13 @@ public static void Example() // Label: True, Prediction: True // Label: True, Prediction: True // Label: False, Prediction: False - + // Evaluate the overall metrics. var metrics = mlContext.BinaryClassification .EvaluateNonCalibrated(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Accuracy: 0.60 // AUC: 0.63 @@ -82,7 +82,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); @@ -99,7 +99,7 @@ public static void Example() Features = Enumerable.Repeat(label, 50) .Select(x => x ? 
randomFloat() : randomFloat() + 0.03f).ToArray() - + }; } } @@ -128,7 +128,7 @@ private static void PrintMetrics(BinaryClassificationMetrics metrics) Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); - Console.WriteLine($"Negative Precision: " + + Console.WriteLine($"Negative Precision: " + $"{metrics.NegativePrecision:F2}"); Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SgdNonCalibratedWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SgdNonCalibratedWithOptions.cs index 1729433565..d05d3f2a4b 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SgdNonCalibratedWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SgdNonCalibratedWithOptions.cs @@ -42,7 +42,7 @@ public static void Example() // Create testing data. Use different random seed to make it different // from training data. var testData = mlContext.Data - .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123)); + .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. var transformedTestData = model.Transform(testData); @@ -54,7 +54,7 @@ public static void Example() // Print 5 predictions. foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -63,13 +63,13 @@ public static void Example() // Label: True, Prediction: True // Label: True, Prediction: True // Label: False, Prediction: False - + // Evaluate the overall metrics. 
var metrics = mlContext.BinaryClassification .EvaluateNonCalibrated(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Accuracy: 0.59 // AUC: 0.61 @@ -91,7 +91,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); @@ -108,7 +108,7 @@ public static void Example() Features = Enumerable.Repeat(label, 50) .Select(x => x ? randomFloat() : randomFloat() + 0.03f).ToArray() - + }; } } @@ -137,7 +137,7 @@ private static void PrintMetrics(BinaryClassificationMetrics metrics) Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); - Console.WriteLine($"Negative Precision: " + + Console.WriteLine($"Negative Precision: " + $"{metrics.NegativePrecision:F2}"); Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SymbolicSgdLogisticRegression.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SymbolicSgdLogisticRegression.cs index 172199df0c..e5002cea76 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SymbolicSgdLogisticRegression.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SymbolicSgdLogisticRegression.cs @@ -36,7 +36,7 @@ public static void Example() // Create testing data. Use different random seed to make it different // from training data. var testData = mlContext.Data - .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123)); + .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. var transformedTestData = model.Transform(testData); @@ -48,7 +48,7 @@ public static void Example() // Print 5 predictions. 
foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -57,13 +57,13 @@ public static void Example() // Label: True, Prediction: True // Label: True, Prediction: True // Label: False, Prediction: True - + // Evaluate the overall metrics. var metrics = mlContext.BinaryClassification .Evaluate(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Accuracy: 0.69 // AUC: 0.76 @@ -85,7 +85,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); @@ -102,7 +102,7 @@ public static void Example() Features = Enumerable.Repeat(label, 50) .Select(x => x ? randomFloat() : randomFloat() + 0.1f).ToArray() - + }; } } @@ -131,7 +131,7 @@ private static void PrintMetrics(BinaryClassificationMetrics metrics) Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); - Console.WriteLine($"Negative Precision: " + + Console.WriteLine($"Negative Precision: " + $"{metrics.NegativePrecision:F2}"); Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SymbolicSgdLogisticRegressionWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SymbolicSgdLogisticRegressionWithOptions.cs index a68fadc8c8..6614b08524 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SymbolicSgdLogisticRegressionWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/SymbolicSgdLogisticRegressionWithOptions.cs @@ -45,7 +45,7 @@ public static void Example() // Create testing data. Use different random seed to make it different // from training data. 
var testData = mlContext.Data - .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123)); + .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. var transformedTestData = model.Transform(testData); @@ -57,7 +57,7 @@ public static void Example() // Print 5 predictions. foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -66,13 +66,13 @@ public static void Example() // Label: True, Prediction: True // Label: True, Prediction: True // Label: False, Prediction: False - + // Evaluate the overall metrics. var metrics = mlContext.BinaryClassification .Evaluate(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Accuracy: 0.72 // AUC: 0.81 @@ -94,7 +94,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); @@ -111,7 +111,7 @@ public static void Example() Features = Enumerable.Repeat(label, 50) .Select(x => x ? 
randomFloat() : randomFloat() + 0.1f).ToArray() - + }; } } @@ -140,7 +140,7 @@ private static void PrintMetrics(BinaryClassificationMetrics metrics) Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); - Console.WriteLine($"Negative Precision: " + + Console.WriteLine($"Negative Precision: " + $"{metrics.NegativePrecision:F2}"); Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/ImageClassification/ImageClassificationDefault.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/ImageClassification/ImageClassificationDefault.cs index c0a691124d..c9ea3adc9a 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/ImageClassification/ImageClassificationDefault.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/ImageClassification/ImageClassificationDefault.cs @@ -63,7 +63,7 @@ public static void Example() // Create the ImageClassification pipeline by just passing the // input feature and label column name. 
var pipeline = mlContext.MulticlassClassification.Trainers - .ImageClassification(featureColumnName:"Image") + .ImageClassification(featureColumnName: "Image") .Append(mlContext.Transforms.Conversion.MapKeyToValue( outputColumnName: "PredictedLabel", inputColumnName: "PredictedLabel")); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/ImageClassification/LearningRateSchedulingCifarResnetTransferLearning.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/ImageClassification/LearningRateSchedulingCifarResnetTransferLearning.cs index 0f50d91ebd..43cfcda48c 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/ImageClassification/LearningRateSchedulingCifarResnetTransferLearning.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/ImageClassification/LearningRateSchedulingCifarResnetTransferLearning.cs @@ -50,7 +50,7 @@ public static void Example() // MapValueToKey : map 'string' type labels to keys // LoadImages : load raw images to "Image" column trainDataset = mlContext.Transforms.Conversion - .MapValueToKey("Label", keyOrdinality:Microsoft.ML.Transforms + .MapValueToKey("Label", keyOrdinality: Microsoft.ML.Transforms .ValueToKeyMappingEstimator.KeyOrdinality.ByValue) .Append(mlContext.Transforms.LoadRawImageBytes("Image", fullImagesetFolderPathTrain, "ImagePath")) @@ -165,7 +165,7 @@ public static void Example() { Image = testImages.First().Image }; - + // Predict on the single image. 
var prediction = predictionEngine.Predict(imageToPredict); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/ImageClassification/ResnetV2101TransferLearningEarlyStopping.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/ImageClassification/ResnetV2101TransferLearningEarlyStopping.cs index f17a3ca54b..1e2a5b525f 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/ImageClassification/ResnetV2101TransferLearningEarlyStopping.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/ImageClassification/ResnetV2101TransferLearningEarlyStopping.cs @@ -328,4 +328,4 @@ public class ImagePrediction public UInt32 PredictedLabel; } } -} \ No newline at end of file +} diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/ImageClassification/ResnetV2101TransferLearningTrainTestSplit.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/ImageClassification/ResnetV2101TransferLearningTrainTestSplit.cs index eddb601fb2..6fffbe0ff4 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/ImageClassification/ResnetV2101TransferLearningTrainTestSplit.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/ImageClassification/ResnetV2101TransferLearningTrainTestSplit.cs @@ -61,7 +61,7 @@ public static void Example() // Set the options for ImageClassification. var options = new ImageClassificationTrainer.Options() - { + { FeatureColumnName = "Image", LabelColumnName = "Label", // Just by changing/selecting InceptionV3/MobilenetV2/ResnetV250 @@ -74,14 +74,14 @@ public static void Example() MetricsCallback = (metrics) => Console.WriteLine(metrics), ValidationSet = testDataset, // Disable EarlyStopping to run to specified number of epochs. 
- EarlyStoppingCriteria =null + EarlyStoppingCriteria = null }; // Create the ImageClassification pipeline. var pipeline = mlContext.MulticlassClassification.Trainers. ImageClassification(options) .Append(mlContext.Transforms.Conversion.MapKeyToValue( - outputColumnName: "PredictedLabel", + outputColumnName: "PredictedLabel", inputColumnName: "PredictedLabel")); @@ -136,11 +136,11 @@ public static void Example() { // Create prediction function to try one prediction. var predictionEngine = mlContext.Model - .CreatePredictionEngine(trainedModel); // Load test images. - IEnumerable testImages = + IEnumerable testImages = LoadInMemoryImagesFromDirectory(imagesForPredictions, false); // Create an in-memory image object from the first image in the test data. @@ -209,8 +209,8 @@ public static void Example() } // Load In memory raw images from directory. - public static IEnumerable - LoadInMemoryImagesFromDirectory(string folder, + public static IEnumerable + LoadInMemoryImagesFromDirectory(string folder, bool useFolderNameAsLabel = true) { var files = Directory.GetFiles(folder, "*", diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/LbfgsMaximumEntropy.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/LbfgsMaximumEntropy.cs index 7130a7c9ea..8a7933f27b 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/LbfgsMaximumEntropy.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/LbfgsMaximumEntropy.cs @@ -50,7 +50,7 @@ public static void Example() // Look at 5 predictions foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -65,13 +65,13 @@ public static void Example() .Evaluate(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Micro Accuracy: 0.91 // Macro Accuracy: 0.91 // Log Loss: 
0.24 // Log Loss Reduction: 0.79 - + // Confusion table // ||======================== // PREDICTED || 0 | 1 | 2 | Recall @@ -86,7 +86,7 @@ public static void Example() // Generates random uniform doubles in [-0.5, 0.5) // range with labels 1, 2 or 3. private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/LbfgsMaximumEntropyWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/LbfgsMaximumEntropyWithOptions.cs index 597f34af06..bbde2eb8a7 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/LbfgsMaximumEntropyWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/LbfgsMaximumEntropyWithOptions.cs @@ -26,14 +26,14 @@ public static void Example() // Define trainer options. var options = new LbfgsMaximumEntropyMulticlassTrainer.Options - { - HistorySize = 50, - L1Regularization = 0.1f, - NumberOfThreads = 1 - }; + { + HistorySize = 50, + L1Regularization = 0.1f, + NumberOfThreads = 1 + }; // Define the trainer. - var pipeline = + var pipeline = // Convert the string labels into key types. mlContext.Transforms.Conversion.MapValueToKey("Label") // Apply LbfgsMaximumEntropy multiclass trainer. @@ -58,7 +58,7 @@ public static void Example() // Look at 5 predictions foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -73,7 +73,7 @@ public static void Example() .Evaluate(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Micro Accuracy: 0.91 // Macro Accuracy: 0.91 @@ -94,7 +94,7 @@ public static void Example() // Generates random uniform doubles in [-0.5, 0.5) // range with labels 1, 2 or 3. 
private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/LightGbm.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/LightGbm.cs index 54200705c6..6840db2516 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/LightGbm.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/LightGbm.cs @@ -53,7 +53,7 @@ public static void Example() // Look at 5 predictions foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -89,7 +89,7 @@ public static void Example() // Generates random uniform doubles in [-0.5, 0.5) // range with labels 1, 2 or 3. private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/LightGbmWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/LightGbmWithOptions.cs index 76337146dc..a992648f57 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/LightGbmWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/LightGbmWithOptions.cs @@ -29,16 +29,16 @@ public static void Example() // Define trainer options. var options = new LightGbmMulticlassTrainer.Options - { - Booster = new DartBooster.Options() - { - TreeDropFraction = 0.15, - XgboostDartMode = false - } - }; + { + Booster = new DartBooster.Options() + { + TreeDropFraction = 0.15, + XgboostDartMode = false + } + }; // Define the trainer. - var pipeline = + var pipeline = // Convert the string labels into key types. 
mlContext.Transforms.Conversion.MapValueToKey("Label") // Apply LightGbm multiclass trainer. @@ -63,7 +63,7 @@ public static void Example() // Look at 5 predictions foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -78,13 +78,13 @@ public static void Example() .Evaluate(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Micro Accuracy: 0.98 // Macro Accuracy: 0.98 // Log Loss: 0.07 // Log Loss Reduction: 0.94 - + // Confusion table // ||======================== // PREDICTED || 0 | 1 | 2 | Recall @@ -99,7 +99,7 @@ public static void Example() // Generates random uniform doubles in [-0.5, 0.5) // range with labels 1, 2 or 3. private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/LogLossPerClass.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/LogLossPerClass.cs index 8b1076f830..f69ff0c0a9 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/LogLossPerClass.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/LogLossPerClass.cs @@ -109,4 +109,4 @@ private class Prediction public uint PredictedLabel { get; set; } } } -} \ No newline at end of file +} diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/NaiveBayes.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/NaiveBayes.cs index 4dc7f44f9e..4949c4d66a 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/NaiveBayes.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/NaiveBayes.cs @@ -56,7 +56,7 @@ public static void Example() // Look at 5 predictions foreach (var p in 
predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -71,13 +71,13 @@ public static void Example() .Evaluate(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Micro Accuracy: 0.88 // Macro Accuracy: 0.88 // Log Loss: 34.54 // Log Loss Reduction: -30.47 - + // Confusion table // ||======================== // PREDICTED || 0 | 1 | 2 | Recall @@ -89,12 +89,12 @@ public static void Example() // Precision ||0.9467 |0.8735 |0.8061 | } - + // Generates random uniform doubles in [-0.5, 0.5) range with labels // 1, 2 or 3. For NaiveBayes values greater than zero are treated as true, // zero or less are treated as false. private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/OneVersusAll.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/OneVersusAll.cs index 40a040325c..b7849bb20f 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/OneVersusAll.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/OneVersusAll.cs @@ -51,7 +51,7 @@ public static void Example() // Look at 5 predictions foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -66,13 +66,13 @@ public static void Example() .Evaluate(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Micro Accuracy: 0.90 // Macro Accuracy: 0.90 // Log Loss: 0.36 // Log Loss Reduction: 0.68 - + // Confusion table // ||======================== // PREDICTED || 0 | 1 | 2 | Recall @@ -87,7 +87,7 @@ public static void Example() // Generates random uniform doubles in [-0.5, 0.5) // range with labels 1, 
2 or 3. private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/PairwiseCoupling.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/PairwiseCoupling.cs index be42ec3bbd..b88fa29cb7 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/PairwiseCoupling.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/PairwiseCoupling.cs @@ -51,7 +51,7 @@ public static void Example() // Look at 5 predictions foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -66,7 +66,7 @@ public static void Example() .Evaluate(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Micro Accuracy: 0.90 // Macro Accuracy: 0.90 @@ -87,7 +87,7 @@ public static void Example() // Generates random uniform doubles in [-0.5, 0.5) // range with labels 1, 2 or 3. 
private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/PermutationFeatureImportance.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/PermutationFeatureImportance.cs index 71d1c02106..6492c554ea 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/PermutationFeatureImportance.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/PermutationFeatureImportance.cs @@ -126,4 +126,4 @@ private class Data } } } -} \ No newline at end of file +} diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/PermutationFeatureImportanceLoadFromDisk.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/PermutationFeatureImportanceLoadFromDisk.cs index 975b209281..da34de5035 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/PermutationFeatureImportanceLoadFromDisk.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/PermutationFeatureImportanceLoadFromDisk.cs @@ -135,4 +135,4 @@ private class Data } } } -} \ No newline at end of file +} diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/SdcaMaximumEntropy.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/SdcaMaximumEntropy.cs index 2a83a84f3a..f4c059ee7e 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/SdcaMaximumEntropy.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/SdcaMaximumEntropy.cs @@ -58,7 +58,7 @@ public static void Example() // Look at 5 predictions foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: 
{p.PredictedLabel}"); // Expected output: @@ -73,7 +73,7 @@ public static void Example() .Evaluate(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Micro Accuracy: 0.91 // Macro Accuracy: 0.91 @@ -93,7 +93,7 @@ public static void Example() // Generates random uniform doubles in [-0.5, 0.5) // range with labels 1, 2 or 3. private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/SdcaMaximumEntropyWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/SdcaMaximumEntropyWithOptions.cs index 62ed191c0f..1d7ec9dc09 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/SdcaMaximumEntropyWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/SdcaMaximumEntropyWithOptions.cs @@ -42,7 +42,7 @@ public static void Example() }; // Define the trainer. - var pipeline = + var pipeline = // Convert the string labels into key types. mlContext.Transforms.Conversion.MapValueToKey("Label") // Apply SdcaMaximumEntropy multiclass trainer. @@ -67,7 +67,7 @@ public static void Example() // Look at 5 predictions foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -82,7 +82,7 @@ public static void Example() .Evaluate(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Micro Accuracy: 0.92 // Macro Accuracy: 0.92 @@ -103,7 +103,7 @@ public static void Example() // Generates random uniform doubles in [-0.5, 0.5) // range with labels 1, 2 or 3. 
private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/SdcaNonCalibrated.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/SdcaNonCalibrated.cs index 6a899f9432..d45f429134 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/SdcaNonCalibrated.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/SdcaNonCalibrated.cs @@ -58,7 +58,7 @@ public static void Example() // Look at 5 predictions foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -73,7 +73,7 @@ public static void Example() .Evaluate(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Micro Accuracy: 0.91 // Macro Accuracy: 0.91 @@ -94,7 +94,7 @@ public static void Example() // Generates random uniform doubles in [-0.5, 0.5) // range with labels 1, 2 or 3. private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/SdcaNonCalibratedWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/SdcaNonCalibratedWithOptions.cs index 6367e9fe21..f005763927 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/SdcaNonCalibratedWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/SdcaNonCalibratedWithOptions.cs @@ -42,7 +42,7 @@ public static void Example() }; // Define the trainer. - var pipeline = + var pipeline = // Convert the string labels into key types. 
mlContext.Transforms.Conversion.MapValueToKey("Label") // Apply SdcaNonCalibrated multiclass trainer. @@ -67,7 +67,7 @@ public static void Example() // Look at 5 predictions foreach (var p in predictions.Take(5)) - Console.WriteLine($"Label: {p.Label}, " + + Console.WriteLine($"Label: {p.Label}, " + $"Prediction: {p.PredictedLabel}"); // Expected output: @@ -82,7 +82,7 @@ public static void Example() .Evaluate(transformedTestData); PrintMetrics(metrics); - + // Expected output: // Micro Accuracy: 0.91 // Macro Accuracy: 0.91 @@ -103,7 +103,7 @@ public static void Example() // Generates random uniform doubles in [-0.5, 0.5) // range with labels 1, 2 or 3. private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Ranking/FastTree.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Ranking/FastTree.cs index 9952909c64..8b1afd4cbf 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Ranking/FastTree.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Ranking/FastTree.cs @@ -35,7 +35,7 @@ public static void Example() // Create testing data. Use different random seed to make it different // from training data. var testData = mlContext.Data.LoadFromEnumerable( - GenerateRandomDataPoints(500, seed:123)); + GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. var transformedTestData = model.Transform(testData); @@ -62,7 +62,7 @@ public static void Example() // Evaluate the overall metrics. var metrics = mlContext.Ranking.Evaluate(transformedTestData); PrintMetrics(metrics); - + // Expected output: // DCG: @1:41.95, @2:63.33, @3:75.65 // NDCG: @1:0.99, @2:0.98, @3:0.99 @@ -84,7 +84,7 @@ public static void Example() // For data points with larger labels, the feature values are // slightly increased by adding a constant. 
Features = Enumerable.Repeat(label, 50).Select( - x => randomFloat() + x * 0.1f).ToArray() + x => randomFloat() + x * 0.1f).ToArray() }; } } @@ -116,7 +116,7 @@ public static void PrintMetrics(RankingMetrics metrics) Console.WriteLine("DCG: " + string.Join(", ", metrics.DiscountedCumulativeGains.Select( (d, i) => (i + 1) + ":" + d + ":F2").ToArray())); - + Console.WriteLine("NDCG: " + string.Join(", ", metrics.NormalizedDiscountedCumulativeGains.Select( (d, i) => (i + 1) + ":" + d + ":F2").ToArray())); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Ranking/FastTreeWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Ranking/FastTreeWithOptions.cs index 5d575ec160..3836e67b3e 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Ranking/FastTreeWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Ranking/FastTreeWithOptions.cs @@ -28,7 +28,7 @@ public static void Example() var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints); // Define trainer options. - var options = new FastTreeRankingTrainer.Options + var options = new FastTreeRankingTrainer.Options { // Use NdcgAt3 for early stopping. EarlyStoppingMetric = EarlyStoppingRankingMetric.NdcgAt3, @@ -49,7 +49,7 @@ public static void Example() // Create testing data. Use different random seed to make it different // from training data. var testData = mlContext.Data.LoadFromEnumerable( - GenerateRandomDataPoints(500, seed:123)); + GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. var transformedTestData = model.Transform(testData); @@ -76,7 +76,7 @@ public static void Example() // Evaluate the overall metrics. 
var metrics = mlContext.Ranking.Evaluate(transformedTestData); PrintMetrics(metrics); - + // Expected output: // DCG: @1:40.57, @2:61.21, @3:74.11 // NDCG: @1:0.96, @2:0.95, @3:0.97 diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Ranking/LightGbm.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Ranking/LightGbm.cs index d6b3945d2b..4b6e5fe1e8 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Ranking/LightGbm.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Ranking/LightGbm.cs @@ -33,9 +33,9 @@ public static void Example() var model = pipeline.Fit(trainingData); // Create testing data. Use different random seed to make it different - // from training data. + // from training data. var testData = mlContext.Data.LoadFromEnumerable( - GenerateRandomDataPoints(500, seed:123)); + GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. var transformedTestData = model.Transform(testData); @@ -62,7 +62,7 @@ public static void Example() // Evaluate the overall metrics. var metrics = mlContext.Ranking.Evaluate(transformedTestData); PrintMetrics(metrics); - + // Expected output: // DCG: @1:41.95, @2:63.76, @3:75.97 // NDCG: @1:0.99, @2:0.99, @3:0.99 @@ -84,7 +84,7 @@ public static void Example() // For data points with larger labels, the feature values are // slightly increased by adding a constant. Features = Enumerable.Repeat(label, 50).Select( - x => randomFloat() + x * 0.1f).ToArray() + x => randomFloat() + x * 0.1f).ToArray() }; } } diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Ranking/LightGbmWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Ranking/LightGbmWithOptions.cs index 4859cd7b81..90c7def55f 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Ranking/LightGbmWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Ranking/LightGbmWithOptions.cs @@ -50,7 +50,7 @@ public static void Example() // Create testing data. 
Use different random seed to make it different // from training data. var testData = mlContext.Data.LoadFromEnumerable( - GenerateRandomDataPoints(500, seed:123)); + GenerateRandomDataPoints(500, seed: 123)); // Run the model on test data set. var transformedTestData = model.Transform(testData); @@ -77,7 +77,7 @@ public static void Example() // Evaluate the overall metrics. var metrics = mlContext.Ranking.Evaluate(transformedTestData); PrintMetrics(metrics); - + // Expected output: // DCG: @1:28.83, @2:46.36, @3:56.18 // NDCG: @1:0.69, @2:0.72, @3:0.74 @@ -99,7 +99,7 @@ public static void Example() // For data points with larger labels, the feature values are // slightly increased by adding a constant. Features = Enumerable.Repeat(label, 50).Select( - x => randomFloat() + x * 0.1f).ToArray() + x => randomFloat() + x * 0.1f).ToArray() }; } } diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Ranking/PermutationFeatureImportance.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Ranking/PermutationFeatureImportance.cs index 04b82151ac..faf5f44cd1 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Ranking/PermutationFeatureImportance.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Ranking/PermutationFeatureImportance.cs @@ -12,7 +12,7 @@ public static void Example() // Create a new context for ML.NET operations. It can be used for // exception tracking and logging, as a catalog of available operations // and as the source of randomness. - var mlContext = new MLContext(seed:1); + var mlContext = new MLContext(seed: 1); // Create sample data. var samples = GenerateData(); @@ -48,8 +48,11 @@ public static void Example() // Now let's look at which features are most important to the model // overall. Get the feature indices sorted by their impact on NDCG@1. 
- var sortedIndices = permutationMetrics.Select((metrics, index) => new { - index, metrics.NormalizedDiscountedCumulativeGains}) + var sortedIndices = permutationMetrics.Select((metrics, index) => new + { + index, + metrics.NormalizedDiscountedCumulativeGains + }) .OrderByDescending(feature => Math.Abs( feature.NormalizedDiscountedCumulativeGains[0].Mean)) @@ -120,7 +123,7 @@ private class Data }; // Create a noisy label. - var value = (float)(bias + weight1 * data.Feature1 + weight2 * + var value = (float)(bias + weight1 * data.Feature1 + weight2 * data.Feature2 + rng.NextDouble() - 0.5); if (value < max / 3) data.Label = 0; diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Ranking/PermutationFeatureImportanceLoadFromDisk.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Ranking/PermutationFeatureImportanceLoadFromDisk.cs index e12e17554b..b81b5b7bcc 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Ranking/PermutationFeatureImportanceLoadFromDisk.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Ranking/PermutationFeatureImportanceLoadFromDisk.cs @@ -55,7 +55,8 @@ public static void Example() // Now let's look at which features are most important to the model // overall. Get the feature indices sorted by their impact on NDCG@1. 
- var sortedIndices = permutationMetrics.Select((metrics, index) => new { + var sortedIndices = permutationMetrics.Select((metrics, index) => new + { index, metrics.NormalizedDiscountedCumulativeGains }) diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Recommendation/MatrixFactorization.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Recommendation/MatrixFactorization.cs index 280e58de1d..7d062833ca 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Recommendation/MatrixFactorization.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Recommendation/MatrixFactorization.cs @@ -8,7 +8,7 @@ namespace Samples.Dynamic.Trainers.Recommendation { public static class MatrixFactorization { - + // This example requires installation of additional nuget package at // for Microsoft.ML.Recommender at // https://www.nuget.org/packages/Microsoft.ML.Recommender/ @@ -92,8 +92,12 @@ private static List GenerateMatrix() var dataMatrix = new List(); for (uint i = 0; i < MatrixColumnCount; ++i) for (uint j = 0; j < MatrixRowCount; ++j) - dataMatrix.Add(new MatrixElement() { MatrixColumnIndex = i, - MatrixRowIndex = j, Value = (i + j) % 5 }); + dataMatrix.Add(new MatrixElement() + { + MatrixColumnIndex = i, + MatrixRowIndex = j, + Value = (i + j) % 5 + }); return dataMatrix; } diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Recommendation/MatrixFactorizationWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Recommendation/MatrixFactorizationWithOptions.cs index d0dbae414b..9fb7c82aca 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Recommendation/MatrixFactorizationWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Recommendation/MatrixFactorizationWithOptions.cs @@ -9,7 +9,7 @@ namespace Samples.Dynamic.Trainers.Recommendation { public static class MatrixFactorizationWithOptions { - + // This example requires installation of additional nuget package at // for 
Microsoft.ML.Recommender at // https://www.nuget.org/packages/Microsoft.ML.Recommender/ @@ -116,8 +116,12 @@ private static List GenerateMatrix() var dataMatrix = new List(); for (uint i = 0; i < MatrixColumnCount; ++i) for (uint j = 0; j < MatrixRowCount; ++j) - dataMatrix.Add(new MatrixElement() { MatrixColumnIndex = i, - MatrixRowIndex = j, Value = (i + j) % 5 }); + dataMatrix.Add(new MatrixElement() + { + MatrixColumnIndex = i, + MatrixRowIndex = j, + Value = (i + j) % 5 + }); return dataMatrix; } diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Recommendation/OneClassMatrixFactorizationWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Recommendation/OneClassMatrixFactorizationWithOptions.cs index 2553efe5b0..66b23ae613 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Recommendation/OneClassMatrixFactorizationWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Recommendation/OneClassMatrixFactorizationWithOptions.cs @@ -1,4 +1,4 @@ -using System; +using System; using System.Collections.Generic; using System.Linq; using Microsoft.ML; @@ -45,12 +45,12 @@ public static void Example() { MatrixColumnIndexColumnName = nameof( MatrixElement.MatrixColumnIndex), - MatrixRowIndexColumnName = nameof(MatrixElement.MatrixRowIndex), - LabelColumnName = nameof(MatrixElement.Value), - NumberOfIterations = 20, - NumberOfThreads = 8, - ApproximationRank = 32, - Alpha = 1, + MatrixRowIndexColumnName = nameof(MatrixElement.MatrixRowIndex), + LabelColumnName = nameof(MatrixElement.Value), + NumberOfIterations = 20, + NumberOfThreads = 8, + ApproximationRank = 32, + Alpha = 1, // The desired values of matrix elements not specified in the // training set. 
If the training set doesn't tell the value at the @@ -204,4 +204,4 @@ private class MatrixElement } } } -} \ No newline at end of file +} diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForest.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForest.cs index 3cd835f36b..dcdc9ebe48 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForest.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForest.cs @@ -70,7 +70,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); for (int i = 0; i < count; i++) diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForestWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForestWithOptions.cs index 8dff60a46d..cc069c699d 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForestWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastForestWithOptions.cs @@ -83,7 +83,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); for (int i = 0; i < count; i++) diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTree.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTree.cs index 749e855fab..8172b572cc 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTree.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTree.cs @@ -70,7 +70,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); for (int i = 0; i < count; i++) diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedie.cs 
b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedie.cs index 0b7efd781a..86d28c949d 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedie.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedie.cs @@ -70,7 +70,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); for (int i = 0; i < count; i++) diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedieWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedieWithOptions.cs index 8bda309469..91f1d0b861 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedieWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeTweedieWithOptions.cs @@ -85,7 +85,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); for (int i = 0; i < count; i++) diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeWithOptions.cs index d9d82d259c..85b9b2fa35 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/FastTreeWithOptions.cs @@ -34,7 +34,7 @@ public static void Example() FeatureColumnName = nameof(DataPoint.Features), // Use L2-norm for early stopping. If the gradient's L2-norm is // smaller than an auto-computed value, training process will stop. - EarlyStoppingMetric = + EarlyStoppingMetric = Microsoft.ML.Trainers.FastTree.EarlyStoppingMetric.L2Norm, // Create a simpler model by penalizing usage of new features. 
@@ -86,7 +86,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); for (int i = 0; i < count; i++) diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Gam.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Gam.cs index f62cf56d54..a13db16811 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Gam.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Gam.cs @@ -70,7 +70,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); for (int i = 0; i < count; i++) diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamWithOptions.cs index c83ccea1c6..127640a6b4 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/GamWithOptions.cs @@ -81,7 +81,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); for (int i = 0; i < count; i++) diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegression.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegression.cs index a95d542607..492bf91c74 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegression.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegression.cs @@ -68,7 +68,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); for (int i = 0; i < count; i++) diff --git 
a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegressionWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegressionWithOptions.cs index 421b8ab8f9..0f5761c1df 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegressionWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LbfgsPoissonRegressionWithOptions.cs @@ -82,7 +82,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); for (int i = 0; i < count; i++) diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbm.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbm.cs index 66527d5cf2..9733e986a2 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbm.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbm.cs @@ -71,7 +71,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); for (int i = 0; i < count; i++) diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptions.cs index 1b9bf4b1d0..39a61eb2f8 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptions.cs @@ -44,7 +44,7 @@ public static void Example() { TopRate = 0.3, OtherRate = 0.2 - } + } }; // Define the trainer. 
@@ -90,7 +90,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); for (int i = 0; i < count; i++) diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptionsAdvanced.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptionsAdvanced.cs index ea98c44670..3c68fa7165 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptionsAdvanced.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/LightGbmWithOptionsAdvanced.cs @@ -41,17 +41,17 @@ public static void Example() "Features", featureNames) .Append(mlContext.Regression.Trainers.LightGbm( new LightGbmRegressionTrainer.Options + { + LabelColumnName = labelName, + NumberOfLeaves = 4, + MinimumExampleCountPerLeaf = 6, + LearningRate = 0.001, + Booster = new GossBooster.Options() { - LabelColumnName = labelName, - NumberOfLeaves = 4, - MinimumExampleCountPerLeaf = 6, - LearningRate = 0.001, - Booster = new GossBooster.Options() - { - TopRate = 0.3, - OtherRate = 0.2 - } - })); + TopRate = 0.3, + OtherRate = 0.2 + } + })); // Fit this pipeline to the training data. 
var model = pipeline.Fit(split.TrainSet); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescent.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescent.cs index e02d4c626f..437065c3ea 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescent.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescent.cs @@ -55,11 +55,11 @@ public static void Example() var metrics = mlContext.Regression.Evaluate(transformedTestData); PrintMetrics(metrics); - + } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); for (int i = 0; i < count; i++) diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescentWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescentWithOptions.cs index 59a7d1fdbc..4d65fb0e3f 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescentWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OnlineGradientDescentWithOptions.cs @@ -25,7 +25,7 @@ public static void Example() var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints); // Define trainer options. 
- var options = new OnlineGradientDescentTrainer.Options + var options = new OnlineGradientDescentTrainer.Options { LabelColumnName = nameof(DataPoint.Label), FeatureColumnName = nameof(DataPoint.Features), @@ -75,7 +75,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); for (int i = 0; i < count; i++) diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquares.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquares.cs index 75c11b3417..6b37692400 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquares.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquares.cs @@ -67,7 +67,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); for (int i = 0; i < count; i++) diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresAdvanced.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresAdvanced.cs index 9f59a163f2..e1ea4e73a9 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresAdvanced.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresAdvanced.cs @@ -30,15 +30,15 @@ public static void Example() // Here only seven numeric columns are used as features var dataView = mlContext.Data.LoadFromTextFile(dataFile, new TextLoader.Options - { - Separators = new[] { '\t' }, - HasHeader = true, - Columns = new[] + { + Separators = new[] { '\t' }, + HasHeader = true, + Columns = new[] { new TextLoader.Column("Label", DataKind.Single, 0), new TextLoader.Column("Features", DataKind.Single, 1, 6) } - }); + }); //////////////////// Data Preview //////////////////// // MedianHomeValue 
CrimesPerCapita PercentResidental PercentNonRetail CharlesRiver NitricOxides RoomsPerDwelling PercentPre40s @@ -64,7 +64,7 @@ public static void Example() var metrics = mlContext.Regression.Evaluate(dataWithPredictions); PrintMetrics(metrics); - + // Expected output: // L1: 4.15 // L2: 31.98 diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresWithOptions.cs index 75b3215932..8dda84aff2 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresWithOptions.cs @@ -79,7 +79,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); for (int i = 0; i < count; i++) diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresWithOptionsAdvanced.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresWithOptionsAdvanced.cs index 7a8a265c49..8e9e150f5d 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresWithOptionsAdvanced.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/OrdinaryLeastSquaresWithOptionsAdvanced.cs @@ -30,15 +30,15 @@ public static void Example() // Here only seven numeric columns are used as features var dataView = mlContext.Data.LoadFromTextFile(dataFile, new TextLoader.Options - { - Separators = new[] { '\t' }, - HasHeader = true, - Columns = new[] + { + Separators = new[] { '\t' }, + HasHeader = true, + Columns = new[] { new TextLoader.Column("Label", DataKind.Single, 0), new TextLoader.Column("Features", DataKind.Single, 1, 6) } - }); + }); //////////////////// Data Preview //////////////////// // MedianHomeValue CrimesPerCapita PercentResidental 
PercentNonRetail CharlesRiver NitricOxides RoomsPerDwelling PercentPre40s @@ -52,10 +52,10 @@ public static void Example() // as data is already processed in a form consumable by the trainer var pipeline = mlContext.Regression.Trainers.Ols( new OlsTrainer.Options() - { - L2Regularization = 0.1f, - CalculateStatistics = false - }); + { + L2Regularization = 0.1f, + CalculateStatistics = false + }); var model = pipeline.Fit(split.TrainSet); // Check the weights that the model learned @@ -68,7 +68,7 @@ public static void Example() var metrics = mlContext.Regression.Evaluate(dataWithPredictions); PrintMetrics(metrics); - + // Expected output: // L1: 4.14 // L2: 32.35 diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/PermutationFeatureImportance.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/PermutationFeatureImportance.cs index dfa04d1b76..49c4e2695e 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/PermutationFeatureImportance.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/PermutationFeatureImportance.cs @@ -12,7 +12,7 @@ public static void Example() // Create a new context for ML.NET operations. It can be used for // exception tracking and logging, as a catalog of available operations // and as the source of randomness. - var mlContext = new MLContext(seed:1); + var mlContext = new MLContext(seed: 1); // Create sample data. var samples = GenerateData(); @@ -49,8 +49,11 @@ public static void Example() // Now let's look at which features are most important to the model // overall. Get the feature indices sorted by their impact on RMSE. var sortedIndices = permutationMetrics - .Select((metrics, index) => new { index, - metrics.RootMeanSquaredError}) + .Select((metrics, index) => new + { + index, + metrics.RootMeanSquaredError + }) .OrderByDescending(feature => Math.Abs( feature.RootMeanSquaredError.Mean)) @@ -114,7 +117,7 @@ private class Data }; // Create a noisy label. 
- data.Label = (float)(bias + weight1 * data.Feature1 + weight2 * + data.Label = (float)(bias + weight1 * data.Feature1 + weight2 * data.Feature2 + rng.NextDouble() - 0.5); yield return data; } diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/PermutationFeatureImportanceLoadFromDisk.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/PermutationFeatureImportanceLoadFromDisk.cs index 1a783b6c2a..9bfe4297af 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/PermutationFeatureImportanceLoadFromDisk.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/PermutationFeatureImportanceLoadFromDisk.cs @@ -56,7 +56,8 @@ public static void Example() // Now let's look at which features are most important to the model // overall. Get the feature indices sorted by their impact on RMSE. var sortedIndices = permutationMetrics - .Select((metrics, index) => new { + .Select((metrics, index) => new + { index, metrics.RootMeanSquaredError }) @@ -129,4 +130,4 @@ private class Data } } } -} \ No newline at end of file +} diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Sdca.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Sdca.cs index f552883ca2..0b2c4ad860 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Sdca.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/Sdca.cs @@ -67,7 +67,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); for (int i = 0; i < count; i++) diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/SdcaWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/SdcaWithOptions.cs index c836c376c0..09ae1500aa 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/SdcaWithOptions.cs +++ 
b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/Regression/SdcaWithOptions.cs @@ -83,7 +83,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); for (int i = 0; i < count; i++) diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/ApplyOnnxModel.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/ApplyOnnxModel.cs index adb2268060..4c3cdf89ed 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/ApplyOnnxModel.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/ApplyOnnxModel.cs @@ -1,4 +1,4 @@ -using System; +using System; using System.Linq; using Microsoft.ML; using Microsoft.ML.Data; @@ -86,4 +86,4 @@ class Prediction public float[] softmaxout_1 { get; set; } } } -} \ No newline at end of file +} diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/CalculateFeatureContribution.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/CalculateFeatureContribution.cs index 2f0f5b464e..0f55e1a00f 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/CalculateFeatureContribution.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/CalculateFeatureContribution.cs @@ -39,8 +39,8 @@ public static void Example() var linearModel = linearTrainer.Fit(transformedData); // Print the model parameters. 
Console.WriteLine($"Linear Model Parameters"); - Console.WriteLine("Bias: " + linearModel.Model.Bias+ " Feature1: " + - linearModel.Model.Weights[0] + " Feature2: " +linearModel.Model + Console.WriteLine("Bias: " + linearModel.Model.Bias + " Feature1: " + + linearModel.Model.Weights[0] + " Feature2: " + linearModel.Model .Weights[1]); // Define a feature contribution calculator for all the features, and @@ -72,7 +72,7 @@ public static void Example() Console.WriteLine("Label: " + prediction.Label + " Prediction: " + prediction.Score); - Console.WriteLine("Feature1: " + prediction.Features[0] + + Console.WriteLine("Feature1: " + prediction.Features[0] + " Feature2: " + prediction.Features[1]); Console.WriteLine("Feature Contributions: " + prediction diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/CalculateFeatureContributionCalibrated.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/CalculateFeatureContributionCalibrated.cs index adaf66389c..7090a50438 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/CalculateFeatureContributionCalibrated.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/CalculateFeatureContributionCalibrated.cs @@ -22,7 +22,7 @@ public static void Example() // Create a pipeline to concatenate the features into a feature vector // and normalize it. 
- var transformPipeline = mlContext.Transforms.Concatenate("Features", + var transformPipeline = mlContext.Transforms.Concatenate("Features", new string[] { nameof(Data.Feature1), nameof(Data.Feature2) }) .Append(mlContext.Transforms.NormalizeMeanVariance("Features")); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Categorical/OneHotEncoding.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Categorical/OneHotEncoding.cs index 041e4a36b3..1d6706993e 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Categorical/OneHotEncoding.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Categorical/OneHotEncoding.cs @@ -91,4 +91,4 @@ private class DataPoint public string Education { get; set; } } } -} \ No newline at end of file +} diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Categorical/OneHotEncodingMultiColumn.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Categorical/OneHotEncodingMultiColumn.cs index e2cad80e28..6fc660e1b5 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Categorical/OneHotEncodingMultiColumn.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Categorical/OneHotEncodingMultiColumn.cs @@ -72,4 +72,4 @@ private class TransformedData public float[] ZipCode { get; set; } } } -} \ No newline at end of file +} diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Categorical/OneHotHashEncoding.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Categorical/OneHotHashEncoding.cs index 446424f3b9..49f1fac8e6 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Categorical/OneHotHashEncoding.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Categorical/OneHotHashEncoding.cs @@ -90,4 +90,4 @@ private class DataPoint public string Education { get; set; } } } -} \ No newline at end of file +} diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Categorical/OneHotHashEncodingMultiColumn.cs 
b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Categorical/OneHotHashEncodingMultiColumn.cs index e786123141..d5b8a53746 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Categorical/OneHotHashEncodingMultiColumn.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Categorical/OneHotHashEncodingMultiColumn.cs @@ -75,4 +75,4 @@ private class TransformedData public float[] ZipCode { get; set; } } } -} \ No newline at end of file +} diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/ConvertType.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/ConvertType.cs index 610a77983a..625b2e7e13 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/ConvertType.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/ConvertType.cs @@ -56,4 +56,4 @@ private sealed class TransformedData : InputData public Int32 SurvivedInt32 { get; set; } } } -} \ No newline at end of file +} diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/ConvertTypeMultiColumn.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/ConvertTypeMultiColumn.cs index bae0bfea02..61ab96f71d 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/ConvertTypeMultiColumn.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/ConvertTypeMultiColumn.cs @@ -9,7 +9,7 @@ namespace Samples.Dynamic // This is often a useful data transformation before concatenating the features // together and passing them to a particular estimator. public static class ConvertTypeMultiColumn - { + { public static void Example() { // Create a new ML context, for ML.NET operations. 
It can be used for @@ -43,7 +43,7 @@ public static void Example() new InputOutputColumnPair("Converted2", "Feature2"), new InputOutputColumnPair("Converted3", "Feature3"), new InputOutputColumnPair("Converted4", "Feature4"), - }, + }, DataKind.Single); // Let's fit our pipeline to the data. diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/Hash.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/Hash.cs index d535f90d67..514d395345 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/Hash.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/Hash.cs @@ -107,4 +107,4 @@ public class TransformedDataPoint : DataPoint } } -} \ No newline at end of file +} diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/HashWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/HashWithOptions.cs index fee28e5bb0..9c970816c0 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/HashWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/HashWithOptions.cs @@ -121,4 +121,4 @@ public class TransformedDataPoint : DataPoint } } -} \ No newline at end of file +} diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/MapKeyToVector.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/MapKeyToVector.cs index fd1646f014..6f1464699a 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/MapKeyToVector.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/MapKeyToVector.cs @@ -44,7 +44,7 @@ public static void Example() .Append(mlContext.Transforms.Concatenate("Parts", "PartA", "PartB")) .Append(mlContext.Transforms.Conversion.MapValueToKey("Parts")) .Append(mlContext.Transforms.Conversion.MapKeyToVector( - "PartsCount", "Parts", outputCountVector:true)) + "PartsCount", "Parts", outputCountVector: true)) 
.Append(mlContext.Transforms.Conversion.MapKeyToVector( "PartsNoCount", "Parts")); @@ -61,7 +61,7 @@ public static void Example() foreach (var featureRow in features) Console.WriteLine(featureRow.Timeframe + " " + - string.Join(',', featureRow.TimeframeVector.Select(x=>x)) + " " + string.Join(',', featureRow.TimeframeVector.Select(x => x)) + " " + string.Join(',', featureRow.PartsCount.Select(x => x)) + " " + string.Join(',', featureRow.PartsNoCount.Select( x => x))); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/MapValueToArray.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/MapValueToArray.cs index 57bff5a3b8..78c8bfac0c 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/MapValueToArray.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Conversion/MapValueToArray.cs @@ -1,11 +1,11 @@ -using System; +using System; using System.Collections.Generic; using Microsoft.ML; namespace Samples.Dynamic { public static class MapValueToArray - { + { /// This example demonstrates the use of MapValue by mapping strings to /// array values, which allows for mapping data to numeric arrays. This /// functionality is useful when the generated column will serve as the @@ -76,4 +76,4 @@ public class TransformedData : DataPoint public int[] Features { get; set; } } } -} \ No newline at end of file +} diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/CustomMapping.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/CustomMapping.cs index 8bfdef80a0..2a96b2d935 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/CustomMapping.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/CustomMapping.cs @@ -28,7 +28,7 @@ public static void Example() // We define the custom mapping between input and output rows that will // be applied by the transformation. 
- Action mapping = + Action mapping = (input, output) => output.IsUnderThirty = input.Age < 30; // Custom transformations can be used to transform data directly, or as diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/CustomMappingSaveAndLoad.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/CustomMappingSaveAndLoad.cs index 57b52c342c..aa55c522d2 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/CustomMappingSaveAndLoad.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/CustomMappingSaveAndLoad.cs @@ -52,7 +52,7 @@ public static void Example() // Now the transform pipeline can be saved and loaded through the usual // MLContext method. mlContext.Model.Save(transformer, data.Schema, "customTransform.zip"); - var loadedTransform = mlContext.Model.Load("customTransform.zip", out + var loadedTransform = mlContext.Model.Load("customTransform.zip", out var inputSchema); // Now we can transform the data and look at the output to confirm the diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/CustomMappingWithInMemoryCustomType.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/CustomMappingWithInMemoryCustomType.cs index eddf2dcebe..af59813d89 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/CustomMappingWithInMemoryCustomType.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/CustomMappingWithInMemoryCustomType.cs @@ -37,7 +37,7 @@ static public void Example() var firstAlien = mlContext.Data.CreateEnumerable( tribeTransformed, false).First(); - Console.WriteLine("We got a super alien with name " + firstAlien.Name + + Console.WriteLine("We got a super alien with name " + firstAlien.Name + ", age " + firstAlien.Merged.Age + ", " + "height " + firstAlien .Merged.Height + ", weight " + firstAlien.Merged.Weight + ", and " + firstAlien.Merged.HandCount + " hands."); @@ -52,7 +52,7 @@ static public void Example() var alien = new AlienHero("TEN.LM", 1, 2, 3, 4, 5, 6, 7, 8); var 
superAlien = engine.Predict(alien); Console.Write("We got a super alien with name " + superAlien.Name + - ", age " + superAlien.Merged.Age + ", height " + + ", age " + superAlien.Merged.Age + ", height " + superAlien.Merged.Height + ", weight " + superAlien.Merged.Weight + ", and " + superAlien.Merged.HandCount + " hands."); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/FeatureSelection/SelectFeaturesBasedOnCount.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/FeatureSelection/SelectFeaturesBasedOnCount.cs index 2b5ec15af4..254d1b9c28 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/FeatureSelection/SelectFeaturesBasedOnCount.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/FeatureSelection/SelectFeaturesBasedOnCount.cs @@ -21,7 +21,7 @@ public static void Example() foreach (var item in rawData) Console.WriteLine("{0,-25} {1,-25}", string.Join(",", item .NumericVector), string.Join(",", item.StringVector)); - + // NumericVector StringVector // 4,NaN,6 A,WA,Male // 4,5,6 A,,Female @@ -33,10 +33,10 @@ public static void Example() // We will use the SelectFeaturesBasedOnCount to retain only those slots // which have at least 'count' non-default and non-missing values per // slot. - var pipeline = + var pipeline = mlContext.Transforms.FeatureSelection.SelectFeaturesBasedOnCount( outputColumnName: "NumericVector", count: 3) // Usage on numeric - // column. + // column. .Append(mlContext.Transforms.FeatureSelection .SelectFeaturesBasedOnCount(outputColumnName: "StringVector", count: 3)); // Usage on text column. 
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/FeatureSelection/SelectFeaturesBasedOnMutualInformation.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/FeatureSelection/SelectFeaturesBasedOnMutualInformation.cs index 79d1a06021..c0523c53c2 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/FeatureSelection/SelectFeaturesBasedOnMutualInformation.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/FeatureSelection/SelectFeaturesBasedOnMutualInformation.cs @@ -35,7 +35,7 @@ public static void Example() // information between that slot and a specified label. var pipeline = mlContext.Transforms.FeatureSelection .SelectFeaturesBasedOnMutualInformation(outputColumnName: - "NumericVector", labelColumnName: "Label", slotsInOutput:2); + "NumericVector", labelColumnName: "Label", slotsInOutput: 2); // The pipeline can then be trained, using .Fit(), and the resulting // transformer can be used to transform data. @@ -48,7 +48,7 @@ public static void Example() Console.WriteLine($"NumericVector"); foreach (var item in convertedData) Console.WriteLine("{0,-25}", string.Join(",", item.NumericVector)); - + // NumericVector // 4,0 // 0,5 diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/FeatureSelection/SelectFeaturesBasedOnMutualInformationMultiColumn.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/FeatureSelection/SelectFeaturesBasedOnMutualInformationMultiColumn.cs index 138abcbd60..dc42ca04a0 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/FeatureSelection/SelectFeaturesBasedOnMutualInformationMultiColumn.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/FeatureSelection/SelectFeaturesBasedOnMutualInformationMultiColumn.cs @@ -37,7 +37,7 @@ public static void Example() // Multi column example : This pipeline transform two columns using the // provided parameters. 
var pipeline = mlContext.Transforms.FeatureSelection - .SelectFeaturesBasedOnMutualInformation(new InputOutputColumnPair[] + .SelectFeaturesBasedOnMutualInformation(new InputOutputColumnPair[] { new InputOutputColumnPair("NumericVectorA"), new InputOutputColumnPair("NumericVectorB") }, labelColumnName: "Label", slotsInOutput: 4); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/ImageAnalytics/ConvertToImage.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/ImageAnalytics/ConvertToImage.cs index f0f58c7e45..0d817de957 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/ImageAnalytics/ConvertToImage.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/ImageAnalytics/ConvertToImage.cs @@ -70,7 +70,7 @@ private static void PrintColumns(IDataView transformedData) var imageGetter = cursor.GetGetter(cursor.Schema["Image"]); while (cursor.MoveNext()) { - + featuresGetter(ref features); pixelsGetter(ref pixels); imageGetter(ref imageObject); @@ -98,8 +98,11 @@ private class DataPoint var random = new Random(seed); for (int i = 0; i < count; i++) - yield return new DataPoint { Features = Enumerable.Repeat(0, - inputSize).Select(x => (float)random.Next(0, 256)).ToArray() }; + yield return new DataPoint + { + Features = Enumerable.Repeat(0, + inputSize).Select(x => (float)random.Next(0, 256)).ToArray() + }; } } } diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/ImageAnalytics/ExtractPixels.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/ImageAnalytics/ExtractPixels.cs index 68fb1eb57c..b16d37b40d 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/ImageAnalytics/ExtractPixels.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/ImageAnalytics/ExtractPixels.cs @@ -69,7 +69,7 @@ private static void PrintColumns(IDataView transformedData) { Console.WriteLine("{0, -25} {1, -25} {2, -25} {3, -25} {4, -25}", "ImagePath", "Name", "ImageObject", "ImageObjectResized", "Pixels"); - + using 
(var cursor = transformedData.GetRowCursor(transformedData .Schema)) { @@ -100,7 +100,7 @@ private static void PrintColumns(IDataView transformedData) while (cursor.MoveNext()) { - + imagePathGetter(ref imagePath); nameGetter(ref name); imageObjectGetter(ref imageObject); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/ImageAnalytics/LoadImages.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/ImageAnalytics/LoadImages.cs index 2fb1f19efa..2113d2a92b 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/ImageAnalytics/LoadImages.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/ImageAnalytics/LoadImages.cs @@ -83,7 +83,7 @@ private static void PrintColumns(IDataView transformedData) while (cursor.MoveNext()) { - + imagePathGetter(ref imagePath); nameGetter(ref name); imageObjectGetter(ref imageObject); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/IndicateMissingValues.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/IndicateMissingValues.cs index b4eb2bf21e..854299ae25 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/IndicateMissingValues.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/IndicateMissingValues.cs @@ -64,4 +64,4 @@ private sealed class SampleDataTransformed : DataPoint public bool[] MissingIndicator { get; set; } } } -} \ No newline at end of file +} diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/IndicateMissingValuesMultiColumn.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/IndicateMissingValuesMultiColumn.cs index 38750ed0a6..fbec5ef3b0 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/IndicateMissingValuesMultiColumn.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/IndicateMissingValuesMultiColumn.cs @@ -78,4 +78,4 @@ private sealed class SampleDataTransformed : DataPoint } } -} \ No newline at end of file +} diff --git 
a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/NormalizeBinningMulticolumn.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/NormalizeBinningMulticolumn.cs index 9d6ef29ed7..5a2790d9a7 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/NormalizeBinningMulticolumn.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/NormalizeBinningMulticolumn.cs @@ -48,9 +48,9 @@ public static void Example() var column = transformedData.GetColumn("Features").ToArray(); var column2 = transformedData.GetColumn("Features2").ToArray(); - for(int i=0; i< column.Length; i++) + for (int i = 0; i < column.Length; i++) Console.WriteLine(string.Join(", ", column[i].Select(x => x - .ToString("f4")))+"\t\t"+column2[i]); + .ToString("f4"))) + "\t\t" + column2[i]); // Expected output: // // Features Feature2 diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/NormalizeGlobalContrast.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/NormalizeGlobalContrast.cs index 8d4795a1b6..739204445e 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/NormalizeGlobalContrast.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/NormalizeGlobalContrast.cs @@ -24,8 +24,8 @@ public static void Example() // ML.NET. var data = mlContext.Data.LoadFromEnumerable(samples); var approximation = mlContext.Transforms.NormalizeGlobalContrast( - "Features", ensureZeroMean: false, scale:2, - ensureUnitStandardDeviation:true); + "Features", ensureZeroMean: false, scale: 2, + ensureUnitStandardDeviation: true); // Now we can transform the data and look at the output to confirm the // behavior of the estimator. 
This operation doesn't actually evaluate diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/NormalizeLogMeanVariance.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/NormalizeLogMeanVariance.cs index 3ef43d9f85..483b1bb07b 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/NormalizeLogMeanVariance.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/NormalizeLogMeanVariance.cs @@ -75,7 +75,7 @@ public static void Example() "produce by:"); Console.WriteLine("y = 0.5* (1 + ERF((Math.Log(x)- " + transformParams - .Mean[1] + ") / (" + transformParams.StandardDeviation[1] + + .Mean[1] + ") / (" + transformParams.StandardDeviation[1] + " * sqrt(2)))"); // ERF is https://en.wikipedia.org/wiki/Error_function. diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/NormalizeMeanVariance.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/NormalizeMeanVariance.cs index 4ff6f084f5..1df68a782c 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/NormalizeMeanVariance.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/NormalizeMeanVariance.cs @@ -83,7 +83,7 @@ public static void Example() // y = 0.5 * (1 + ERF((x - 0.5) / (1.118034 * sqrt(2))) var noCdfParams = normalizeNoCdfTransform - .GetNormalizerModelParameters(0) as + .GetNormalizerModelParameters(0) as AffineNormalizerModelParameters>; var offset = noCdfParams.Offset.Length == 0 ? 
0 : noCdfParams.Offset[1]; diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/NormalizeMinMax.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/NormalizeMinMax.cs index 5af18b26c5..c1edd091a5 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/NormalizeMinMax.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/NormalizeMinMax.cs @@ -74,9 +74,9 @@ public static void Example() Console.WriteLine($"The 1-index value in resulting array would be " + $"produced by:"); - Console.WriteLine(" y = (x - (" + (transformParams.Offset.Length == 0 ? - 0 : transformParams.Offset[1]) + ")) * " + transformParams - .Scale[1]); + Console.WriteLine(" y = (x - (" + (transformParams.Offset.Length == 0 ? + 0 : transformParams.Offset[1]) + ")) * " + transformParams + .Scale[1]); // Expected output: // The 1-index value in resulting array would be produce by: // y = (x - (-1)) * 0.3333333 diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/NormalizeMinMaxMulticolumn.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/NormalizeMinMaxMulticolumn.cs index 0bd218e8a0..639245687d 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/NormalizeMinMaxMulticolumn.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/NormalizeMinMaxMulticolumn.cs @@ -1,4 +1,4 @@ -using System; +using System; using System.Collections.Generic; using System.Collections.Immutable; using System.Linq; @@ -16,22 +16,22 @@ public static void Example() var samples = new List() { new DataPoint() - { + { Features = new float[4] { 1, 1, 3, 0 }, Features2 = new float[3] { 1, 2, 3 } }, new DataPoint() - { + { Features = new float[4] { 2, 2, 2, 0 }, Features2 = new float[3] { 3, 4, 5 } }, new DataPoint() - { + { Features = new float[4] { 0, 0, 1, 0 }, Features2 = new float[3] { 6, 7, 8 } }, new DataPoint() - { + { Features = new float[4] {-1,-1,-1, 1 }, Features2 = new float[3] { 9, 0, 4 } } @@ -40,7 +40,7 @@ public static void Example() // 
Convert training data to IDataView, the general data type used in // ML.NET. var data = mlContext.Data.LoadFromEnumerable(samples); - + var columnPair = new[] { new InputOutputColumnPair("Features"), @@ -71,7 +71,7 @@ public static void Example() for (int i = 0; i < column.Length; i++) Console.WriteLine(string.Join(", ", column[i].Select(x => x - .ToString("f4"))) + "\t\t" + + .ToString("f4"))) + "\t\t" + string.Join(", ", column2[i].Select(x => x.ToString("f4")))); // Expected output: diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/NormalizeSupervisedBinning.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/NormalizeSupervisedBinning.cs index 9441324639..1cfe33f8c0 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/NormalizeSupervisedBinning.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/NormalizeSupervisedBinning.cs @@ -95,7 +95,7 @@ public static void Example() Console.WriteLine($"The 1-index value in resulting array would be " + $"produce by:"); - Console.WriteLine("y = (Index(x) / " + transformParams.Density[0] + + Console.WriteLine("y = (Index(x) / " + transformParams.Density[0] + ") - " + (transformParams.Offset.Length == 0 ? 0 : transformParams .Offset[0])); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Projection/VectorWhiten.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Projection/VectorWhiten.cs index 5e5548227b..3f5e63a5f9 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Projection/VectorWhiten.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Projection/VectorWhiten.cs @@ -8,7 +8,7 @@ namespace Samples.Dynamic { public sealed class VectorWhiten { - + /// This example requires installation of additional nuget package /// Microsoft.ML.Mkl.Components. 
public static void Example() @@ -41,7 +41,7 @@ public static void Example() foreach (var row in column) Console.WriteLine(string.Join(" ", row.DenseValues().Select(x => - x.ToString("f3")))+" "); + x.ToString("f3"))) + " "); }; // A pipeline to project Features column into white noise vector. @@ -82,11 +82,17 @@ private class SampleVectorOfNumbersData GetVectorOfNumbersData() { var data = new List(); - data.Add(new SampleVectorOfNumbersData { Features = new float[10] { 0, - 1, 2, 3, 4, 5, 6, 7, 8, 9 } }); + data.Add(new SampleVectorOfNumbersData + { + Features = new float[10] { 0, + 1, 2, 3, 4, 5, 6, 7, 8, 9 } + }); - data.Add(new SampleVectorOfNumbersData { Features = new float[10] { 1, - 2, 3, 4, 5, 6, 7, 8, 9, 0 } }); + data.Add(new SampleVectorOfNumbersData + { + Features = new float[10] { 1, + 2, 3, 4, 5, 6, 7, 8, 9, 0 } + }); data.Add(new SampleVectorOfNumbersData { diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Projection/VectorWhitenWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Projection/VectorWhitenWithOptions.cs index b1f852fc2c..d5f29d10e5 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Projection/VectorWhitenWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Projection/VectorWhitenWithOptions.cs @@ -40,7 +40,7 @@ public static void Example() foreach (var row in column) Console.WriteLine(string.Join(" ", row.DenseValues().Select(x => - x.ToString("f3")))+" "); + x.ToString("f3"))) + " "); }; @@ -77,15 +77,21 @@ private class SampleVectorOfNumbersData /// /// Returns a few rows of the infertility dataset. 
/// - private static IEnumerable + private static IEnumerable GetVectorOfNumbersData() { var data = new List(); - data.Add(new SampleVectorOfNumbersData { Features = new float[10] { 0, - 1, 2, 3, 4, 5, 6, 7, 8, 9 } }); + data.Add(new SampleVectorOfNumbersData + { + Features = new float[10] { 0, + 1, 2, 3, 4, 5, 6, 7, 8, 9 } + }); - data.Add(new SampleVectorOfNumbersData { Features = new float[10] { 1, - 2, 3, 4, 5, 6, 7, 8, 9, 0 } }); + data.Add(new SampleVectorOfNumbersData + { + Features = new float[10] { 1, + 2, 3, 4, 5, 6, 7, 8, 9, 0 } + }); data.Add(new SampleVectorOfNumbersData { diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/ApplyCustomWordEmbedding.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/ApplyCustomWordEmbedding.cs index e1a275f763..03f61ab3e2 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/ApplyCustomWordEmbedding.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/ApplyCustomWordEmbedding.cs @@ -59,8 +59,11 @@ public static void Example() TransformedTextData>(textTransformer); // Call the prediction API to convert the text into embedding vector. - var data = new TextData() { Text = "This is a great product. I would " + - "like to buy it again." }; + var data = new TextData() + { + Text = "This is a great product. I would " + + "like to buy it again." + }; var prediction = predictionEngine.Predict(data); // Print the length of the embedding vector. 
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/ApplyWordEmbedding.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/ApplyWordEmbedding.cs index cfd1077f8f..75cf1fe1a9 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/ApplyWordEmbedding.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/ApplyWordEmbedding.cs @@ -49,8 +49,11 @@ public static void Example() TransformedTextData>(textTransformer); // Call the prediction API to convert the text into embedding vector. - var data = new TextData() { Text = "This is a great product. I would " + - "like to buy it again." }; + var data = new TextData() + { + Text = "This is a great product. I would " + + "like to buy it again." + }; var prediction = predictionEngine.Predict(data); // Print the length of the embedding vector. diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/FeaturizeText.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/FeaturizeText.cs index 5b62d0639e..fcf620e158 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/FeaturizeText.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/FeaturizeText.cs @@ -52,7 +52,7 @@ public static void Example() // The length of the output feature vector depends on these settings. var textPipeline = mlContext.Transforms.Text.FeaturizeText("Features", "Text"); - + // Fit to data. 
var textTransformer = textPipeline.Fit(dataview); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/FeaturizeTextWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/FeaturizeTextWithOptions.cs index 3f405176bb..802c003310 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/FeaturizeTextWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/FeaturizeTextWithOptions.cs @@ -49,14 +49,24 @@ public static void Example() OutputTokensColumnName = "OutputTokens", CaseMode = TextNormalizingEstimator.CaseMode.Lower, // Use ML.NET's built-in stop word remover - StopWordsRemoverOptions = new StopWordsRemovingEstimator.Options() { - Language = TextFeaturizingEstimator.Language.English }, - - WordFeatureExtractor = new WordBagEstimator.Options() { NgramLength - = 2, UseAllLengths = true }, - - CharFeatureExtractor = new WordBagEstimator.Options() { NgramLength - = 3, UseAllLengths= false }, + StopWordsRemoverOptions = new StopWordsRemovingEstimator.Options() + { + Language = TextFeaturizingEstimator.Language.English + }, + + WordFeatureExtractor = new WordBagEstimator.Options() + { + NgramLength + = 2, + UseAllLengths = true + }, + + CharFeatureExtractor = new WordBagEstimator.Options() + { + NgramLength + = 3, + UseAllLengths = false + }, }; var textPipeline = mlContext.Transforms.Text.FeaturizeText("Features", options, "Text"); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/NormalizeText.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/NormalizeText.cs index 2100c13371..1b9c5eda8d 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/NormalizeText.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/NormalizeText.cs @@ -38,9 +38,12 @@ public static void Example() TransformedTextData>(normTextTransformer); // Call the prediction API. 
- var data = new TextData() { Text = "ML.NET's NormalizeText API " + + var data = new TextData() + { + Text = "ML.NET's NormalizeText API " + "changes the case of the TEXT and removes/keeps diâcrîtîcs, " + - "punctuations, and/or numbers (123)." }; + "punctuations, and/or numbers (123)." + }; var prediction = predictionEngine.Predict(data); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/ProduceHashedNgrams.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/ProduceHashedNgrams.cs index 23cb9a50fe..6f8c60f6d4 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/ProduceHashedNgrams.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/ProduceHashedNgrams.cs @@ -46,12 +46,12 @@ public static void Example() "Text") .Append(mlContext.Transforms.Conversion.MapValueToKey("Tokens")) .Append(mlContext.Transforms.Text.ProduceHashedNgrams( - "NgramFeatures", "Tokens", + "NgramFeatures", "Tokens", numberOfBits: 5, ngramLength: 3, useAllLengths: false, maximumNumberOfInverts: 1)); - + // Fit to data. var textTransformer = textPipeline.Fit(dataview); var transformedDataView = textTransformer.Transform(dataview); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/ProduceNgrams.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/ProduceNgrams.cs index fa53a49778..002f4278fa 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/ProduceNgrams.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/ProduceNgrams.cs @@ -59,7 +59,7 @@ public static void Example() ngramLength: 3, useAllLengths: false, weighting: NgramExtractingEstimator.WeightingCriteria.Tf)); - + // Fit to data. 
var textTransformer = textPipeline.Fit(dataview); var transformedDataView = textTransformer.Transform(dataview); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/RemoveDefaultStopWords.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/RemoveDefaultStopWords.cs index 6147bf155e..3bc8189da1 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/RemoveDefaultStopWords.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/RemoveDefaultStopWords.cs @@ -30,7 +30,7 @@ public static void Example() var textPipeline = mlContext.Transforms.Text.TokenizeIntoWords("Words", "Text") .Append(mlContext.Transforms.Text.RemoveDefaultStopWords( - "WordsWithoutStopWords", "Words", language: + "WordsWithoutStopWords", "Words", language: StopWordsRemovingEstimator.Language.English)); // Fit to data. @@ -42,9 +42,12 @@ public static void Example() TransformedTextData>(textTransformer); // Call the prediction API to remove stop words. - var data = new TextData() { Text = "ML.NET's RemoveDefaultStopWords " + + var data = new TextData() + { + Text = "ML.NET's RemoveDefaultStopWords " + "API removes stop words from tHe text/string. It requires the " + - "text/string to be tokenized beforehand." }; + "text/string to be tokenized beforehand." + }; var prediction = predictionEngine.Predict(data); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/RemoveStopWords.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/RemoveStopWords.cs index 55a0f0c955..0df82a4fdd 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/RemoveStopWords.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/RemoveStopWords.cs @@ -30,7 +30,7 @@ public static void Example() "Text") .Append(mlContext.Transforms.Text.RemoveStopWords( "WordsWithoutStopWords", "Words", stopwords: - new[] { "a", "the","from", "by" })); + new[] { "a", "the", "from", "by" })); // Fit to data. 
var textTransformer = textPipeline.Fit(emptyDataView); @@ -41,9 +41,12 @@ public static void Example() TransformedTextData>(textTransformer); // Call the prediction API to remove stop words. - var data = new TextData() { Text = "ML.NET's RemoveStopWords API " + + var data = new TextData() + { + Text = "ML.NET's RemoveStopWords API " + "removes stop words from tHe text/string using a list of stop " + - "words provided by the user." }; + "words provided by the user." + }; var prediction = predictionEngine.Predict(data); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/TokenizeIntoCharactersAsKeys.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/TokenizeIntoCharactersAsKeys.cs index ff984b4d46..88b38822fd 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/TokenizeIntoCharactersAsKeys.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/TokenizeIntoCharactersAsKeys.cs @@ -40,9 +40,12 @@ public static void Example() TransformedTextData>(textTransformer); // Call the prediction API to convert the text into characters. - var data = new TextData() { Text = "ML.NET's " + + var data = new TextData() + { + Text = "ML.NET's " + "TokenizeIntoCharactersAsKeys API splits text/string into " + - "characters." }; + "characters." + }; var prediction = predictionEngine.Predict(data); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/TokenizeIntoWords.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/TokenizeIntoWords.cs index ad0826ad19..8c20a7df69 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/TokenizeIntoWords.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/Text/TokenizeIntoWords.cs @@ -38,9 +38,12 @@ public static void Example() TransformedTextData>(textTransformer); // Call the prediction API to convert the text into words. 
- var data = new TextData() { Text = "ML.NET's TokenizeIntoWords API " + + var data = new TextData() + { + Text = "ML.NET's TokenizeIntoWords API " + "splits text/string into words using the list of characters " + - "provided as separators." }; + "provided as separators." + }; var prediction = predictionEngine.Predict(data); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectAnomalyBySrCnn.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectAnomalyBySrCnn.cs index db29b3ef82..6c5b494073 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectAnomalyBySrCnn.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectAnomalyBySrCnn.cs @@ -108,7 +108,7 @@ public static void Example() //5 0 0.01 0.25 } - private static void PrintPrediction(float value, SrCnnAnomalyDetection + private static void PrintPrediction(float value, SrCnnAnomalyDetection prediction) => Console.WriteLine("{0}\t{1}\t{2:0.00}\t{3:0.00}", value, prediction .Prediction[0], prediction.Prediction[1], prediction.Prediction[2]); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectChangePointBySsa.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectChangePointBySsa.cs index 23fb7e5c9c..b85fa73d2e 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectChangePointBySsa.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectChangePointBySsa.cs @@ -1,4 +1,4 @@ -using System; +using System; using System.Collections.Generic; using System.IO; using Microsoft.ML; @@ -68,7 +68,7 @@ public static void Example() // prediction engine. 
Console.WriteLine($"Output from ChangePoint predictions on new data:"); Console.WriteLine("Data\tAlert\tScore\tP-Value\tMartingale value"); - + // Output from ChangePoint predictions on new data: // Data Alert Score P-Value Martingale value diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectChangePointBySsaBatchPrediction.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectChangePointBySsaBatchPrediction.cs index 85732f9259..79b10e30c8 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectChangePointBySsaBatchPrediction.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectChangePointBySsaBatchPrediction.cs @@ -1,4 +1,4 @@ -using System; +using System; using System.Collections.Generic; using Microsoft.ML; using Microsoft.ML.Data; @@ -100,7 +100,7 @@ public static void Example() } private static void PrintPrediction(float value, ChangePointPrediction - prediction) => + prediction) => Console.WriteLine("{0}\t{1}\t{2:0.00}\t{3:0.00}\t{4:0.00}", value, prediction.Prediction[0], prediction.Prediction[1], prediction.Prediction[2], prediction.Prediction[3]); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectChangePointBySsaStream.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectChangePointBySsaStream.cs index dfab85aee2..f363e283b3 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectChangePointBySsaStream.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectChangePointBySsaStream.cs @@ -1,4 +1,4 @@ -using System; +using System; using System.Collections.Generic; using System.IO; using Microsoft.ML; diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectIidChangePoint.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectIidChangePoint.cs index 852fc9f8e9..b64f4d2817 100644 --- 
a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectIidChangePoint.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectIidChangePoint.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. @@ -65,9 +65,9 @@ public static void Example() $"post-transformation."); Console.WriteLine("Data\tAlert\tScore\tP-Value\tMartingale value"); - + // Data Alert Score P-Value Martingale value - + // Create non-anomalous data and check for change point. for (int index = 0; index < 8; index++) { diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectIidChangePointBatchPrediction.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectIidChangePointBatchPrediction.cs index f0d5e193af..829cdd575d 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectIidChangePointBatchPrediction.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectIidChangePointBatchPrediction.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectIidSpike.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectIidSpike.cs index 2b5c0c92c9..00ef33f5fc 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectIidSpike.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectIidSpike.cs @@ -1,4 +1,4 @@ -using System; +using System; using System.Collections.Generic; using System.IO; using Microsoft.ML; @@ -57,7 +57,7 @@ public static void Example() $"post-transformation."); Console.WriteLine("Data\tAlert\tScore\tP-Value"); - + // Prediction column obtained post-transformation. // Data Alert Score P-Value @@ -76,7 +76,7 @@ public static void Example() // Spike. PrintPrediction(10, engine.Predict(new TimeSeriesData(10))); - + // 10 1 10.00 0.00 <-- alert is on, predicted spike (check-point model) // Checkpoint the model. @@ -102,7 +102,7 @@ public static void Example() } private static void PrintPrediction(float value, IidSpikePrediction - prediction) => + prediction) => Console.WriteLine("{0}\t{1}\t{2:0.00}\t{3:0.00}", value, prediction.Prediction[0], prediction.Prediction[1], prediction.Prediction[2]); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectIidSpikeBatchPrediction.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectIidSpikeBatchPrediction.cs index 67845eb158..ef2d78a2f5 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectIidSpikeBatchPrediction.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectIidSpikeBatchPrediction.cs @@ -1,4 +1,4 @@ -using System; +using System; using System.Collections.Generic; using Microsoft.ML; using Microsoft.ML.Data; @@ -77,7 +77,7 @@ public static void Example() } private static void PrintPrediction(float value, IidSpikePrediction - prediction) => + prediction) => Console.WriteLine("{0}\t{1}\t{2:0.00}\t{3:0.00}", 
value, prediction.Prediction[0], prediction.Prediction[1], prediction.Prediction[2]); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectSeasonality.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectSeasonality.cs index f786e58e76..75e81d9a14 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectSeasonality.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectSeasonality.cs @@ -48,4 +48,4 @@ public TimeSeriesData(double value) } } -} \ No newline at end of file +} diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectSpikeBySsa.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectSpikeBySsa.cs index 068a940315..23eb7a665e 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectSpikeBySsa.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectSpikeBySsa.cs @@ -1,4 +1,4 @@ -using System; +using System; using System.Collections.Generic; using System.IO; using Microsoft.ML; @@ -105,7 +105,7 @@ public static void Example() // Run predictions on the loaded model. 
for (int i = 0; i < 5; i++) PrintPrediction(i, engine.Predict(new TimeSeriesData(i))); - + // 0 0 -2.74 0.40 <-- saved to disk, re-loaded, and running new predictions // 1 0 -1.47 0.42 // 2 0 -17.50 0.24 @@ -114,7 +114,7 @@ public static void Example() } private static void PrintPrediction(float value, SsaSpikePrediction - prediction) => + prediction) => Console.WriteLine("{0}\t{1}\t{2:0.00}\t{3:0.00}", value, prediction.Prediction[0], prediction.Prediction[1], prediction.Prediction[2]); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectSpikeBySsaBatchPrediction.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectSpikeBySsaBatchPrediction.cs index 31d29432cf..ae7f0b6f87 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectSpikeBySsaBatchPrediction.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/DetectSpikeBySsaBatchPrediction.cs @@ -1,4 +1,4 @@ -using System; +using System; using System.Collections.Generic; using Microsoft.ML; using Microsoft.ML.Data; @@ -103,7 +103,7 @@ public static void Example() } private static void PrintPrediction(float value, SsaSpikePrediction - prediction) => + prediction) => Console.WriteLine("{0}\t{1}\t{2:0.00}\t{3:0.00}", value, prediction.Prediction[0], prediction.Prediction[1], prediction.Prediction[2]); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/Forecasting.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/Forecasting.cs index 07f0eef23d..a21a768217 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/Forecasting.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/Forecasting.cs @@ -1,4 +1,4 @@ -using System; +using System; using System.Collections.Generic; using System.IO; using Microsoft.ML; diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/ForecastingWithConfidenceInterval.cs 
b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/ForecastingWithConfidenceInterval.cs index 4903b147b8..70f5c521b6 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/ForecastingWithConfidenceInterval.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/ForecastingWithConfidenceInterval.cs @@ -1,4 +1,4 @@ -using System; +using System; using System.Collections.Generic; using Microsoft.ML; using Microsoft.ML.Transforms.TimeSeries; @@ -98,7 +98,7 @@ public static void Example() // Forecast with the original model(that was checkpointed to disk). forecast = forecastEngine.Predict(); - PrintForecastValuesAndIntervals(forecast.Forecast, + PrintForecastValuesAndIntervals(forecast.Forecast, forecast.ConfidenceLowerBound, forecast.ConfidenceUpperBound); // [1.791331, 1.255525, 0.3060154, -0.200446, 0.5657795] diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/LocalizeRootCause.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/LocalizeRootCause.cs index f6c9d0ffd8..e2e7854b48 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/LocalizeRootCause.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/LocalizeRootCause.cs @@ -28,7 +28,7 @@ public static void Example() { count++; Console.WriteLine($"Root cause item #{count} ..."); - Console.WriteLine($"Score: {item.Score}, Path: {String.Join(" ",item.Path)}, Direction: {item.Direction}, Dimension:{String.Join(" ", item.Dimension)}"); + Console.WriteLine($"Score: {item.Score}, Path: {String.Join(" ", item.Path)}, Direction: {item.Direction}, Dimension:{String.Join(" ", item.Dimension)}"); } //Item #1 ... 
@@ -111,4 +111,4 @@ private static DateTime GetTimestamp() return new DateTime(2020, 3, 23, 0, 0, 0); } } -} \ No newline at end of file +} diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/LocalizeRootCauseMultidimension.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/LocalizeRootCauseMultidimension.cs index 16ad1fb2e6..777ab15c80 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/LocalizeRootCauseMultidimension.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/LocalizeRootCauseMultidimension.cs @@ -137,4 +137,4 @@ private static DateTime GetTimestamp() return new DateTime(2020, 3, 23, 0, 0, 0); } } -} \ No newline at end of file +} diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TreeFeaturization/FastForestBinaryFeaturizationWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TreeFeaturization/FastForestBinaryFeaturizationWithOptions.cs index 9ca0b87a85..34feefd7b9 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TreeFeaturization/FastForestBinaryFeaturizationWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TreeFeaturization/FastForestBinaryFeaturizationWithOptions.cs @@ -120,7 +120,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); float randomFloat() => (float)random.NextDouble(); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TreeFeaturization/FastForestRegressionFeaturizationWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TreeFeaturization/FastForestRegressionFeaturizationWithOptions.cs index cc107d35fd..d6f554c634 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TreeFeaturization/FastForestRegressionFeaturizationWithOptions.cs +++ 
b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TreeFeaturization/FastForestRegressionFeaturizationWithOptions.cs @@ -121,7 +121,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); for (int i = 0; i < count; i++) @@ -131,7 +131,7 @@ public static void Example() { Label = label, // Create random features that are correlated with the label. - Features = Enumerable.Repeat(label, 3).Select(x => x + + Features = Enumerable.Repeat(label, 3).Select(x => x + (float)random.NextDouble()).ToArray() }; } diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TreeFeaturization/FastTreeBinaryFeaturizationWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TreeFeaturization/FastTreeBinaryFeaturizationWithOptions.cs index 521e04e23b..98300fb996 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TreeFeaturization/FastTreeBinaryFeaturizationWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TreeFeaturization/FastTreeBinaryFeaturizationWithOptions.cs @@ -122,7 +122,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); float randomFloat() => (float)random.NextDouble(); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TreeFeaturization/FastTreeRegressionFeaturizationWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TreeFeaturization/FastTreeRegressionFeaturizationWithOptions.cs index 407bb2e22f..0111ee3e03 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TreeFeaturization/FastTreeRegressionFeaturizationWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TreeFeaturization/FastTreeRegressionFeaturizationWithOptions.cs @@ -121,7 +121,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int 
count, - int seed=0) + int seed = 0) { var random = new Random(seed); for (int i = 0; i < count; i++) @@ -131,7 +131,7 @@ public static void Example() { Label = label, // Create random features that are correlated with the label. - Features = Enumerable.Repeat(label, 3).Select(x => x + + Features = Enumerable.Repeat(label, 3).Select(x => x + (float)random.NextDouble()).ToArray() }; } diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TreeFeaturization/FastTreeTweedieFeaturizationWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TreeFeaturization/FastTreeTweedieFeaturizationWithOptions.cs index 085db4eeb7..6f1aed611a 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TreeFeaturization/FastTreeTweedieFeaturizationWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TreeFeaturization/FastTreeTweedieFeaturizationWithOptions.cs @@ -121,7 +121,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); for (int i = 0; i < count; i++) @@ -131,7 +131,7 @@ public static void Example() { Label = label, // Create random features that are correlated with the label. 
- Features = Enumerable.Repeat(label, 3).Select(x => x + + Features = Enumerable.Repeat(label, 3).Select(x => x + (float)random.NextDouble()).ToArray() }; } diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TreeFeaturization/PretrainedTreeEnsembleFeaturizationWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TreeFeaturization/PretrainedTreeEnsembleFeaturizationWithOptions.cs index b60c74672d..cdff7454d8 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TreeFeaturization/PretrainedTreeEnsembleFeaturizationWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TreeFeaturization/PretrainedTreeEnsembleFeaturizationWithOptions.cs @@ -129,7 +129,7 @@ public static void Example() } private static IEnumerable GenerateRandomDataPoints(int count, - int seed=0) + int seed = 0) { var random = new Random(seed); float randomFloat() => (float)random.NextDouble(); diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/WithOnFitDelegate.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/WithOnFitDelegate.cs index 72c541b8b3..eaa4fc56ba 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/WithOnFitDelegate.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/WithOnFitDelegate.cs @@ -60,7 +60,7 @@ public static void Example() // False // Inspect some of the properties of the binning transformer - var binningParam = binningTransformer.GetNormalizerModelParameters(0) as + var binningParam = binningTransformer.GetNormalizerModelParameters(0) as BinNormalizerModelParameters>; for (int i = 0; i < binningParam.UpperBounds.Length; i++) diff --git a/docs/samples/Microsoft.ML.Samples/Program.cs b/docs/samples/Microsoft.ML.Samples/Program.cs index b30e20d7cf..0fd34a8c97 100644 --- a/docs/samples/Microsoft.ML.Samples/Program.cs +++ b/docs/samples/Microsoft.ML.Samples/Program.cs @@ -1,4 +1,4 @@ -using System; +using System; using System.Reflection; using Samples.Dynamic;