diff --git a/.editorconfig b/.editorconfig index 869de4abbe..da57e1fb43 100644 --- a/.editorconfig +++ b/.editorconfig @@ -306,6 +306,10 @@ dotnet_diagnostic.IDE2004.severity = warning dotnet_diagnostic.CA1822.severity = suggestion [test/**/*.cs] +# IDE0044: Don't force readonly for tests +dotnet_diagnostic.IDE0044.severity = none +dotnet_style_readonly_field = false + # MSML_GeneralName: This name should be PascalCased dotnet_diagnostic.MSML_GeneralName.severity = none @@ -322,4 +326,18 @@ dotnet_diagnostic.MSML_ExtendBaseTestClass.severity = none # The MSML_RelaxTestNaming suppressor for VSTHRD200 is not active for CodeAnalyzer.Tests, so we disable it altogether. # VSTHRD200: Use "Async" suffix for async methods -dotnet_diagnostic.VSTHRD200.severity = none \ No newline at end of file +dotnet_diagnostic.VSTHRD200.severity = none + +[docs/**/*.cs] +# IDE0073: Don't want license file header in samples +dotnet_diagnostic.IDE0073.severity = none +file_header_template = unset + +# IDE0044: Don't force readonly for samples +dotnet_diagnostic.IDE0044.severity = none +dotnet_style_readonly_field = false + +[test/Microsoft.ML.TestFrameworkCommon/Utility/*.cs] +# IDE0073: Don't want license file header in code we are using from elsewhere +dotnet_diagnostic.IDE0073.severity = none +file_header_template = unset \ No newline at end of file diff --git a/Directory.Build.props b/Directory.Build.props index 28e1f6527d..2e4c963fad 100644 --- a/Directory.Build.props +++ b/Directory.Build.props @@ -17,6 +17,7 @@ portable true latest + true diff --git a/docs/samples/Microsoft.ML.AutoML.Samples/RankingExperiment.cs b/docs/samples/Microsoft.ML.AutoML.Samples/RankingExperiment.cs index 42abd9cb37..148db9388e 100644 --- a/docs/samples/Microsoft.ML.AutoML.Samples/RankingExperiment.cs +++ b/docs/samples/Microsoft.ML.AutoML.Samples/RankingExperiment.cs @@ -1,10 +1,10 @@ using System; using System.Collections.Generic; -using System.Text; using System.IO; using System.Linq; -using Microsoft.ML.Data; +using System.Text; using Microsoft.ML.AutoML.Samples.DataStructures; +using Microsoft.ML.Data; namespace Microsoft.ML.AutoML.Samples { diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/ImageClassification/ResnetV2101TransferLearningEarlyStopping.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/ImageClassification/ResnetV2101TransferLearningEarlyStopping.cs index 1e2a5b525f..ba6243a498 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/ImageClassification/ResnetV2101TransferLearningEarlyStopping.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/ImageClassification/ResnetV2101TransferLearningEarlyStopping.cs @@ -2,15 +2,15 @@ using System; using System.Collections.Generic; using System.IO; +using System.IO.Compression; +using System.Linq; +using System.Net; +using System.Threading; using System.Threading.Tasks; using Microsoft.ML; -using static Microsoft.ML.DataOperationsCatalog; -using System.Linq; using Microsoft.ML.Data; -using System.IO.Compression; -using System.Threading; -using System.Net; using Microsoft.ML.Vision; +using static Microsoft.ML.DataOperationsCatalog; namespace Samples.Dynamic { diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/CustomMappingWithInMemoryCustomType.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/CustomMappingWithInMemoryCustomType.cs index 5272233d9a..44550a7951 100644 ---
a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/CustomMappingWithInMemoryCustomType.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/CustomMappingWithInMemoryCustomType.cs @@ -18,7 +18,7 @@ class CustomMappingWithInMemoryCustomType // AlienBody members to produce a SuperAlienHero entity with a "Name" member of type // string and a single "Merged" member of type AlienBody, where the merger is just // the addition of the various members of AlienBody. - static public void Example() + public static void Example() { var mlContext = new MLContext(); // Build in-memory data. diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/ImageAnalytics/ConvertToGrayScaleInMemory.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/ImageAnalytics/ConvertToGrayScaleInMemory.cs index 3bb0586cc0..1d7f662e85 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/ImageAnalytics/ConvertToGrayScaleInMemory.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/ImageAnalytics/ConvertToGrayScaleInMemory.cs @@ -7,7 +7,7 @@ namespace Samples.Dynamic { class ConvertToGrayScaleInMemory { - static public void Example() + public static void Example() { var mlContext = new MLContext(); // Create an image list. diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/ForecastingWithConfidenceInterval.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/ForecastingWithConfidenceInterval.cs index 70f5c521b6..f2043b1aeb 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/ForecastingWithConfidenceInterval.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Transforms/TimeSeries/ForecastingWithConfidenceInterval.cs @@ -1,8 +1,8 @@ using System; using System.Collections.Generic; +using System.IO; using Microsoft.ML; using Microsoft.ML.Transforms.TimeSeries; -using System.IO; namespace Samples.Dynamic { diff --git a/src/Microsoft.Data.Analysis/ArrowStringDataFrameColumn.cs b/src/Microsoft.Data.Analysis/ArrowStringDataFrameColumn.cs index 5065009422..56041711e3 100644 --- a/src/Microsoft.Data.Analysis/ArrowStringDataFrameColumn.cs +++ b/src/Microsoft.Data.Analysis/ArrowStringDataFrameColumn.cs @@ -20,9 +20,9 @@ namespace Microsoft.Data.Analysis /// public partial class ArrowStringDataFrameColumn : DataFrameColumn, IEnumerable { - private IList> _dataBuffers; - private IList> _offsetsBuffers; - private IList> _nullBitMapBuffers; + private readonly IList> _dataBuffers; + private readonly IList> _offsetsBuffers; + private readonly IList> _nullBitMapBuffers; /// /// Constructs an empty with the given . @@ -40,7 +40,7 @@ public ArrowStringDataFrameColumn(string name) : base(name, 0, typeof(string)) /// /// The name of the column. /// The Arrow formatted string values in this column. - /// The Arrow formatted offets in this column. + /// The Arrow formatted offsets in this column. /// The Arrow formatted null bits in this column. /// The length of the column. /// The number of values in this column. 
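Most of the churn in this diff comes from IDE0044 (dotnet_style_readonly_field), the rule the .editorconfig hunks above explicitly relax for test and sample code while the library changes apply it. The analyzer flags a private field that is only ever assigned at its declaration or inside a constructor. A minimal, self-contained sketch of the pattern, using illustrative names rather than types from this diff:

    using System.Collections.Generic;

    internal sealed class BufferHolder
    {
        // Assigned only at the declaration and in the constructor,
        // so IDE0044 asks for 'readonly' on both fields.
        private readonly List<byte[]> _buffers = new List<byte[]>();
        private readonly string _name;

        public BufferHolder(string name)
        {
            _name = name; // constructor assignment is still permitted for readonly fields
        }

        public string Name => _name;

        public void Add(byte[] buffer)
        {
            // Adding to the list is unaffected; 'readonly' only constrains
            // assignments to the field itself.
            _buffers.Add(buffer);
        }
    }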
diff --git a/src/Microsoft.Data.Analysis/DataFrame.IO.cs b/src/Microsoft.Data.Analysis/DataFrame.IO.cs index 1253ed6149..a5182bc4cd 100644 --- a/src/Microsoft.Data.Analysis/DataFrame.IO.cs +++ b/src/Microsoft.Data.Analysis/DataFrame.IO.cs @@ -293,10 +293,10 @@ private static DataFrameColumn CreateColumn(Type kind, string[] columnNames, int private class WrappedStreamReaderOrStringReader { - private Stream _stream; - private long _initialPosition; - private Encoding _encoding; - private string _csvString; + private readonly Stream _stream; + private readonly long _initialPosition; + private readonly Encoding _encoding; + private readonly string _csvString; public WrappedStreamReaderOrStringReader(Stream stream, Encoding encoding) { diff --git a/src/Microsoft.Data.Analysis/DataFrame.Join.cs b/src/Microsoft.Data.Analysis/DataFrame.Join.cs index 2af42a5566..426abaaf2f 100644 --- a/src/Microsoft.Data.Analysis/DataFrame.Join.cs +++ b/src/Microsoft.Data.Analysis/DataFrame.Join.cs @@ -142,7 +142,7 @@ public DataFrame Join(DataFrame other, string leftSuffix = "_left", string right return ret; } - private static bool IsAnyNullValueInColumns (IReadOnlyCollection columns, long index) + private static bool IsAnyNullValueInColumns(IReadOnlyCollection columns, long index) { foreach (var column in columns) { @@ -176,19 +176,19 @@ private static HashSet Merge(DataFrame retainedDataFrame, DataFrame supple throw new ArgumentNullException(nameof(supplemetaryJoinColumnNames)); if (retainedJoinColumnNames.Length != supplemetaryJoinColumnNames.Length) - throw new ArgumentException(Strings.MismatchedArrayLengths, nameof(retainedJoinColumnNames)); - + throw new ArgumentException(Strings.MismatchedArrayLengths, nameof(retainedJoinColumnNames)); + HashSet intersection = calculateIntersection ? 
new HashSet() : null; // Get occurrences of values in columns used for join in the retained and supplementary dataframes Dictionary> occurrences = null; Dictionary retainedIndicesReverseMapping = null; - + HashSet supplementaryJoinColumnsNullIndices = new HashSet(); - - for (int colNameIndex = 0; colNameIndex < retainedJoinColumnNames.Length; colNameIndex++) + + for (int colNameIndex = 0; colNameIndex < retainedJoinColumnNames.Length; colNameIndex++) { DataFrameColumn shrinkedRetainedColumn = retainedDataFrame.Columns[retainedJoinColumnNames[colNameIndex]]; @@ -211,7 +211,7 @@ private static HashSet Merge(DataFrame retainedDataFrame, DataFrame supple retainedIndicesReverseMapping = newRetainedIndicesReverseMapping; shrinkedRetainedColumn = shrinkedRetainedColumn.Clone(new Int64DataFrameColumn("Indices", shrinkedRetainedIndices)); } - + DataFrameColumn supplementaryColumn = supplementaryDataFrame.Columns[supplemetaryJoinColumnNames[colNameIndex]]; //Find occurrenses on current step (join column) @@ -222,7 +222,7 @@ private static HashSet Merge(DataFrame retainedDataFrame, DataFrame supple newOccurrences = newOccurrences.ToDictionary(kvp => retainedIndicesReverseMapping[kvp.Key], kvp => kvp.Value); supplementaryJoinColumnsNullIndices.UnionWith(supplementaryColumnNullIndices); - + // shrink join result on current column by previous join columns (if any) // (we have to remove occurrences that doesn't exist in previous columns, because JOIN happens only if ALL left and right columns in JOIN are matched) if (occurrences != null) @@ -242,7 +242,7 @@ private static HashSet Merge(DataFrame retainedDataFrame, DataFrame supple occurrences = newOccurrences; } - + retainedRowIndices = new Int64DataFrameColumn("RetainedIndices"); supplementaryRowIndices = new Int64DataFrameColumn("SupplementaryIndices"); @@ -280,7 +280,7 @@ private static HashSet Merge(DataFrame retainedDataFrame, DataFrame supple } } else - { + { foreach (long row in supplementaryJoinColumnsNullIndices) { retainedRowIndices.Append(i); @@ -288,10 +288,10 @@ private static HashSet Merge(DataFrame retainedDataFrame, DataFrame supple } } } - + return intersection; } - + public DataFrame Merge(DataFrame other, string[] leftJoinColumns, string[] rightJoinColumns, string leftSuffix = "_left", string rightSuffix = "_right", JoinAlgorithm joinAlgorithm = JoinAlgorithm.Left) { if (other == null) @@ -335,7 +335,7 @@ public DataFrame Merge(DataFrame other, string[] leftJoinColumns, string[] right else if (joinAlgorithm == JoinAlgorithm.FullOuter) { //In full outer join we would like to retain data from both side, so we do it into 2 steps: one first we do LEFT JOIN and then add lost data from the RIGHT side - + //Step 1 //Do LEFT JOIN isLeftDataFrameRetained = true; @@ -347,7 +347,7 @@ public DataFrame Merge(DataFrame other, string[] leftJoinColumns, string[] right var retainedJoinColumns = isLeftDataFrameRetained ? 
leftJoinColumns : rightJoinColumns; var intersection = Merge(retainedDataFrame, supplementaryDataFrame, retainedJoinColumns, supplementaryJoinColumns, out retainedRowIndices, out supplementaryRowIndices, calculateIntersection: true); - + //Step 2 //Do RIGHT JOIN to retain all data from supplementary DataFrame too (take into account data intersection from the first step to avoid duplicates) for (long i = 0; i < supplementaryDataFrame.Columns.RowCount; i++) @@ -365,9 +365,9 @@ public DataFrame Merge(DataFrame other, string[] leftJoinColumns, string[] right } else throw new NotImplementedException(nameof(joinAlgorithm)); - + DataFrame ret = new DataFrame(); - + //insert columns from left dataframe (this) for (int i = 0; i < this.Columns.Count; i++) { diff --git a/src/Microsoft.Data.Analysis/DataFrameColumn.cs b/src/Microsoft.Data.Analysis/DataFrameColumn.cs index 88415d8bdc..15cf669144 100644 --- a/src/Microsoft.Data.Analysis/DataFrameColumn.cs +++ b/src/Microsoft.Data.Analysis/DataFrameColumn.cs @@ -244,7 +244,7 @@ public virtual DataFrameColumn Sort(bool ascending = true) ret.Add(i, otherRowIndices); } } - + return ret; } diff --git a/src/Microsoft.Data.Analysis/DataFrameColumnCollection.cs b/src/Microsoft.Data.Analysis/DataFrameColumnCollection.cs index 45f40b4696..0701588f3f 100644 --- a/src/Microsoft.Data.Analysis/DataFrameColumnCollection.cs +++ b/src/Microsoft.Data.Analysis/DataFrameColumnCollection.cs @@ -13,11 +13,11 @@ namespace Microsoft.Data.Analysis /// public class DataFrameColumnCollection : Collection { - private Action ColumnsChanged; + private readonly Action ColumnsChanged; - private List _columnNames = new List(); + private readonly List _columnNames = new List(); - private Dictionary _columnNameToIndexDictionary = new Dictionary(StringComparer.Ordinal); + private readonly Dictionary _columnNameToIndexDictionary = new Dictionary(StringComparer.Ordinal); internal long RowCount { get; set; } diff --git a/src/Microsoft.Data.Analysis/GroupBy.cs b/src/Microsoft.Data.Analysis/GroupBy.cs index 57c2f3e43a..357fa80a63 100644 --- a/src/Microsoft.Data.Analysis/GroupBy.cs +++ b/src/Microsoft.Data.Analysis/GroupBy.cs @@ -101,9 +101,9 @@ IEnumerator IEnumerable.GetEnumerator() #endregion - private int _groupByColumnIndex; - private IDictionary> _keyToRowIndicesMap; - private DataFrame _dataFrame; + private readonly int _groupByColumnIndex; + private readonly IDictionary> _keyToRowIndicesMap; + private readonly DataFrame _dataFrame; public GroupBy(DataFrame dataFrame, int groupByColumnIndex, IDictionary> keyToRowIndices) { diff --git a/src/Microsoft.Data.Analysis/PrimitiveDataFrameColumn.cs b/src/Microsoft.Data.Analysis/PrimitiveDataFrameColumn.cs index 8b44bca18f..730b8ddfd6 100644 --- a/src/Microsoft.Data.Analysis/PrimitiveDataFrameColumn.cs +++ b/src/Microsoft.Data.Analysis/PrimitiveDataFrameColumn.cs @@ -20,7 +20,7 @@ namespace Microsoft.Data.Analysis public partial class PrimitiveDataFrameColumn : DataFrameColumn, IEnumerable where T : unmanaged { - private PrimitiveColumnContainer _columnContainer; + private readonly PrimitiveColumnContainer _columnContainer; internal PrimitiveColumnContainer ColumnContainer => _columnContainer; diff --git a/src/Microsoft.Data.Analysis/StringDataFrameColumn.cs b/src/Microsoft.Data.Analysis/StringDataFrameColumn.cs index 4250c18785..0d88ef5505 100644 --- a/src/Microsoft.Data.Analysis/StringDataFrameColumn.cs +++ b/src/Microsoft.Data.Analysis/StringDataFrameColumn.cs @@ -17,7 +17,7 @@ namespace Microsoft.Data.Analysis /// Is NOT Arrow compatible 
public partial class StringDataFrameColumn : DataFrameColumn, IEnumerable { - private List> _stringBuffers = new List>(); // To store more than intMax number of strings + private readonly List> _stringBuffers = new List>(); // To store more than intMax number of strings public StringDataFrameColumn(string name, long length = 0) : base(name, length, typeof(string)) { diff --git a/src/Microsoft.Data.Analysis/TextFieldParser.cs b/src/Microsoft.Data.Analysis/TextFieldParser.cs index b72e5a77cf..e068458daa 100644 --- a/src/Microsoft.Data.Analysis/TextFieldParser.cs +++ b/src/Microsoft.Data.Analysis/TextFieldParser.cs @@ -22,12 +22,12 @@ internal enum FieldType internal class QuoteDelimitedFieldBuilder { - private StringBuilder _field; + private readonly StringBuilder _field; private bool _fieldFinished; private int _index; private int _delimiterLength; - private Regex _delimiterRegex; - private string _spaceChars; + private readonly Regex _delimiterRegex; + private readonly string _spaceChars; private bool _malformedLine; public QuoteDelimitedFieldBuilder(Regex delimiterRegex, string spaceChars) @@ -148,7 +148,7 @@ internal class TextFieldParser : IDisposable private Regex _delimiterWithEndCharsRegex; - private int[] _whitespaceCodes = new int[] { '\u0009', '\u000B', '\u000C', '\u0020', '\u0085', '\u00A0', '\u1680', '\u2000', '\u2001', '\u2002', '\u2003', '\u2004', '\u2005', '\u2006', '\u2007', '\u2008', '\u2009', '\u200A', '\u200B', '\u2028', '\u2029', '\u3000', '\uFEFF' }; + private readonly int[] _whitespaceCodes = new int[] { '\u0009', '\u000B', '\u000C', '\u0020', '\u0085', '\u00A0', '\u1680', '\u2000', '\u2001', '\u2002', '\u2003', '\u2004', '\u2005', '\u2006', '\u2007', '\u2008', '\u2009', '\u200A', '\u200B', '\u2028', '\u2029', '\u3000', '\uFEFF' }; private Regex _beginQuotesRegex; @@ -172,13 +172,13 @@ internal class TextFieldParser : IDisposable private string _spaceChars; - private int _maxLineSize = 10000000; + private readonly int _maxLineSize = 10000000; - private int _maxBufferSize = 10000000; + private readonly int _maxBufferSize = 10000000; - private bool _leaveOpen; + private readonly bool _leaveOpen; - private char[] newLineChars = Environment.NewLine.ToCharArray(); + private readonly char[] _newLineChars = Environment.NewLine.ToCharArray(); public string[] CommentTokens { @@ -419,7 +419,7 @@ public string ReadLine() } _lineNumber++; - return line.TrimEnd(newLineChars); + return line.TrimEnd(_newLineChars); } public string[] ReadFields() @@ -470,7 +470,7 @@ public string PeekChars(int numberOfChars) return null; } - line = line.TrimEnd(newLineChars); + line = line.TrimEnd(_newLineChars); if (line.Length < numberOfChars) { return line; @@ -799,7 +799,7 @@ private string[] ParseDelimitedLine() endHelper.BuildField(line, index); if (endHelper.MalformedLine) { - _errorLine = line.TrimEnd(newLineChars); + _errorLine = line.TrimEnd(_newLineChars); _errorLineNumber = currentLineNumber; throw new Exception(string.Format(Strings.CannotParseWithDelimiters, currentLineNumber)); } @@ -817,13 +817,13 @@ private string[] ParseDelimitedLine() string newLine = ReadNextDataLine(); if (newLine == null) { - _errorLine = line.TrimEnd(newLineChars); + _errorLine = line.TrimEnd(_newLineChars); _errorLineNumber = currentLineNumber; throw new Exception(string.Format(Strings.CannotParseWithDelimiters, currentLineNumber)); } if (line.Length + newLine.Length > _maxLineSize) { - _errorLine = line.TrimEnd(newLineChars); + _errorLine = line.TrimEnd(_newLineChars); _errorLineNumber = currentLineNumber; 
throw new Exception(string.Format(Strings.LineExceedsMaxLineSize, currentLineNumber)); } @@ -832,7 +832,7 @@ private string[] ParseDelimitedLine() endHelper.BuildField(line, endOfLine); if (endHelper.MalformedLine) { - _errorLine = line.TrimEnd(newLineChars); + _errorLine = line.TrimEnd(_newLineChars); _errorLineNumber = currentLineNumber; throw new Exception(string.Format(Strings.CannotParseWithDelimiters, currentLineNumber)); } @@ -862,7 +862,7 @@ private string[] ParseDelimitedLine() index = delimiterMatch.Index + delimiterMatch.Length; continue; } - field = line.Substring(index).TrimEnd(newLineChars); + field = line.Substring(index).TrimEnd(_newLineChars); if (_trimWhiteSpace) { field = field.Trim(); @@ -881,7 +881,7 @@ private string[] ParseFixedWidthLine() { return null; } - line = line.TrimEnd(newLineChars); + line = line.TrimEnd(_newLineChars); StringInfo lineInfo = new StringInfo(line); ValidateFixedWidthLine(lineInfo, _lineNumber - 1); int index = 0; @@ -897,7 +897,7 @@ private string[] ParseFixedWidthLine() private string GetFixedWidthField(StringInfo line, int index, int fieldLength) { - string field = (fieldLength > 0) ? line.SubstringByTextElements(index, fieldLength) : ((index < line.LengthInTextElements) ? line.SubstringByTextElements(index).TrimEnd(newLineChars) : string.Empty); + string field = (fieldLength > 0) ? line.SubstringByTextElements(index, fieldLength) : ((index < line.LengthInTextElements) ? line.SubstringByTextElements(index).TrimEnd(_newLineChars) : string.Empty); if (_trimWhiteSpace) { return field.Trim(); @@ -1062,7 +1062,7 @@ private void ValidateDelimiters(string[] delimiterArray) { throw new Exception(Strings.EmptyDelimiters); } - if (delimiter.IndexOfAny(newLineChars) > -1) + if (delimiter.IndexOfAny(_newLineChars) > -1) { throw new Exception(Strings.DelimiterCannotBeNewlineChar); } diff --git a/src/Microsoft.Extensions.ML/Properties/AssemblyInfo.cs b/src/Microsoft.Extensions.ML/Properties/AssemblyInfo.cs index 03ef4bd89d..54a42d8f3e 100644 --- a/src/Microsoft.Extensions.ML/Properties/AssemblyInfo.cs +++ b/src/Microsoft.Extensions.ML/Properties/AssemblyInfo.cs @@ -1,3 +1,7 @@ -using System.Runtime.CompilerServices; +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using System.Runtime.CompilerServices; [assembly: InternalsVisibleTo("Microsoft.Extensions.ML.Tests, PublicKey=00240000048000009400000006020000002400005253413100040000010001004b86c4cb78549b34bab61a3b1800e23bfeb5b3ec390074041536a7e3cbd97f5f04cf0f857155a8928eaa29ebfd11cfbbad3ba70efea7bda3226c6a8d370a4cd303f714486b6ebc225985a638471e6ef571cc92a4613c00b8fa65d61ccee0cbe5f36330c9a01f4183559f1bef24cc2917c6d913e3a541333a1d05d9bed22b38cb")] diff --git a/src/Microsoft.ML.AutoML/API/AutoCatalog.cs b/src/Microsoft.ML.AutoML/API/AutoCatalog.cs index d623c16602..8193bcc26e 100644 --- a/src/Microsoft.ML.AutoML/API/AutoCatalog.cs +++ b/src/Microsoft.ML.AutoML/API/AutoCatalog.cs @@ -1,4 +1,4 @@ -// Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
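The hunks in this group add, or repair the truncated first line of, the standard .NET Foundation license header — the text that IDE0073 with a file_header_template checks for outside the sample paths and the "code we are using from elsewhere" utilities excluded in the .editorconfig change above. For reference, the expected shape at the top of a C# source file (the namespace below is only a placeholder):

    // Licensed to the .NET Foundation under one or more agreements.
    // The .NET Foundation licenses this file to you under the MIT license.
    // See the LICENSE file in the project root for more information.

    using System;

    namespace Microsoft.ML.AutoML
    {
        // ... type declarations follow the header ...
    }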
diff --git a/src/Microsoft.ML.AutoML/API/BinaryClassificationExperiment.cs b/src/Microsoft.ML.AutoML/API/BinaryClassificationExperiment.cs index 84f539c932..359c19fcb2 100644 --- a/src/Microsoft.ML.AutoML/API/BinaryClassificationExperiment.cs +++ b/src/Microsoft.ML.AutoML/API/BinaryClassificationExperiment.cs @@ -1,4 +1,4 @@ -// Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/src/Microsoft.ML.AutoML/API/ColumnInference.cs b/src/Microsoft.ML.AutoML/API/ColumnInference.cs index d3dc5b0df4..bca0c7a97c 100644 --- a/src/Microsoft.ML.AutoML/API/ColumnInference.cs +++ b/src/Microsoft.ML.AutoML/API/ColumnInference.cs @@ -1,4 +1,4 @@ -// Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/src/Microsoft.ML.AutoML/API/ExperimentBase.cs b/src/Microsoft.ML.AutoML/API/ExperimentBase.cs index d3e4eb295b..5b6df45f49 100644 --- a/src/Microsoft.ML.AutoML/API/ExperimentBase.cs +++ b/src/Microsoft.ML.AutoML/API/ExperimentBase.cs @@ -1,4 +1,4 @@ -// Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/src/Microsoft.ML.AutoML/API/ExperimentResults/CrossValidationExperimentResult.cs b/src/Microsoft.ML.AutoML/API/ExperimentResults/CrossValidationExperimentResult.cs index 5c1db48a59..35376f8da2 100644 --- a/src/Microsoft.ML.AutoML/API/ExperimentResults/CrossValidationExperimentResult.cs +++ b/src/Microsoft.ML.AutoML/API/ExperimentResults/CrossValidationExperimentResult.cs @@ -1,4 +1,4 @@ -// Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/src/Microsoft.ML.AutoML/API/ExperimentResults/ExperimentResult.cs b/src/Microsoft.ML.AutoML/API/ExperimentResults/ExperimentResult.cs index d2ce912f5d..e66c3d92cd 100644 --- a/src/Microsoft.ML.AutoML/API/ExperimentResults/ExperimentResult.cs +++ b/src/Microsoft.ML.AutoML/API/ExperimentResults/ExperimentResult.cs @@ -1,4 +1,4 @@ -// Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/src/Microsoft.ML.AutoML/API/ExperimentSettings.cs b/src/Microsoft.ML.AutoML/API/ExperimentSettings.cs index a32b0358f9..4ff9df0bae 100644 --- a/src/Microsoft.ML.AutoML/API/ExperimentSettings.cs +++ b/src/Microsoft.ML.AutoML/API/ExperimentSettings.cs @@ -1,4 +1,4 @@ -// Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
diff --git a/src/Microsoft.ML.AutoML/API/MLContextExtension.cs b/src/Microsoft.ML.AutoML/API/MLContextExtension.cs index da223838e1..d3856c5b33 100644 --- a/src/Microsoft.ML.AutoML/API/MLContextExtension.cs +++ b/src/Microsoft.ML.AutoML/API/MLContextExtension.cs @@ -1,4 +1,4 @@ -// Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/src/Microsoft.ML.AutoML/API/MulticlassClassificationExperiment.cs b/src/Microsoft.ML.AutoML/API/MulticlassClassificationExperiment.cs index 5b7cb1126c..1e2a88ca07 100644 --- a/src/Microsoft.ML.AutoML/API/MulticlassClassificationExperiment.cs +++ b/src/Microsoft.ML.AutoML/API/MulticlassClassificationExperiment.cs @@ -1,4 +1,4 @@ -// Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/src/Microsoft.ML.AutoML/API/RankingExperiment.cs b/src/Microsoft.ML.AutoML/API/RankingExperiment.cs index f575fb6ca3..dc3ab312b1 100644 --- a/src/Microsoft.ML.AutoML/API/RankingExperiment.cs +++ b/src/Microsoft.ML.AutoML/API/RankingExperiment.cs @@ -1,4 +1,4 @@ -// Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/src/Microsoft.ML.AutoML/API/RegressionExperiment.cs b/src/Microsoft.ML.AutoML/API/RegressionExperiment.cs index e86199be62..d5df868f34 100644 --- a/src/Microsoft.ML.AutoML/API/RegressionExperiment.cs +++ b/src/Microsoft.ML.AutoML/API/RegressionExperiment.cs @@ -1,4 +1,4 @@ -// Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/src/Microsoft.ML.AutoML/ColumnInference/ColumnInformationUtil.cs b/src/Microsoft.ML.AutoML/ColumnInference/ColumnInformationUtil.cs index 6175aa9915..a33f830298 100644 --- a/src/Microsoft.ML.AutoML/ColumnInference/ColumnInformationUtil.cs +++ b/src/Microsoft.ML.AutoML/ColumnInference/ColumnInformationUtil.cs @@ -1,4 +1,4 @@ -// Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
diff --git a/src/Microsoft.ML.AutoML/Experiment/OptimizingMetricInfo.cs b/src/Microsoft.ML.AutoML/Experiment/OptimizingMetricInfo.cs index 11fecb7d14..2fa85fb47d 100644 --- a/src/Microsoft.ML.AutoML/Experiment/OptimizingMetricInfo.cs +++ b/src/Microsoft.ML.AutoML/Experiment/OptimizingMetricInfo.cs @@ -10,18 +10,18 @@ internal sealed class OptimizingMetricInfo { public bool IsMaximizing { get; } - private static RegressionMetric[] _minimizingRegressionMetrics = new RegressionMetric[] + private static readonly RegressionMetric[] _minimizingRegressionMetrics = new RegressionMetric[] { RegressionMetric.MeanAbsoluteError, RegressionMetric.MeanSquaredError, RegressionMetric.RootMeanSquaredError }; - private static BinaryClassificationMetric[] _minimizingBinaryMetrics = new BinaryClassificationMetric[] + private static readonly BinaryClassificationMetric[] _minimizingBinaryMetrics = new BinaryClassificationMetric[] { }; - private static MulticlassClassificationMetric[] _minimizingMulticlassMetrics = new MulticlassClassificationMetric[] + private static readonly MulticlassClassificationMetric[] _minimizingMulticlassMetrics = new MulticlassClassificationMetric[] { MulticlassClassificationMetric.LogLoss, }; diff --git a/src/Microsoft.ML.AutoML/Sweepers/SmacSweeper.cs b/src/Microsoft.ML.AutoML/Sweepers/SmacSweeper.cs index 8b953a9544..83f1abc118 100644 --- a/src/Microsoft.ML.AutoML/Sweepers/SmacSweeper.cs +++ b/src/Microsoft.ML.AutoML/Sweepers/SmacSweeper.cs @@ -344,8 +344,8 @@ private double[][] GetForestRegressionLeafValues(FastForestRegressionModelParame // Todo: Remove the reflection below for TreeTreeEnsembleModelParameters methods GetLeaf and GetLeafValue. // Long-term, replace with tree featurizer once it becomes available // Tracking issue -- https://github.com/dotnet/machinelearning-automl/issues/342 - private static MethodInfo _getLeafMethod = typeof(TreeEnsembleModelParameters).GetMethod("GetLeaf", BindingFlags.NonPublic | BindingFlags.Instance); - private static MethodInfo _getLeafValueMethod = typeof(TreeEnsembleModelParameters).GetMethod("GetLeafValue", BindingFlags.NonPublic | BindingFlags.Instance); + private static readonly MethodInfo _getLeafMethod = typeof(TreeEnsembleModelParameters).GetMethod("GetLeaf", BindingFlags.NonPublic | BindingFlags.Instance); + private static readonly MethodInfo _getLeafValueMethod = typeof(TreeEnsembleModelParameters).GetMethod("GetLeafValue", BindingFlags.NonPublic | BindingFlags.Instance); private static int GetLeaf(TreeEnsembleModelParameters model, int treeId, VBuffer features) { diff --git a/src/Microsoft.ML.AutoML/TrainerExtensions/TrainerExtensionUtil.cs b/src/Microsoft.ML.AutoML/TrainerExtensions/TrainerExtensionUtil.cs index 7d8d55e49a..843de811a7 100644 --- a/src/Microsoft.ML.AutoML/TrainerExtensions/TrainerExtensionUtil.cs +++ b/src/Microsoft.ML.AutoML/TrainerExtensions/TrainerExtensionUtil.cs @@ -75,7 +75,7 @@ internal static class TrainerExtensionUtil return options; } - private static string[] _lightGbmBoosterParamNames = new[] { "L2Regularization", "L1Regularization" }; + private static readonly string[] _lightGbmBoosterParamNames = new[] { "L2Regularization", "L1Regularization" }; private const string LightGbmBoosterPropName = "Booster"; public static TOptions CreateLightGbmOptions(IEnumerable sweepParams, ColumnInformation columnInfo) diff --git a/src/Microsoft.ML.AutoML/Utils/StringEditDistance.cs b/src/Microsoft.ML.AutoML/Utils/StringEditDistance.cs index 63b56e815a..e1d4f59218 100644 --- 
a/src/Microsoft.ML.AutoML/Utils/StringEditDistance.cs +++ b/src/Microsoft.ML.AutoML/Utils/StringEditDistance.cs @@ -1,4 +1,8 @@ -using System; +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using System; namespace Microsoft.ML.AutoML.Utils { diff --git a/src/Microsoft.ML.CodeGenerator/CodeGenerator/CSharp/AzureCodeGenerator/AzureAttachConsoleAppCodeGenerator.cs b/src/Microsoft.ML.CodeGenerator/CodeGenerator/CSharp/AzureCodeGenerator/AzureAttachConsoleAppCodeGenerator.cs index f4cdf5c47c..854fb559e0 100644 --- a/src/Microsoft.ML.CodeGenerator/CodeGenerator/CSharp/AzureCodeGenerator/AzureAttachConsoleAppCodeGenerator.cs +++ b/src/Microsoft.ML.CodeGenerator/CodeGenerator/CSharp/AzureCodeGenerator/AzureAttachConsoleAppCodeGenerator.cs @@ -76,7 +76,7 @@ public AzureAttachConsoleAppCodeGenerator(Pipeline pipeline, ColumnInferenceResu IncludeRecommenderPackage = false, StablePackageVersion = _settings.StablePackageVersion, UnstablePackageVersion = _settings.UnstablePackageVersion, - OnnxRuntimePackageVersion = _settings.OnnxRuntimePacakgeVersion, + OnnxRuntimePackageVersion = _settings.OnnxRuntimePackageVersion, }.TransformText(), Name = $"{_settings.OutputName}.ConsoleApp.csproj", }; diff --git a/src/Microsoft.ML.CodeGenerator/CodeGenerator/CSharp/AzureCodeGenerator/AzureAttachModelCodeGenerator.cs b/src/Microsoft.ML.CodeGenerator/CodeGenerator/CSharp/AzureCodeGenerator/AzureAttachModelCodeGenerator.cs index d2f7e1393b..496fcb0f3f 100644 --- a/src/Microsoft.ML.CodeGenerator/CodeGenerator/CSharp/AzureCodeGenerator/AzureAttachModelCodeGenerator.cs +++ b/src/Microsoft.ML.CodeGenerator/CodeGenerator/CSharp/AzureCodeGenerator/AzureAttachModelCodeGenerator.cs @@ -92,7 +92,7 @@ public AzureAttachModelCodeGenerator(Pipeline pipeline, ColumnInferenceResults c IncludeRecommenderPackage = false, StablePackageVersion = _settings.StablePackageVersion, UnstablePackageVersion = _settings.UnstablePackageVersion, - OnnxRuntimePackageVersion = _settings.OnnxRuntimePacakgeVersion, + OnnxRuntimePackageVersion = _settings.OnnxRuntimePackageVersion, Target = _settings.Target, }.TransformText(), Name = $"{ _settings.OutputName }.Model.csproj", diff --git a/src/Microsoft.ML.CodeGenerator/CodeGenerator/CSharp/CodeGenerator.cs b/src/Microsoft.ML.CodeGenerator/CodeGenerator/CSharp/CodeGenerator.cs index a9033ad5ff..85efcb303f 100644 --- a/src/Microsoft.ML.CodeGenerator/CodeGenerator/CSharp/CodeGenerator.cs +++ b/src/Microsoft.ML.CodeGenerator/CodeGenerator/CSharp/CodeGenerator.cs @@ -285,7 +285,7 @@ internal IList GenerateClassLabels(IDictionary _open; + private readonly List _open; private bool _disposed; @@ -435,11 +435,11 @@ private static string GetProductVersion() [BestFriend] internal sealed class RepositoryReader : Repository { - private ZipArchive _archive; + private readonly ZipArchive _archive; // Maps from a normalized path to the entry in the _archive. This is needed since // a zip might use / or \ for directory separation. 
- private Dictionary _entries; + private readonly Dictionary _entries; public static RepositoryReader Open(Stream stream, IExceptionContext ectx = null, bool useFileSystem = true) { diff --git a/src/Microsoft.ML.Core/Environment/ConsoleEnvironment.cs b/src/Microsoft.ML.Core/Environment/ConsoleEnvironment.cs index 255c198161..9db31b9750 100644 --- a/src/Microsoft.ML.Core/Environment/ConsoleEnvironment.cs +++ b/src/Microsoft.ML.Core/Environment/ConsoleEnvironment.cs @@ -352,7 +352,7 @@ protected override void Dispose(bool disposing) private readonly MessageSensitivity _sensitivityFlags; // This object is used to write to the test log along with the console if the host process is a test environment - private TextWriter _testWriter; + private readonly TextWriter _testWriter; /// /// Create an ML.NET for local execution, with console feedback. diff --git a/src/Microsoft.ML.Core/Utilities/BinFinder.cs b/src/Microsoft.ML.Core/Utilities/BinFinder.cs index ed516fe8ff..ed4265f356 100644 --- a/src/Microsoft.ML.Core/Utilities/BinFinder.cs +++ b/src/Microsoft.ML.Core/Utilities/BinFinder.cs @@ -13,7 +13,7 @@ internal abstract class BinFinderBase { private Single[] _valuesSng; // distinct values private Double[] _valuesDbl; // distinct values - private List _counts; // counts for each value + private readonly List _counts; // counts for each value private int[] _path; // current set of pegs protected int CountBins { get; private set; } // number of bins @@ -314,8 +314,8 @@ public Peg(int index, int split) } } - private Heap _segmentHeap; // heap used for dropping initial peg placement - private HeapNode.Heap _pegHeap; // heap used for selecting the largest energy decrease + private readonly Heap _segmentHeap; // heap used for dropping initial peg placement + private readonly HeapNode.Heap _pegHeap; // heap used for selecting the largest energy decrease private int[] _accum; // integral of counts private int[] _path; // current set of pegs private float _meanBinSize; diff --git a/src/Microsoft.ML.Core/Utilities/DoubleParser.cs b/src/Microsoft.ML.Core/Utilities/DoubleParser.cs index 53b885a518..529d4016dc 100644 --- a/src/Microsoft.ML.Core/Utilities/DoubleParser.cs +++ b/src/Microsoft.ML.Core/Utilities/DoubleParser.cs @@ -732,7 +732,7 @@ private static bool TryParseCore(ReadOnlySpan span, ref int ich, ref bool // Map from base-10 exponent to 64-bit mantissa. // The approximation for 10^n is _mpe10man[n-1] * 2^(_mpe10e2[n-1]-64). - private static ulong[] _mpe10Man = new ulong[] { + private static readonly ulong[] _mpe10Man = new ulong[] { 0xA000000000000000UL, 0xC800000000000000UL, 0xFA00000000000000UL, 0x9C40000000000000UL, 0xC350000000000000UL, /*005*/ 0xF424000000000000UL, 0x9896800000000000UL, 0xBEBC200000000000UL, 0xEE6B280000000000UL, 0x9502F90000000000UL, /*010*/ 0xBA43B74000000000UL, 0xE8D4A51000000000UL, 0x9184E72A00000000UL, 0xB5E620F480000000UL, 0xE35FA931A0000000UL, /*015*/ @@ -801,7 +801,7 @@ private static bool TryParseCore(ReadOnlySpan span, ref int ich, ref bool // Map from negative base-10 exponent to 64-bit mantissa. Note that the top bit of these is set. // The approximation for 10^-n is _mpne10man[n-1] * 2^(-_mpne10ne2[n-1]-64). 
- private static ulong[] _mpne10Man = new ulong[] { + private static readonly ulong[] _mpne10Man = new ulong[] { 0xCCCCCCCCCCCCCCCDUL, 0xA3D70A3D70A3D70AUL, 0x83126E978D4FDF3BUL, 0xD1B71758E219652CUL, 0xA7C5AC471B478423UL, /*005*/ 0x8637BD05AF6C69B6UL, 0xD6BF94D5E57A42BCUL, 0xABCC77118461CEFDUL, 0x89705F4136B4A597UL, 0xDBE6FECEBDEDD5BFUL, /*010*/ 0xAFEBFF0BCB24AAFFUL, 0x8CBCCC096F5088CCUL, 0xE12E13424BB40E13UL, 0xB424DC35095CD80FUL, 0x901D7CF73AB0ACD9UL, /*015*/ @@ -878,7 +878,7 @@ private static bool TryParseCore(ReadOnlySpan span, ref int ich, ref bool // Map from base-10 exponent to base-2 exponent. // The approximation for 10^n is _mpe10man[n-1] * 2^(_mpe10e2[n-1]-64). - private static short[] _mpe10e2 = new short[] { + private static readonly short[] _mpe10e2 = new short[] { 4, 7, 10, 14, 17, 20, 24, 27, 30, 34, 37, 40, 44, 47, 50, 54, 57, 60, 64, 67, /*020*/ 70, 74, 77, 80, 84, 87, 90, 94, 97, 100, 103, 107, 110, 113, 117, 120, 123, 127, 130, 133, /*040*/ 137, 140, 143, 147, 150, 153, 157, 160, 163, 167, 170, 173, 177, 180, 183, 187, 190, 193, 196, 200, /*060*/ diff --git a/src/Microsoft.ML.Core/Utilities/Stats.cs b/src/Microsoft.ML.Core/Utilities/Stats.cs index d4fc33c626..d42156fff0 100644 --- a/src/Microsoft.ML.Core/Utilities/Stats.cs +++ b/src/Microsoft.ML.Core/Utilities/Stats.cs @@ -42,7 +42,7 @@ public static long SampleLong(long rangeSize, Random rand) return result; } - private static double _vScale = 2 * Math.Sqrt(2 / Math.E); + private static readonly double _vScale = 2 * Math.Sqrt(2 / Math.E); /// /// Returns a number sampled from a zero-mean, unit variance Gaussian @@ -242,7 +242,7 @@ private static class BinoRand // n*p at which we switch algorithms private const int NPThresh = 10; - private static double[] _fctab = new double[] { + private static readonly double[] _fctab = new double[] { 0.08106146679532726, 0.04134069595540929, 0.02767792568499834, 0.02079067210376509, 0.01664469118982119, 0.01387612882307075, 0.01189670994589177, 0.01041126526197209, 0.009255462182712733, diff --git a/src/Microsoft.ML.Core/Utilities/TextReaderStream.cs b/src/Microsoft.ML.Core/Utilities/TextReaderStream.cs index beef79eca6..f4942a3cd7 100644 --- a/src/Microsoft.ML.Core/Utilities/TextReaderStream.cs +++ b/src/Microsoft.ML.Core/Utilities/TextReaderStream.cs @@ -28,7 +28,7 @@ internal sealed class TextReaderStream : Stream private string _line; private int _lineCur; - private byte[] _buff; + private readonly byte[] _buff; private int _buffCur; private int _buffLim; private bool _eof; diff --git a/src/Microsoft.ML.CpuMath/AlignedArray.cs b/src/Microsoft.ML.CpuMath/AlignedArray.cs index 71e9db2e4c..b9d3b53b06 100644 --- a/src/Microsoft.ML.CpuMath/AlignedArray.cs +++ b/src/Microsoft.ML.CpuMath/AlignedArray.cs @@ -28,7 +28,7 @@ internal sealed class AlignedArray private readonly int _cbAlign; // The alignment in bytes, a power of two, divisible by sizeof(Float). private int _base; // Where the values start in Items (changes to ensure alignment). - private object _lock; // Used to make sure only one thread can re-align the values. + private readonly object _lock; // Used to make sure only one thread can re-align the values. /// /// Allocate an aligned vector with the given alignment (in bytes). 
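In the OptimizingMetricInfo, Stats, and DoubleParser hunks above, the static fields become static readonly rather than const: a value such as 2 * Math.Sqrt(2 / Math.E) is computed at run time, and array initializers can never be compile-time constants, so const is not available. A small sketch of that distinction under illustrative names (only _vScale echoes a name from the diff):

    using System;

    internal static class Scales
    {
        // A literal can be 'const': baked in at compile time.
        private const double Half = 0.5;

        // Computed once when the type is initialized; 'const' would not compile here.
        private static readonly double _vScale = 2 * Math.Sqrt(2 / Math.E);

        // Arrays can never be 'const'; 'readonly' fixes the reference instead.
        private static readonly double[] _powersOfTen = { 1.0, 10.0, 100.0, 1000.0 };

        public static double Scale(int exponent) => _powersOfTen[exponent] * _vScale + Half;
    }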
diff --git a/src/Microsoft.ML.Data/Data/DataViewTypeManager.cs b/src/Microsoft.ML.Data/Data/DataViewTypeManager.cs index 1517c33128..105a922ec5 100644 --- a/src/Microsoft.ML.Data/Data/DataViewTypeManager.cs +++ b/src/Microsoft.ML.Data/Data/DataViewTypeManager.cs @@ -23,7 +23,7 @@ public static class DataViewTypeManager /// For example, UInt32 and Key can be mapped to . This class enforces one-to-one mapping for all /// user-registered types. /// - private static HashSet _bannedRawTypes = new HashSet() + private static readonly HashSet _bannedRawTypes = new HashSet() { typeof(Boolean), typeof(SByte), typeof(Byte), typeof(Int16), typeof(UInt16), typeof(Int32), typeof(UInt32), @@ -36,17 +36,17 @@ public static class DataViewTypeManager /// /// Mapping from a plus its s to a . /// - private static Dictionary _rawTypeToDataViewTypeMap = new Dictionary(); + private static readonly Dictionary _rawTypeToDataViewTypeMap = new Dictionary(); /// /// Mapping from a to a plus its s. /// - private static Dictionary _dataViewTypeToRawTypeMap = new Dictionary(); + private static readonly Dictionary _dataViewTypeToRawTypeMap = new Dictionary(); /// /// The lock that one should acquire if the state of will be accessed or modified. /// - private static object _lock = new object(); + private static readonly object _lock = new object(); /// /// Returns the registered for and its . @@ -228,7 +228,7 @@ private class TypeWithAttributes /// a key when using as the key type in . Note that the /// uniqueness is determined by and below. /// - private DataViewTypeAttribute _associatedAttribute; + private readonly DataViewTypeAttribute _associatedAttribute; public TypeWithAttributes(Type type, DataViewTypeAttribute attribute) { diff --git a/src/Microsoft.ML.Data/DataLoadSave/Binary/BinarySaver.cs b/src/Microsoft.ML.Data/DataLoadSave/Binary/BinarySaver.cs index 70f27303ea..324e31394a 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Binary/BinarySaver.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Binary/BinarySaver.cs @@ -107,8 +107,8 @@ public static WritePipe Create(BinarySaver parent, DataViewRowCursor cursor, Col private sealed class WritePipe : WritePipe { - private ValueGetter _getter; - private IValueCodec _codec; + private readonly ValueGetter _getter; + private readonly IValueCodec _codec; private IValueWriter _writer; private MemoryStream _currentStream; private T _value; diff --git a/src/Microsoft.ML.Data/DataLoadSave/Binary/Codecs.cs b/src/Microsoft.ML.Data/DataLoadSave/Binary/Codecs.cs index 5c9be86b3a..0b07b4bad4 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Binary/Codecs.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Binary/Codecs.cs @@ -313,7 +313,7 @@ public override IValueReader> OpenReader(Stream stream, int private sealed class Writer : ValueWriterBase> { private StringBuilder _builder; - private List _boundaries; + private readonly List _boundaries; public Writer(TextCodec codec, Stream stream) : base(codec.Factory, stream) @@ -353,7 +353,7 @@ private sealed class Reader : ValueReaderBase> private readonly int _entries; private readonly int[] _boundaries; private int _index; - private string _text; + private readonly string _text; public Reader(TextCodec codec, Stream stream, int items) : base(codec.Factory, stream) diff --git a/src/Microsoft.ML.Data/DataLoadSave/Binary/UnsafeTypeOps.cs b/src/Microsoft.ML.Data/DataLoadSave/Binary/UnsafeTypeOps.cs index 1e0757af09..e43a50aee8 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Binary/UnsafeTypeOps.cs +++ 
b/src/Microsoft.ML.Data/DataLoadSave/Binary/UnsafeTypeOps.cs @@ -25,7 +25,7 @@ internal abstract class UnsafeTypeOps internal static class UnsafeTypeOpsFactory { - private static Dictionary _type2ops; + private static readonly Dictionary _type2ops; static UnsafeTypeOpsFactory() { diff --git a/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoaderParser.cs b/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoaderParser.cs index 241b40ea72..db005659da 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoaderParser.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Text/TextLoaderParser.cs @@ -249,7 +249,7 @@ private sealed class PrimitivePipe : ColumnPipe private readonly TryParseMapper _conv; // Has length Rows.Count, so indexed by irow. - private TResult[] _values; + private readonly TResult[] _values; public override bool HasNA { get; } @@ -436,7 +436,7 @@ public void Get(ref VBuffer dst) } // Has length Rows.Count, so indexed by irow. - private VectorValue[] _values; + private readonly VectorValue[] _values; public VectorPipe(RowSet rows, PrimitiveDataViewType type, TryParseMapper conv) : base(rows) diff --git a/src/Microsoft.ML.Data/DataLoadSave/Text/TextSaver.cs b/src/Microsoft.ML.Data/DataLoadSave/Text/TextSaver.cs index d0b212f8d8..0c10cb2b8e 100644 --- a/src/Microsoft.ML.Data/DataLoadSave/Text/TextSaver.cs +++ b/src/Microsoft.ML.Data/DataLoadSave/Text/TextSaver.cs @@ -221,7 +221,7 @@ private sealed class ValueWriter : ValueWriterBase { private readonly ValueGetter _getSrc; private T _src; - private string _columnName; + private readonly string _columnName; public ValueWriter(DataViewRowCursor cursor, PrimitiveDataViewType type, int source, char sep) : base(type, source, sep) @@ -535,8 +535,8 @@ private sealed class State private int _dstPrev; // Map from column to starting destination index and slot. - private int[] _mpcoldst; - private int[] _mpcolslot; + private readonly int[] _mpcoldst; + private readonly int[] _mpcolslot; // "slot" is an index into _mpslotdst and _mpslotichLim. _mpslotdst is the sequence of // destination indices. _mpslotichLim is the sequence of upper bounds on the characters diff --git a/src/Microsoft.ML.Data/DataView/CompositeRowToRowMapper.cs b/src/Microsoft.ML.Data/DataView/CompositeRowToRowMapper.cs index 6e9aa55542..a6ce7d516a 100644 --- a/src/Microsoft.ML.Data/DataView/CompositeRowToRowMapper.cs +++ b/src/Microsoft.ML.Data/DataView/CompositeRowToRowMapper.cs @@ -89,7 +89,7 @@ DataViewRow IRowToRowMapper.GetRow(DataViewRow input, IEnumerable _pred; + private readonly Func _pred; public SubsetActive(DataViewRow row, Func pred) { diff --git a/src/Microsoft.ML.Data/DataView/DataViewConstructionUtils.cs b/src/Microsoft.ML.Data/DataView/DataViewConstructionUtils.cs index b05d785ff3..d04185cc9d 100644 --- a/src/Microsoft.ML.Data/DataView/DataViewConstructionUtils.cs +++ b/src/Microsoft.ML.Data/DataView/DataViewConstructionUtils.cs @@ -686,7 +686,7 @@ protected override bool MoveNextCore() internal sealed class StreamingDataView : DataViewBase where TRow : class { - private IEnumerable _data; + private readonly IEnumerable _data; public StreamingDataView(IHostEnvironment env, IEnumerable data, InternalSchemaDefinition schemaDefn) : base(env, "StreamingDataView", schemaDefn) @@ -936,7 +936,7 @@ public override ValueGetter GetGetter() if (AnnotationType is VectorDataViewType annotationVectorType) { // VBuffer -> VBuffer - // REVIEW: Do we care about accomodating VBuffer -> VBuffer>? + // REVIEW: Do we care about accommodating VBuffer -> VBuffer>? 
Contracts.Assert(typeT.IsGenericType); Contracts.Check(typeof(TDst).IsGenericType); diff --git a/src/Microsoft.ML.Data/Dirty/PredictionUtils.cs b/src/Microsoft.ML.Data/Dirty/PredictionUtils.cs index 4f6275d6bc..4ab721898c 100644 --- a/src/Microsoft.ML.Data/Dirty/PredictionUtils.cs +++ b/src/Microsoft.ML.Data/Dirty/PredictionUtils.cs @@ -58,7 +58,7 @@ public static string CombineSettings(string[] settings, string[] extraSettings = return CmdParser.CombineSettings(settings) + " " + CmdParser.CombineSettings(SplitOnSemis(extraSettings)); } - private static char[] _dontSplitChars = new char[] { ' ', '=', '{', '}', '\t' }; + private static readonly char[] _dontSplitChars = new char[] { ' ', '=', '{', '}', '\t' }; // REVIEW: Deprecate this! public static string[] SplitOnSemis(string[] args) diff --git a/src/Microsoft.ML.Data/EntryPoints/EntryPointNode.cs b/src/Microsoft.ML.Data/EntryPoints/EntryPointNode.cs index b871f7ad99..ae7948aaa0 100644 --- a/src/Microsoft.ML.Data/EntryPoints/EntryPointNode.cs +++ b/src/Microsoft.ML.Data/EntryPoints/EntryPointNode.cs @@ -427,7 +427,7 @@ internal sealed class EntryPointNode public TimeSpan RunTime { get; internal set; } - private static Regex _stageIdRegex = new Regex(@"[a-zA-Z0-9]*", RegexOptions.Compiled); + private static readonly Regex _stageIdRegex = new Regex(@"[a-zA-Z0-9]*", RegexOptions.Compiled); private string _stageId; /// /// An alphanumeric string indicating the stage of a node. @@ -1060,7 +1060,7 @@ protected VariableBinding(string varName) // An EntryPoint variable can be followed with an array or dictionary specifier, which begins // with '[', contains either an integer or alphanumeric string, optionally wrapped in single-quotes, // followed with ']'. - private static Regex _variableRegex = new Regex( + private static readonly Regex _variableRegex = new Regex( @"\$(?[a-zA-Z_][a-zA-Z0-9_]*)(\[(((?[0-9]*))|(\'?(?[a-zA-Z0-9_]*)\'?))\])?", RegexOptions.Compiled); @@ -1193,7 +1193,7 @@ public override string ToJson() /// /// Represents the l-value assignable destination of a . 
- /// Subclasses exist to express the needed bindinds for subslots + /// Subclasses exist to express the needed bindings for subslots /// of a yet-to-be-constructed array or dictionary EntryPoint input parameter /// (for example, "myVar": ["$var1", "$var2"] would yield two : (myVar, 0), (myVar, 1)) /// diff --git a/src/Microsoft.ML.Data/Evaluators/QuantileRegressionEvaluator.cs b/src/Microsoft.ML.Data/Evaluators/QuantileRegressionEvaluator.cs index 53410ce4e5..d2529e6157 100644 --- a/src/Microsoft.ML.Data/Evaluators/QuantileRegressionEvaluator.cs +++ b/src/Microsoft.ML.Data/Evaluators/QuantileRegressionEvaluator.cs @@ -210,7 +210,7 @@ protected override VBuffer Zero() private readonly Counters _counters; private readonly Counters _weightedCounters; - private VBuffer> _slotNames; + private readonly VBuffer> _slotNames; public override CountersBase UnweightedCounters { get { return _counters; } } diff --git a/src/Microsoft.ML.Data/Prediction/Calibrator.cs b/src/Microsoft.ML.Data/Prediction/Calibrator.cs index d8874ead71..6efa48b778 100644 --- a/src/Microsoft.ML.Data/Prediction/Calibrator.cs +++ b/src/Microsoft.ML.Data/Prediction/Calibrator.cs @@ -1069,8 +1069,8 @@ internal sealed class NaiveCalibratorTrainer : ICalibratorTrainer { private readonly IHost _host; - private List _cMargins; - private List _ncMargins; + private readonly List _cMargins; + private readonly List _ncMargins; public int NumBins; public float BinSize; @@ -1846,7 +1846,7 @@ public ICalibratorTrainer CreateComponent(IHostEnvironment env) [BestFriend] internal sealed class PavCalibratorTrainer : CalibratorTrainerBase { - // a piece of the piecwise function + // a piece of the piecewise function private readonly struct Piece { public readonly float MinX; // end of interval. diff --git a/src/Microsoft.ML.Data/Prediction/CalibratorCatalog.cs b/src/Microsoft.ML.Data/Prediction/CalibratorCatalog.cs index a1de8d2e5d..836f428796 100644 --- a/src/Microsoft.ML.Data/Prediction/CalibratorCatalog.cs +++ b/src/Microsoft.ML.Data/Prediction/CalibratorCatalog.cs @@ -69,7 +69,7 @@ public abstract class CalibratorEstimatorBase : IEstimator : MapperBase, ISaveAsOnnx where TCalibrator : class, ICalibrator { - private TCalibrator _calibrator; + private readonly TCalibrator _calibrator; private readonly int _scoreColIndex; - private CalibratorTransformer _parent; - private string _scoreColumnName; + private readonly CalibratorTransformer _parent; + private readonly string _scoreColumnName; bool ICanSaveOnnx.CanSaveOnnx(OnnxContext ctx) => _calibrator is ICanSaveOnnx onnxMapper ? onnxMapper.CanSaveOnnx(ctx) : false; @@ -294,7 +294,7 @@ protected override DataViewSchema.DetachedColumn[] GetOutputColumnsCore() builder.Add(setIdCol.Name, setIdType, annotation.GetGetter(setIdCol)); // Now, this next one I'm a little less sure about. It is entirely reasonable for someone to, say, // try to calibrate the result of a regression or ranker training, or something else. But should we - // just pass through this class just like that? Having throught through the alternatives I view this + // just pass through this class just like that? Having thought through the alternatives I view this // as the least harmful thing we could be doing, but it is something to consider I may be wrong // about if it proves that it ever causes problems to, say, have something identified as a probability // column but be marked as being a regression task, or what have you. 
@@ -415,7 +415,7 @@ internal PlattCalibratorTransformer(IHostEnvironment env, ModelLoadContext ctx) } /// - /// The naive binning-based calbirator estimator. + /// The naive binning-based calibrator estimator. /// /// /// It divides the range of the outputs into equally sized bins. In each bin, diff --git a/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculation.cs b/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculation.cs index 860f4c0774..b21de997c4 100644 --- a/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculation.cs +++ b/src/Microsoft.ML.Data/Scorers/FeatureContributionCalculation.cs @@ -299,7 +299,7 @@ private sealed class RowMapper : ISchemaBoundRowMapper private readonly BindableMapper _parent; private readonly DataViewSchema _outputSchema; private readonly DataViewSchema _outputGenericSchema; - private VBuffer> _slotNames; + private readonly VBuffer> _slotNames; public RoleMappedSchema InputRoleMappedSchema { get; } diff --git a/src/Microsoft.ML.Data/Transforms/ColumnSelecting.cs b/src/Microsoft.ML.Data/Transforms/ColumnSelecting.cs index 16937e0612..7f8788a546 100644 --- a/src/Microsoft.ML.Data/Transforms/ColumnSelecting.cs +++ b/src/Microsoft.ML.Data/Transforms/ColumnSelecting.cs @@ -90,7 +90,7 @@ private ColumnSelectingEstimator(IHostEnvironment env, params string[] keepColum /// If true will keep hidden columns and false will remove hidden columns. The argument is /// ignored if the Estimator is in "drop mode". /// If false will check for any columns given in - /// or that are missing from the input. If a missing colums exists a + /// or that are missing from the input. If a missing column exists a /// SchemaMistmatch exception is thrown. If true, the check is not made. internal ColumnSelectingEstimator(IHostEnvironment env, string[] keepColumns, string[] dropColumns, bool keepHidden = Defaults.KeepHidden, @@ -160,7 +160,7 @@ public sealed class ColumnSelectingTransformer : ITransformer internal const string ChooseLoaderSignatureOld = "ChooseColumnsFunction"; private readonly IHost _host; - private string[] _selectedColumns; + private readonly string[] _selectedColumns; bool ITransformer.IsRowToRowMapper => true; @@ -697,7 +697,7 @@ public DataViewRowCursor[] GetRowCursorSet(IEnumerable co var inputCols = ((IRowToRowMapper)this).GetDependencies(columnsNeeded); var inputs = Source.GetRowCursorSet(inputCols, n, rand); - // Build out the acitve state for the output + // Build out the active state for the output var active = Utils.BuildArray(_mapper.OutputSchema.Count, columnsNeeded); _host.AssertNonEmpty(inputs); diff --git a/src/Microsoft.ML.Data/Transforms/NormalizeColumnDbl.cs b/src/Microsoft.ML.Data/Transforms/NormalizeColumnDbl.cs index a89bc62b09..26b6b35c05 100644 --- a/src/Microsoft.ML.Data/Transforms/NormalizeColumnDbl.cs +++ b/src/Microsoft.ML.Data/Transforms/NormalizeColumnDbl.cs @@ -1830,7 +1830,7 @@ public sealed class BinOneColumnFunctionBuilder : OneColumnFunctionBuilderBase _values; + private readonly List _values; private BinOneColumnFunctionBuilder(IHost host, long lim, bool fix, int numBins, ValueGetter getSrc) : base(host, lim, getSrc) @@ -1874,7 +1874,7 @@ public sealed class BinVecColumnFunctionBuilder : VecColumnFunctionBuilderBase[] _values; + private readonly List[] _values; private BinVecColumnFunctionBuilder(IHost host, int cv, long lim, bool fix, int numBins, ValueGetter> getSrc) @@ -2097,7 +2097,7 @@ private RobustScalerVecFunctionBuilder(IHost host, long lim, int vectorSize, boo // The difference is how the min/max are used. 
_minMaxAggregator = new MinMaxDblAggregator(vectorSize); - // If we aren't centering data dont need the median. + // If we aren't centering data don't need the median. _medianAggregators = new MedianDblAggregator[vectorSize]; for (int i = 0; i < vectorSize; i++) diff --git a/src/Microsoft.ML.Data/Transforms/NormalizeColumnSng.cs b/src/Microsoft.ML.Data/Transforms/NormalizeColumnSng.cs index 849bfed257..861cfd7368 100644 --- a/src/Microsoft.ML.Data/Transforms/NormalizeColumnSng.cs +++ b/src/Microsoft.ML.Data/Transforms/NormalizeColumnSng.cs @@ -1993,7 +1993,7 @@ public sealed class BinOneColumnFunctionBuilder : OneColumnFunctionBuilderBase _values; + private readonly List _values; private BinOneColumnFunctionBuilder(IHost host, long lim, bool fix, int numBins, ValueGetter getSrc) : base(host, lim, getSrc) @@ -2037,7 +2037,7 @@ public sealed class BinVecColumnFunctionBuilder : VecColumnFunctionBuilderBase[] _values; + private readonly List[] _values; private BinVecColumnFunctionBuilder(IHost host, int cv, long lim, bool fix, int numBins, ValueGetter> getSrc) diff --git a/src/Microsoft.ML.Data/Transforms/Normalizer.cs b/src/Microsoft.ML.Data/Transforms/Normalizer.cs index 8594093d0c..325fb6519c 100644 --- a/src/Microsoft.ML.Data/Transforms/Normalizer.cs +++ b/src/Microsoft.ML.Data/Transforms/Normalizer.cs @@ -714,7 +714,7 @@ internal new IDataTransform MakeDataTransform(IDataView input) private sealed class Mapper : OneToOneMapperBase, ISaveAsOnnx, ISaveAsPfa { - private NormalizingTransformer _parent; + private readonly NormalizingTransformer _parent; public bool CanSaveOnnx(OnnxContext ctx) => true; public bool CanSavePfa => true; diff --git a/src/Microsoft.ML.Data/Transforms/ValueMapping.cs b/src/Microsoft.ML.Data/Transforms/ValueMapping.cs index b04334f472..3a217735ca 100644 --- a/src/Microsoft.ML.Data/Transforms/ValueMapping.cs +++ b/src/Microsoft.ML.Data/Transforms/ValueMapping.cs @@ -186,7 +186,7 @@ internal ValueMappingEstimator(IHostEnvironment env, IDataView lookupMap, DataVi internal class DataViewHelper { /// - /// Helper function to retrieve the Primitie type given a Type + /// Helper function to retrieve the Primitive type given a Type /// internal static PrimitiveDataViewType GetPrimitiveType(Type rawType, out bool isVectorType) { @@ -359,7 +359,7 @@ public class ValueMappingTransformer : OneToOneTransformerBase private const string DefaultMapName = "DefaultMap.idv"; internal static string DefaultKeyColumnName = "Key"; internal static string DefaultValueColumnName = "Value"; - private ValueMap _valueMap; + private readonly ValueMap _valueMap; private readonly byte[] _dataView; internal DataViewType ValueColumnType => _valueMap.ValueColumn.Type; @@ -834,7 +834,7 @@ private class ValueMap : ValueMap private static readonly FuncStaticMethodInfo1 _getValueMethodInfo = new FuncStaticMethodInfo1(GetValue); - private Dictionary _mapping; + private readonly Dictionary _mapping; private TValue _missingValue; private Dictionary CreateDictionary() diff --git a/src/Microsoft.ML.Ensemble/OutputCombiners/WeightedAverage.cs b/src/Microsoft.ML.Ensemble/OutputCombiners/WeightedAverage.cs index 35bd79b141..c7ce6e1dd6 100644 --- a/src/Microsoft.ML.Ensemble/OutputCombiners/WeightedAverage.cs +++ b/src/Microsoft.ML.Ensemble/OutputCombiners/WeightedAverage.cs @@ -46,7 +46,7 @@ public sealed class Options : ISupportBinaryOutputCombinerFactory public IBinaryOutputCombiner CreateComponent(IHostEnvironment env) => new WeightedAverage(env, this); } - private WeightageKind _weightageKind; + private 
readonly WeightageKind _weightageKind; public string WeightageMetricName { get { return _weightageKind.ToString(); } } diff --git a/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BaseDiverseSelector.cs b/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BaseDiverseSelector.cs index d82629b950..b6384fa504 100644 --- a/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BaseDiverseSelector.cs +++ b/src/Microsoft.ML.Ensemble/Selector/SubModelSelector/BaseDiverseSelector.cs @@ -19,7 +19,7 @@ public abstract class DiverseSelectorArguments : ArgumentsBase } private readonly IComponentFactory> _diversityMetricType; - private ConcurrentDictionary, TOutput[]> _predictions; + private readonly ConcurrentDictionary, TOutput[]> _predictions; private protected BaseDiverseSelector(IHostEnvironment env, DiverseSelectorArguments args, string name, IComponentFactory> diversityMetricType) diff --git a/src/Microsoft.ML.FastTree/Dataset/Dataset.cs b/src/Microsoft.ML.FastTree/Dataset/Dataset.cs index 63008267b7..cd9e338511 100644 --- a/src/Microsoft.ML.FastTree/Dataset/Dataset.cs +++ b/src/Microsoft.ML.FastTree/Dataset/Dataset.cs @@ -420,12 +420,12 @@ public DatasetSkeletonQueryDocData GetSubset(int[] docArray) /// public sealed class DatasetSkeleton { - private short[] _ratings; + private readonly short[] _ratings; public readonly int[] Boundaries; public readonly ulong[] QueryIds; public readonly ulong[] DocIds; public double[][] MaxDcg; - private int[] _docToQuery; + private readonly int[] _docToQuery; public Dictionary AuxiliaryData { get; set; } diff --git a/src/Microsoft.ML.FastTree/Dataset/DenseIntArray.cs b/src/Microsoft.ML.FastTree/Dataset/DenseIntArray.cs index 7157ac361e..6e3faac9a0 100644 --- a/src/Microsoft.ML.FastTree/Dataset/DenseIntArray.cs +++ b/src/Microsoft.ML.FastTree/Dataset/DenseIntArray.cs @@ -205,7 +205,7 @@ internal sealed class Dense10BitIntArray : DenseIntArray { private const int _bits = 10; private const int _mask = (1 << _bits) - 1; - private uint[] _data; + private readonly uint[] _data; public override IntArrayBits BitsPerItem { get { return IntArrayBits.Bits10; } } @@ -381,7 +381,7 @@ public override unsafe void Sumup(SumupInputData input, FeatureHistogram histogr /// 0-bit array only represents the value -1 internal sealed class Dense8BitIntArray : DenseDataCallbackIntArray { - private byte[] _data; + private readonly byte[] _data; public override IntArrayBits BitsPerItem { get { return IntArrayBits.Bits8; } } @@ -468,7 +468,7 @@ internal sealed class Dense4BitIntArray : DenseIntArray /// /// For a given byte, the high 4 bits is the first value, the low 4 bits is the next value. 
/// - private byte[] _data; + private readonly byte[] _data; public override IntArrayBits BitsPerItem { get { return IntArrayBits.Bits4; } } @@ -588,7 +588,7 @@ public void SumupNative(SumupInputData input, FeatureHistogram histogram) /// internal sealed class Dense16BitIntArray : DenseDataCallbackIntArray { - private ushort[] _data; + private readonly ushort[] _data; public override IntArrayBits BitsPerItem { get { return IntArrayBits.Bits16; } } @@ -677,7 +677,7 @@ public void SumupNative(SumupInputData input, FeatureHistogram histogram) /// internal sealed class Dense32BitIntArray : DenseDataCallbackIntArray { - private int[] _data; + private readonly int[] _data; public override IntArrayBits BitsPerItem { get { return IntArrayBits.Bits32; } } diff --git a/src/Microsoft.ML.FastTree/Dataset/RepeatIntArray.cs b/src/Microsoft.ML.FastTree/Dataset/RepeatIntArray.cs index c7f4304e8a..6c0aa87591 100644 --- a/src/Microsoft.ML.FastTree/Dataset/RepeatIntArray.cs +++ b/src/Microsoft.ML.FastTree/Dataset/RepeatIntArray.cs @@ -19,7 +19,7 @@ internal sealed class DeltaRepeatIntArray : IntArray { private readonly DenseIntArray _values; private readonly int _length; - private byte[] _deltas; + private readonly byte[] _deltas; private readonly int _deltasActualLength; public DeltaRepeatIntArray(int length, IntArrayBits bitsPerItem, IEnumerable values) diff --git a/src/Microsoft.ML.FastTree/FastTree.cs b/src/Microsoft.ML.FastTree/FastTree.cs index 65a8b0d68f..381b844bc1 100644 --- a/src/Microsoft.ML.FastTree/FastTree.cs +++ b/src/Microsoft.ML.FastTree/FastTree.cs @@ -2616,7 +2616,7 @@ public sealed class ForwardIndexer #if DEBUG // Holds for each feature the row index that it was previously accessed on. // Purely for validation purposes. - private int[] _lastRow; + private readonly int[] _lastRow; #endif /// diff --git a/src/Microsoft.ML.FastTree/FastTreeClassification.cs b/src/Microsoft.ML.FastTree/FastTreeClassification.cs index 66e34327da..78bcfdd914 100644 --- a/src/Microsoft.ML.FastTree/FastTreeClassification.cs +++ b/src/Microsoft.ML.FastTree/FastTreeClassification.cs @@ -140,7 +140,7 @@ public sealed partial class FastTreeBinaryTrainer : internal const string ShortName = "ftc"; private bool[] _trainSetLabels; - private double _sigmoidParameter; + private readonly double _sigmoidParameter; /// /// Initializes a new instance of @@ -325,7 +325,7 @@ internal sealed class ObjectiveImpl : ObjectiveFunctionBase, IStepSearch private readonly bool _unbalancedSets; //Should we use balanced or unbalanced loss function private readonly long _npos; private readonly long _nneg; - private IParallelTraining _parallelTraining; + private readonly IParallelTraining _parallelTraining; private readonly double _sigmoidParameter; // Parameter for scaling the loss public ObjectiveImpl( diff --git a/src/Microsoft.ML.FastTree/FastTreeRanking.cs b/src/Microsoft.ML.FastTree/FastTreeRanking.cs index 14a71e34ac..469c4a1762 100644 --- a/src/Microsoft.ML.FastTree/FastTreeRanking.cs +++ b/src/Microsoft.ML.FastTree/FastTreeRanking.cs @@ -88,7 +88,7 @@ public sealed partial class FastTreeRankingTrainer /// The name of the label column. /// The name of the feature column. /// The name for the column containing the group ID. - /// The name for the column containing the examle weight. + /// The name for the column containing the example weight. /// The max number of leaves in each regression tree. /// Total number of decision trees to create in the ensemble. 
/// The minimal number of examples allowed in a leaf of a regression tree, out of the subsampled data. @@ -266,7 +266,7 @@ private protected override void InitializeTests() { if (!FastTreeTrainerOptions.UseTolerantPruning) { - //use simple eraly stopping condition + //use simple early stopping condition PruningTest = new TestHistory(ValidTest, 0); } else @@ -363,7 +363,7 @@ private protected override string GetTestGraphLine() { // We only print non-zero train&valid graph if earlyStoppingTruncation!=0 // In case /es is not set, we print 0 for train and valid graph NDCG - // Let's keeping this behaviour for backward compatibility with previous FR version + // Let's keep this behavior for backward compatibility with previous FR version // Ideally /graphtv should enforce non-zero /es in the commandline validation if (_specialTrainSetTest != null) { @@ -385,7 +385,7 @@ private protected override void Train(IChannel ch) base.Train(ch); // Print final last iteration. // Note that trainNDCG printed in graph will be from copy of a value from previous iteration - // and will diffre slightly from the proper final value computed by FullTest. + // and will differ slightly from the proper final value computed by FullTest. // We cannot compute the final NDCG here due to the fact we use FastNDCGTestForTrainSet computing NDCG based on label sort saved during gradient computation (and we don;t have gradients for n+1 iteration) // Keeping it in sync with original FR code PrintTestGraph(ch); @@ -507,19 +507,19 @@ private enum DupeIdInfo private readonly double[] _discount; private readonly int[] _oneTwoThree; - private int[][] _labelCounts; + private readonly int[][] _labelCounts; // reusable memory, technical stuff - private int[][] _permutationBuffers; - private DcgPermutationComparer[] _comparers; + private readonly int[][] _permutationBuffers; + private readonly DcgPermutationComparer[] _comparers; //gains - private double[] _gain; + private readonly double[] _gain; private double[] _gainLabels; // parameters - private int _maxDcgTruncationLevel; - private bool _useDcg; + private readonly int _maxDcgTruncationLevel; + private readonly bool _useDcg; // A lookup table for the sigmoid used in the lambda calculation // Note: Is built for a specific sigmoid parameter, so assumes this will be constant throughout computation private double[] _sigmoidTable; @@ -533,9 +533,9 @@ private enum DupeIdInfo // Secondary gains, currently not used in any way. #pragma warning disable 0649 - private double _secondaryMetricShare; - private double[] _secondaryInverseMaxDcgt; - private double[] _secondaryGains; + private readonly double _secondaryMetricShare; + private readonly double[] _secondaryInverseMaxDcgt; + private readonly double[] _secondaryGains; #pragma warning restore 0649 // Baseline risk. @@ -546,19 +546,19 @@ private enum DupeIdInfo // 1. preprocessing the scores for continuous cost function // 2. shifted NDCG // 3. 
max DCG per query - private double[] _scoresCopy; - private short[] _labelsCopy; - private short[] _groupIdToTopLabel; + private readonly double[] _scoresCopy; + private readonly short[] _labelsCopy; + private readonly short[] _groupIdToTopLabel; // parameters - private double _sigmoidParam; - private char _costFunctionParam; - private bool _filterZeroLambdas; - - private bool _distanceWeight2; - private bool _normalizeQueryLambdas; - private bool _useShiftedNdcg; - private IParallelTraining _parallelTraining; + private readonly double _sigmoidParam; + private readonly char _costFunctionParam; + private readonly bool _filterZeroLambdas; + + private readonly bool _distanceWeight2; + private readonly bool _normalizeQueryLambdas; + private readonly bool _useShiftedNdcg; + private readonly IParallelTraining _parallelTraining; // Used for training NDCG calculation // Keeps track of labels of top 3 documents per query diff --git a/src/Microsoft.ML.FastTree/GamTrainer.cs b/src/Microsoft.ML.FastTree/GamTrainer.cs index c696e7e095..14f3bddf44 100644 --- a/src/Microsoft.ML.FastTree/GamTrainer.cs +++ b/src/Microsoft.ML.FastTree/GamTrainer.cs @@ -204,7 +204,7 @@ private protected GamTrainerBase(IHostEnvironment env, TOptions options, string Host.CheckParam(options.NumberOfThreads == null || options.NumberOfThreads > 0, nameof(options.NumberOfThreads), "Must be positive."); Host.CheckParam(0 <= options.EntropyCoefficient && options.EntropyCoefficient <= 1, nameof(options.EntropyCoefficient), "Must be in [0, 1]."); Host.CheckParam(0 <= options.GainConfidenceLevel && options.GainConfidenceLevel < 1, nameof(options.GainConfidenceLevel), "Must be in [0, 1)."); - Host.CheckParam(0 < options.MaximumBinCountPerFeature, nameof(options.MaximumBinCountPerFeature), "Must be posittive."); + Host.CheckParam(0 < options.MaximumBinCountPerFeature, nameof(options.MaximumBinCountPerFeature), "Must be positive."); Host.CheckParam(0 < options.NumberOfIterations, nameof(options.NumberOfIterations), "Must be positive."); Host.CheckParam(0 < options.MinimumExampleCountPerLeaf, nameof(options.MinimumExampleCountPerLeaf), "Must be positive."); @@ -608,7 +608,7 @@ private void InitializeThreads() private class LeafSplitHelper : ILeafSplitStatisticsCalculator { - private bool _hasWeights; + private readonly bool _hasWeights; public LeafSplitHelper(bool hasWeights) { diff --git a/src/Microsoft.ML.FastTree/Training/DcgCalculator.cs b/src/Microsoft.ML.FastTree/Training/DcgCalculator.cs index d65fc4b46f..029114d5fa 100644 --- a/src/Microsoft.ML.FastTree/Training/DcgCalculator.cs +++ b/src/Microsoft.ML.FastTree/Training/DcgCalculator.cs @@ -18,15 +18,15 @@ internal sealed class DcgCalculator private readonly int[] _oneTwoThree; // reusable memory - private int[][] _permutationBuffers; - private double[][] _scoreBuffers; - private DcgPermutationComparer[] _comparers; + private readonly int[][] _permutationBuffers; + private readonly double[][] _scoreBuffers; + private readonly DcgPermutationComparer[] _comparers; /// /// Contains the instances for a second Level comparer, which gets applied after the initial rank /// based ordering has happened. The array stores one second level comparer per thread. 
/// - private DescendingStableIdealComparer[] _secondLevelcomparers; + private readonly DescendingStableIdealComparer[] _secondLevelcomparers; private double _result; diff --git a/src/Microsoft.ML.FastTree/Training/DocumentPartitioning.cs b/src/Microsoft.ML.FastTree/Training/DocumentPartitioning.cs index 9b20841a16..65717d0b5e 100644 --- a/src/Microsoft.ML.FastTree/Training/DocumentPartitioning.cs +++ b/src/Microsoft.ML.FastTree/Training/DocumentPartitioning.cs @@ -22,7 +22,7 @@ internal sealed class DocumentPartitioning private readonly int[] _leafCount; private readonly int[] _documents; private int[] _tempDocuments; - private int[] _initialDocuments; + private readonly int[] _initialDocuments; /// /// Constructor diff --git a/src/Microsoft.ML.FastTree/Training/EnsembleCompression/LassoFit.cs b/src/Microsoft.ML.FastTree/Training/EnsembleCompression/LassoFit.cs index 4627d57ddf..35b0b9a3c1 100644 --- a/src/Microsoft.ML.FastTree/Training/EnsembleCompression/LassoFit.cs +++ b/src/Microsoft.ML.FastTree/Training/EnsembleCompression/LassoFit.cs @@ -33,7 +33,7 @@ internal sealed class LassoFit // Total number of passes over data public int NumberOfPasses; - private int _numFeatures; + private readonly int _numFeatures; public LassoFit(int numberOfLambdas, int maxAllowedFeaturesAlongPath, int numFeatures) { diff --git a/src/Microsoft.ML.FastTree/Training/OptimizationAlgorithms/GradientDescent.cs b/src/Microsoft.ML.FastTree/Training/OptimizationAlgorithms/GradientDescent.cs index 259106293a..e68b3f43c1 100644 --- a/src/Microsoft.ML.FastTree/Training/OptimizationAlgorithms/GradientDescent.cs +++ b/src/Microsoft.ML.FastTree/Training/OptimizationAlgorithms/GradientDescent.cs @@ -11,14 +11,14 @@ namespace Microsoft.ML.Trainers.FastTree { internal class GradientDescent : OptimizationAlgorithm { - private IGradientAdjuster _gradientWrapper; + private readonly IGradientAdjuster _gradientWrapper; /// number of trees dropped in this iteration private int _numberOfDroppedTrees; // treeScores stores for every tree the predictions it makes on every training example. This is used // to eliminate the need for computing the scores when we drop trees. However, it causes a horrifying // memory drain. - private List _treeScores; + private readonly List _treeScores; private double[] _droppedScores; private double[] _scores; diff --git a/src/Microsoft.ML.FastTree/Training/OptimizationAlgorithms/NoOptimizationAlgorithm.cs b/src/Microsoft.ML.FastTree/Training/OptimizationAlgorithms/NoOptimizationAlgorithm.cs index 8a8c361688..1341ea6534 100644 --- a/src/Microsoft.ML.FastTree/Training/OptimizationAlgorithms/NoOptimizationAlgorithm.cs +++ b/src/Microsoft.ML.FastTree/Training/OptimizationAlgorithms/NoOptimizationAlgorithm.cs @@ -11,8 +11,8 @@ namespace Microsoft.ML.Trainers.FastTree /// internal class RandomForestOptimizer : GradientDescent { - private IGradientAdjuster _gradientWrapper; - // REVIEW: When the FastTree appliation is decoupled with tree learner and boosting logic, this class should be removed. + private readonly IGradientAdjuster _gradientWrapper; + // REVIEW: When the FastTree application is decoupled from tree learner and boosting logic, this class should be removed. 
internal RandomForestOptimizer(InternalTreeEnsemble ensemble, Dataset trainData, double[] initTrainScores, IGradientAdjuster gradientWrapper) : base(ensemble, trainData, initTrainScores, gradientWrapper) { diff --git a/src/Microsoft.ML.FastTree/Training/ScoreTracker.cs b/src/Microsoft.ML.FastTree/Training/ScoreTracker.cs index c7311a60ad..fd9a55a778 100644 --- a/src/Microsoft.ML.FastTree/Training/ScoreTracker.cs +++ b/src/Microsoft.ML.FastTree/Training/ScoreTracker.cs @@ -135,13 +135,13 @@ public override void SetScores(double[] scores) throw Contracts.ExceptNotSupp("This code should not be reachable"); } - //Computes AGD specific mutiplier. Given that we have tree number t in ensemble (we count trees starting from 0) + //Computes AGD specific multiplier. Given that we have tree number t in ensemble (we count trees starting from 0) //And we have total k trees in ensemble, what should be the multiplier on the tree when sum the ensemble together based on AGD formula being //X[k+1] = Y[k] + Tree[k] //Y[k+1] = X[k+1] + C[k] * (X[k+1] – X[k]) //C[k] = (k-1) / (k+2) - private static Dictionary> _treeMultiplierMap = new Dictionary>(); + private static readonly Dictionary> _treeMultiplierMap = new Dictionary>(); public static double TreeMultiplier(int t, int k) { if (_treeMultiplierMap.ContainsKey(t)) diff --git a/src/Microsoft.ML.FastTree/Training/StepSearch.cs b/src/Microsoft.ML.FastTree/Training/StepSearch.cs index 91e8711221..68952f566d 100644 --- a/src/Microsoft.ML.FastTree/Training/StepSearch.cs +++ b/src/Microsoft.ML.FastTree/Training/StepSearch.cs @@ -16,8 +16,8 @@ internal interface IStepSearch internal sealed class LineSearch : IStepSearch, IFastTrainingScoresUpdate { private double _historicStepSize; - private int _numPostbracketSteps; - private double _minStepSize; + private readonly int _numPostbracketSteps; + private readonly double _minStepSize; public LineSearch(Test lossCalculator, int lossIndex, int numPostbracketSteps, double minStepSize) : this(lossCalculator, lossIndex) { _numPostbracketSteps = numPostbracketSteps; _minStepSize = minStepSize; } diff --git a/src/Microsoft.ML.FastTree/Training/Test.cs b/src/Microsoft.ML.FastTree/Training/Test.cs index a4ddc0356d..6002af51ac 100644 --- a/src/Microsoft.ML.FastTree/Training/Test.cs +++ b/src/Microsoft.ML.FastTree/Training/Test.cs @@ -273,11 +273,11 @@ public ValueIterationPair(int iteration, double sum) private readonly int _windowSize; private readonly double _tolerance; // Queue for moving window - private LinkedList _window; + private readonly LinkedList _window; // This queue keeps track of the iterations which are within tolerance from the best iteration // The first element of the queue is the early stopping candidate - private LinkedList _toleratedQueue; + private readonly LinkedList _toleratedQueue; // Average validation for the current window private double _currentWindowSum; diff --git a/src/Microsoft.ML.FastTree/Training/TreeLearners/FastForestLeastSquaresTreeLearner.cs b/src/Microsoft.ML.FastTree/Training/TreeLearners/FastForestLeastSquaresTreeLearner.cs index 7c715ed596..38b2a386df 100644 --- a/src/Microsoft.ML.FastTree/Training/TreeLearners/FastForestLeastSquaresTreeLearner.cs +++ b/src/Microsoft.ML.FastTree/Training/TreeLearners/FastForestLeastSquaresTreeLearner.cs @@ -9,8 +9,8 @@ namespace Microsoft.ML.Trainers.FastTree { internal class RandomForestLeastSquaresTreeLearner : LeastSquaresRegressionTreeLearner { - private int _quantileSampleCount; - private bool _quantileEnabled; + private readonly int 
_quantileSampleCount; + private readonly bool _quantileEnabled; public RandomForestLeastSquaresTreeLearner(Dataset trainData, int numLeaves, int minDocsInLeaf, Double entropyCoefficient, Double featureFirstUsePenalty, Double featureReusePenalty, Double softmaxTemperature, int histogramPoolSize, int randomSeed, Double splitFraction, bool allowEmptyTrees, diff --git a/src/Microsoft.ML.FastTree/Training/TreeLearners/LeastSquaresRegressionTreeLearner.cs b/src/Microsoft.ML.FastTree/Training/TreeLearners/LeastSquaresRegressionTreeLearner.cs index 0488cf2b17..cc80841fa0 100644 --- a/src/Microsoft.ML.FastTree/Training/TreeLearners/LeastSquaresRegressionTreeLearner.cs +++ b/src/Microsoft.ML.FastTree/Training/TreeLearners/LeastSquaresRegressionTreeLearner.cs @@ -39,7 +39,7 @@ internal class LeastSquaresRegressionTreeLearner : TreeLearner, ILeafSplitStatis public readonly double Bias; // Multithread task to find best threshold. - private IThreadTask _calculateLeafSplitCandidates; + private readonly IThreadTask _calculateLeafSplitCandidates; protected SplitInfo[] BestSplitInfoPerLeaf; protected HashSet CategoricalThresholds; @@ -74,7 +74,7 @@ internal class LeastSquaresRegressionTreeLearner : TreeLearner, ILeafSplitStatis // size of reserved memory private readonly long _sizeOfReservedMemory; - private IParallelTraining _parallelTraining; + private readonly IParallelTraining _parallelTraining; public int MaxCategoricalGroupsPerNode { get; } @@ -828,7 +828,7 @@ internal sealed class LeafSplitCandidates private double _sumWeights; private double _sumSquaredTargets; private int[] _docIndices; - private int[] _docIndicesCopy; + private readonly int[] _docIndicesCopy; public readonly FloatType[] Targets; public readonly double[] Weights; public readonly SplitInfo[] FeatureSplitInfo; diff --git a/src/Microsoft.ML.FastTree/TreeEnsemble/InternalRegressionTree.cs b/src/Microsoft.ML.FastTree/TreeEnsemble/InternalRegressionTree.cs index f153d5a197..bdfaabdcde 100644 --- a/src/Microsoft.ML.FastTree/TreeEnsemble/InternalRegressionTree.cs +++ b/src/Microsoft.ML.FastTree/TreeEnsemble/InternalRegressionTree.cs @@ -23,12 +23,12 @@ namespace Microsoft.ML.Trainers.FastTree internal class InternalRegressionTree { private double _maxOutput; - private double[] _splitGain; - private double[] _gainPValue; + private readonly double[] _splitGain; + private readonly double[] _gainPValue; /// /// The value of this non-leaf node, prior to split when it was a leaf. /// - private double[] _previousLeafValue; + private readonly double[] _previousLeafValue; // for each non-leaf, we keep the following data public float[] DefaultValueForMissing; @@ -820,7 +820,7 @@ public int GetLeaf(in VBuffer feat, ref List path) private float GetFeatureValue(float x, int node) { - // Not need to convert missing vaules. + // No need to convert missing values. 
if (DefaultValueForMissing == null) return x; diff --git a/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizationEstimator.cs b/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizationEstimator.cs index c7248fe3c1..621be7f5e7 100644 --- a/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizationEstimator.cs +++ b/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizationEstimator.cs @@ -190,7 +190,7 @@ public sealed class Options : OptionsBase public TreeEnsembleModelParameters ModelParameters; }; - private TreeEnsembleModelParameters _modelParameters; + private readonly TreeEnsembleModelParameters _modelParameters; internal PretrainedTreeFeaturizationEstimator(IHostEnvironment env, Options options) : base(env, options) { diff --git a/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizer.cs b/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizer.cs index 310d2f5ea7..9c6aa39c84 100644 --- a/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizer.cs +++ b/src/Microsoft.ML.FastTree/TreeEnsembleFeaturizer.cs @@ -84,14 +84,14 @@ private sealed class BoundMapper : ISchemaBoundRowMapper /// and the i-th vector element is the prediction value predicted by the i-th tree. /// If is , this output column may not be generated. /// - private string _treesColumnName; + private readonly string _treesColumnName; /// /// The 0-1 encoding of all leaf nodes' IDs. Its type is a vector of . If the given feature /// vector falls into the first leaf of the first tree, the first element in the 0-1 encoding would be 1. /// If is , this output column may not be generated. /// - private string _leavesColumnName; + private readonly string _leavesColumnName; /// /// The 0-1 encoding of the paths to the leaves. If the path to the first tree's leaf is node 1 (2nd node in the first tree), @@ -99,7 +99,7 @@ private sealed class BoundMapper : ISchemaBoundRowMapper /// would be 1. /// If is , this output column may not be generated. 
/// - private string _pathsColumnName; + private readonly string _pathsColumnName; public BoundMapper(IExceptionContext ectx, TreeEnsembleFeaturizerBindableMapper owner, RoleMappedSchema schema, string treesColumnName, string leavesColumnName, string pathsColumnName) @@ -230,7 +230,7 @@ private sealed class State private readonly int _numLeaves; private VBuffer _src; - private ValueGetter> _featureGetter; + private readonly ValueGetter> _featureGetter; private long _cachedPosition; private readonly int[] _leafIds; private readonly List[] _pathIds; diff --git a/src/Microsoft.ML.FastTree/Utils/BufferPoolManager.cs b/src/Microsoft.ML.FastTree/Utils/BufferPoolManager.cs index f885f12fef..251bcc396b 100644 --- a/src/Microsoft.ML.FastTree/Utils/BufferPoolManager.cs +++ b/src/Microsoft.ML.FastTree/Utils/BufferPoolManager.cs @@ -27,7 +27,7 @@ internal static class BufferPoolManager /// /// A dictionary containing all buffer pool types /// - private static ConcurrentDictionary>> _bufferPools = new ConcurrentDictionary>>(); + private static readonly ConcurrentDictionary>> _bufferPools = new ConcurrentDictionary>>(); /// /// Gets a buffer from the pool with at least the same size as passed as input parameter diff --git a/src/Microsoft.ML.FastTree/Utils/MappedObjectPool.cs b/src/Microsoft.ML.FastTree/Utils/MappedObjectPool.cs index 5eec6ef28c..c5fb6b58a9 100644 --- a/src/Microsoft.ML.FastTree/Utils/MappedObjectPool.cs +++ b/src/Microsoft.ML.FastTree/Utils/MappedObjectPool.cs @@ -12,10 +12,10 @@ namespace Microsoft.ML.Trainers.FastTree /// internal class MappedObjectPool where T : class { - private T[] _pool; - private int[] _map; - private int[] _inverseMap; - private int[] _lastAccessTime; + private readonly T[] _pool; + private readonly int[] _map; + private readonly int[] _inverseMap; + private readonly int[] _lastAccessTime; private int _time; /// diff --git a/src/Microsoft.ML.FastTree/Utils/ThreadTaskManager.cs b/src/Microsoft.ML.FastTree/Utils/ThreadTaskManager.cs index 331f3d6826..9a7b8b0150 100644 --- a/src/Microsoft.ML.FastTree/Utils/ThreadTaskManager.cs +++ b/src/Microsoft.ML.FastTree/Utils/ThreadTaskManager.cs @@ -12,7 +12,7 @@ namespace Microsoft.ML.Trainers.FastTree { internal static class ThreadTaskManager { - private static object _lockObject = new object(); + private static readonly object _lockObject = new object(); // REVIEW: Should this bother with number of threads? What should it do? public static int NumThreads { get; private set; } diff --git a/src/Microsoft.ML.Featurizers/Common.cs b/src/Microsoft.ML.Featurizers/Common.cs index 7bf8a07459..d01da58641 100644 --- a/src/Microsoft.ML.Featurizers/Common.cs +++ b/src/Microsoft.ML.Featurizers/Common.cs @@ -1,4 +1,8 @@ -using System; +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. 
+ +using System; using System.Collections.Generic; using System.Diagnostics; using System.Runtime.InteropServices; @@ -117,7 +121,7 @@ protected override bool ReleaseHandle() internal delegate bool DestroyTransformedDataNative(IntPtr output, out IntPtr errorHandle); internal class TransformedDataSafeHandle : SafeHandleZeroOrMinusOneIsInvalid { - private DestroyTransformedDataNative _destroySaveDataHandler; + private readonly DestroyTransformedDataNative _destroySaveDataHandler; public TransformedDataSafeHandle(IntPtr handle, DestroyTransformedDataNative destroyCppTransformerEstimator) : base(true) { @@ -137,7 +141,7 @@ protected override bool ReleaseHandle() internal delegate bool DestroyNativeTransformerEstimator(IntPtr estimator, out IntPtr errorHandle); internal class TransformerEstimatorSafeHandle : SafeHandleZeroOrMinusOneIsInvalid { - private DestroyNativeTransformerEstimator _destroyNativeTransformerEstimator; + private readonly DestroyNativeTransformerEstimator _destroyNativeTransformerEstimator; public TransformerEstimatorSafeHandle(IntPtr handle, DestroyNativeTransformerEstimator destroyNativeTransformerEstimator) : base(true) { SetHandle(handle); @@ -169,7 +173,7 @@ public SaveDataSafeHandle(IntPtr handle, IntPtr dataSize) : base(true) protected override bool ReleaseHandle() { - // Not sure what to do with error stuff here. There shoudln't ever be one though. + // Not sure what to do with error stuff here. There shouldn't ever be one though. return DestroyTransformerSaveDataNative(handle, _dataSize, out _); } } diff --git a/src/Microsoft.ML.Featurizers/DateTimeTransformer.cs b/src/Microsoft.ML.Featurizers/DateTimeTransformer.cs index bd406e4e60..998f1c4f43 100644 --- a/src/Microsoft.ML.Featurizers/DateTimeTransformer.cs +++ b/src/Microsoft.ML.Featurizers/DateTimeTransformer.cs @@ -245,8 +245,8 @@ public sealed class DateTimeTransformer : RowToRowTransformerBase, IDisposable internal const string ShortName = "DateTimeTransform"; internal const string LoadName = "DateTimeTransform"; internal const string LoaderSignature = "DateTimeTransform"; - private LongTypedColumn _column; - private DataViewSchema _schema; + private readonly LongTypedColumn _column; + private readonly DataViewSchema _schema; #endregion @@ -554,7 +554,7 @@ internal unsafe TypedColumn(string source, string prefix) internal abstract void CreateTransformerFromEstimator(DateTimeEstimator.HolidayList country); private protected abstract unsafe void CreateTransformerFromSavedDataHelper(byte* rawData, IntPtr dataSize); - private protected unsafe abstract bool CreateEstimatorHelper(byte* countryName, byte* dataRootDir, out IntPtr estimator, out IntPtr errorHandle); + private protected abstract unsafe bool CreateEstimatorHelper(byte* countryName, byte* dataRootDir, out IntPtr estimator, out IntPtr errorHandle); private protected abstract bool CreateTransformerFromEstimatorHelper(TransformerEstimatorSafeHandle estimator, out IntPtr transformer, out IntPtr errorHandle); private protected abstract bool DestroyEstimatorHelper(IntPtr estimator, out IntPtr errorHandle); private protected abstract bool DestroyTransformerHelper(IntPtr transformer, out IntPtr errorHandle); @@ -668,7 +668,7 @@ internal sealed class LongTypedColumn : TypedColumn } [DllImport("Featurizers", EntryPoint = "DateTimeFeaturizer_CreateEstimator"), SuppressUnmanagedCodeSecurity] - private static unsafe extern bool CreateEstimatorNative(byte* countryName, byte* dataRootDir, out IntPtr estimator, out IntPtr errorHandle); + private static extern unsafe bool 
CreateEstimatorNative(byte* countryName, byte* dataRootDir, out IntPtr estimator, out IntPtr errorHandle); [DllImport("Featurizers", EntryPoint = "DateTimeFeaturizer_DestroyEstimator"), SuppressUnmanagedCodeSecurity] private static extern bool DestroyEstimatorNative(IntPtr estimator, out IntPtr errorHandle); // Should ONLY be called by safe handle @@ -683,7 +683,7 @@ internal override unsafe void CreateTransformerFromEstimator(DateTimeEstimator.H } [DllImport("Featurizers", EntryPoint = "DateTimeFeaturizer_CreateTransformerFromSavedDataWithDataRoot"), SuppressUnmanagedCodeSecurity] - private static unsafe extern bool CreateTransformerFromSavedDataNative(byte* rawData, IntPtr bufferSize, byte* dataRootDir, out IntPtr transformer, out IntPtr errorHandle); + private static extern unsafe bool CreateTransformerFromSavedDataNative(byte* rawData, IntPtr bufferSize, byte* dataRootDir, out IntPtr transformer, out IntPtr errorHandle); private protected override unsafe void CreateTransformerFromSavedDataHelper(byte* rawData, IntPtr dataSize) { byte[] dataRoot; @@ -748,7 +748,7 @@ public override void Dispose() _transformerHandler.Dispose(); } - private protected unsafe override bool CreateEstimatorHelper(byte* countryName, byte* dataRootDir, out IntPtr estimator, out IntPtr errorHandle) => + private protected override unsafe bool CreateEstimatorHelper(byte* countryName, byte* dataRootDir, out IntPtr estimator, out IntPtr errorHandle) => CreateEstimatorNative(countryName, dataRootDir, out estimator, out errorHandle); private protected override bool CreateTransformerFromEstimatorHelper(TransformerEstimatorSafeHandle estimator, out IntPtr transformer, out IntPtr errorHandle) => diff --git a/src/Microsoft.ML.Featurizers/TimeSeriesImputer.cs b/src/Microsoft.ML.Featurizers/TimeSeriesImputer.cs index d2f828455a..60b5260abf 100644 --- a/src/Microsoft.ML.Featurizers/TimeSeriesImputer.cs +++ b/src/Microsoft.ML.Featurizers/TimeSeriesImputer.cs @@ -1,4 +1,8 @@ -using System; +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using System; using System.Collections.Generic; using System.Diagnostics; using System.IO; @@ -84,7 +88,7 @@ public static class TimeSeriesImputerExtensionClass /// boolean value representing if the row was created as a result of this operation or not. /// /// The imputation strategies that are currently supported are ForwardFill, where the last good value is propagated forward, Backfill, where the next good value is propagated backwards, - /// and Median, where the mathmatical median is used to fill in missing values. + /// and Median, where the mathematical median is used to fill in missing values. /// /// NOTE: It is not recommended to chain this multiple times. If a column is filtered, the default value is placed when a row is imputed, and the /// default value is not null. Thus any other TimeSeriesImputers will not be able to replace those values anymore causing essentially a very @@ -97,7 +101,7 @@ public static class TimeSeriesImputerExtensionClass /// public sealed class TimeSeriesImputerEstimator : IEstimator { - private Options _options; + private readonly Options _options; internal const string IsRowImputedColumnName = "IsRowImputed"; private readonly IHost _host; @@ -158,14 +162,14 @@ public enum ImputationStrategy : byte /// /// Method by which columns are selected for imputing values. 
- /// NoFilter takes all of the columns so you dont have to specify anything. + /// NoFilter takes all of the columns so you don't have to specify anything. /// Include only does the specified ImputationStrategy on the columns you specify. The other columns will get a default value. /// Exclude is the exact opposite of Include, and does the ImputationStrategy on all columns but the ones you specify, which will get the default value. /// public enum FilterMode : byte { /// - /// Takes all of the columns so you dont have to specify anything. + /// Takes all of the columns so you don't have to specify anything. /// NoFilter = 1, @@ -458,7 +462,7 @@ private unsafe TransformerEstimatorSafeHandle CreateTransformerFromEstimator(IDa if (!success) throw new Exception(GetErrorDetailsAndFreeNativeMemory(errorHandle)); - // Manually dispose of the IEnumerator since we dont have a using statement; + // Manually dispose of the IEnumerator since we don't have a using statement; cursor.Dispose(); return new TransformerEstimatorSafeHandle(transformer, DestroyTransformerNative); @@ -577,7 +581,7 @@ public void Dispose() // TODO: Update entry points [DllImport("Featurizers", EntryPoint = "TimeSeriesImputerFeaturizer_BinaryArchive_CreateEstimator", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity] - private static unsafe extern bool CreateEstimatorNative(TypeId* grainTypes, IntPtr grainTypesSize, TypeId* dataTypes, IntPtr dataTypesSize, TimeSeriesImputerEstimator.ImputationStrategy strategy, bool* suppressTypeErrors, out IntPtr estimator, out IntPtr errorHandle); + private static extern unsafe bool CreateEstimatorNative(TypeId* grainTypes, IntPtr grainTypesSize, TypeId* dataTypes, IntPtr dataTypesSize, TimeSeriesImputerEstimator.ImputationStrategy strategy, bool* suppressTypeErrors, out IntPtr estimator, out IntPtr errorHandle); [DllImport("Featurizers", EntryPoint = "TimeSeriesImputerFeaturizer_BinaryArchive_DestroyEstimator", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity] private static extern bool DestroyEstimatorNative(IntPtr estimator, out IntPtr errorHandle); // Should ONLY be called by safe handle @@ -589,7 +593,7 @@ public void Dispose() private static extern bool CompleteTrainingNative(TransformerEstimatorSafeHandle estimator, out IntPtr errorHandle); [DllImport("Featurizers", EntryPoint = "TimeSeriesImputerFeaturizer_BinaryArchive_Fit", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity] - private static unsafe extern bool FitNative(TransformerEstimatorSafeHandle estimator, NativeBinaryArchiveData data, out FitResult fitResult, out IntPtr errorHandle); + private static extern unsafe bool FitNative(TransformerEstimatorSafeHandle estimator, NativeBinaryArchiveData data, out FitResult fitResult, out IntPtr errorHandle); [DllImport("Featurizers", EntryPoint = "TimeSeriesImputerFeaturizer_BinaryArchive_CreateTransformerFromEstimator", CallingConvention = CallingConvention.Cdecl), SuppressUnmanagedCodeSecurity] private static extern bool CreateTransformerFromEstimatorNative(TransformerEstimatorSafeHandle estimator, out IntPtr transformer, out IntPtr errorHandle); @@ -601,13 +605,13 @@ public void Dispose() private static extern bool CreateOnnxSaveDataNative(TransformerEstimatorSafeHandle transformer, out IntPtr buffer, out IntPtr bufferSize, out IntPtr error); [DllImport("Featurizers", EntryPoint = "TimeSeriesImputerFeaturizer_BinaryArchive_CreateTransformerFromSavedData"), SuppressUnmanagedCodeSecurity] - private static 
unsafe extern bool CreateTransformerFromSavedDataNative(byte* rawData, IntPtr bufferSize, out IntPtr transformer, out IntPtr errorHandle); + private static extern unsafe bool CreateTransformerFromSavedDataNative(byte* rawData, IntPtr bufferSize, out IntPtr transformer, out IntPtr errorHandle); [DllImport("Featurizers", EntryPoint = "TimeSeriesImputerFeaturizer_BinaryArchive_OnDataCompleted"), SuppressUnmanagedCodeSecurity] - private static unsafe extern bool OnDataCompletedNative(TransformerEstimatorSafeHandle estimator, out IntPtr errorHandle); + private static extern unsafe bool OnDataCompletedNative(TransformerEstimatorSafeHandle estimator, out IntPtr errorHandle); [DllImport("Featurizers", EntryPoint = "TimeSeriesImputerFeaturizer_BinaryArchive_GetState"), SuppressUnmanagedCodeSecurity] - private static unsafe extern bool GetStateNative(TransformerEstimatorSafeHandle estimator, out TrainingState trainingState, out IntPtr errorHandle); + private static extern unsafe bool GetStateNative(TransformerEstimatorSafeHandle estimator, out TrainingState trainingState, out IntPtr errorHandle); #endregion diff --git a/src/Microsoft.ML.Featurizers/TimeSeriesImputerDataView.cs b/src/Microsoft.ML.Featurizers/TimeSeriesImputerDataView.cs index df31cb7493..048445a0e1 100644 --- a/src/Microsoft.ML.Featurizers/TimeSeriesImputerDataView.cs +++ b/src/Microsoft.ML.Featurizers/TimeSeriesImputerDataView.cs @@ -22,7 +22,7 @@ namespace Microsoft.ML.Transforms internal sealed class TimeSeriesImputerDataView : IDataTransform { #region Typed Columns - private TimeSeriesImputerTransformer _parent; + private readonly TimeSeriesImputerTransformer _parent; public class SharedColumnState { public SharedColumnState() @@ -269,7 +269,7 @@ internal override TypeId GetTypeId() return typeof(T).GetNativeTypeIdFromType(); } - internal unsafe abstract T GetDataFromNativeBinaryArchiveData(byte* data, int offset); + internal abstract unsafe T GetDataFromNativeBinaryArchiveData(byte* data, int offset); } private abstract class NumericTypedColumn : TypedColumn @@ -308,7 +308,7 @@ private class ByteTypedColumn : NumericTypedColumn { } - internal unsafe override byte GetDataFromNativeBinaryArchiveData(byte* data, int offset) + internal override unsafe byte GetDataFromNativeBinaryArchiveData(byte* data, int offset) { if (IsNullable) { @@ -329,7 +329,7 @@ private class SByteTypedColumn : NumericTypedColumn { } - internal unsafe override sbyte GetDataFromNativeBinaryArchiveData(byte* data, int offset) + internal override unsafe sbyte GetDataFromNativeBinaryArchiveData(byte* data, int offset) { if (IsNullable) { @@ -350,7 +350,7 @@ private class ShortTypedColumn : NumericTypedColumn { } - internal unsafe override short GetDataFromNativeBinaryArchiveData(byte* data, int offset) + internal override unsafe short GetDataFromNativeBinaryArchiveData(byte* data, int offset) { if (IsNullable) { @@ -371,7 +371,7 @@ private class UShortTypedColumn : NumericTypedColumn { } - internal unsafe override ushort GetDataFromNativeBinaryArchiveData(byte* data, int offset) + internal override unsafe ushort GetDataFromNativeBinaryArchiveData(byte* data, int offset) { if (IsNullable) { @@ -392,7 +392,7 @@ private class IntTypedColumn : NumericTypedColumn { } - internal unsafe override int GetDataFromNativeBinaryArchiveData(byte* data, int offset) + internal override unsafe int GetDataFromNativeBinaryArchiveData(byte* data, int offset) { if (IsNullable) { @@ -413,7 +413,7 @@ private class UIntTypedColumn : NumericTypedColumn { } - internal unsafe override 
uint GetDataFromNativeBinaryArchiveData(byte* data, int offset) + internal override unsafe uint GetDataFromNativeBinaryArchiveData(byte* data, int offset) { if (IsNullable) { @@ -434,7 +434,7 @@ private class LongTypedColumn : NumericTypedColumn { } - internal unsafe override long GetDataFromNativeBinaryArchiveData(byte* data, int offset) + internal override unsafe long GetDataFromNativeBinaryArchiveData(byte* data, int offset) { if (IsNullable) { @@ -455,7 +455,7 @@ private class ULongTypedColumn : NumericTypedColumn { } - internal unsafe override ulong GetDataFromNativeBinaryArchiveData(byte* data, int offset) + internal override unsafe ulong GetDataFromNativeBinaryArchiveData(byte* data, int offset) { if (IsNullable) { @@ -476,7 +476,7 @@ private class FloatTypedColumn : NumericTypedColumn { } - internal unsafe override float GetDataFromNativeBinaryArchiveData(byte* data, int offset) + internal override unsafe float GetDataFromNativeBinaryArchiveData(byte* data, int offset) { var bytes = new byte[sizeof(float)]; Marshal.Copy((IntPtr)(data + offset), bytes, 0, sizeof(float)); @@ -491,7 +491,7 @@ private class DoubleTypedColumn : NumericTypedColumn { } - internal unsafe override double GetDataFromNativeBinaryArchiveData(byte* data, int offset) + internal override unsafe double GetDataFromNativeBinaryArchiveData(byte* data, int offset) { var bytes = new byte[sizeof(double)]; Marshal.Copy((IntPtr)(data + offset), bytes, 0, sizeof(double)); @@ -506,7 +506,7 @@ private class BoolTypedColumn : NumericTypedColumn { } - internal unsafe override bool GetDataFromNativeBinaryArchiveData(byte* data, int offset) + internal override unsafe bool GetDataFromNativeBinaryArchiveData(byte* data, int offset) { if (IsNullable) { @@ -552,7 +552,7 @@ internal override void SerializeValue(BinaryWriter binaryWriter) binaryWriter.Write(stringBytes); } - internal unsafe override ReadOnlyMemory GetDataFromNativeBinaryArchiveData(byte* data, int offset) + internal override unsafe ReadOnlyMemory GetDataFromNativeBinaryArchiveData(byte* data, int offset) { if (_isNullable) { @@ -608,7 +608,7 @@ internal override void SerializeValue(BinaryWriter binaryWriter) binaryWriter.Write(value); } - internal unsafe override DateTime GetDataFromNativeBinaryArchiveData(byte* data, int offset) + internal override unsafe DateTime GetDataFromNativeBinaryArchiveData(byte* data, int offset) { long value; if (_isNullable) @@ -656,7 +656,7 @@ internal override unsafe int GetDataSizeInBytes(byte* data, int currentOffset) internal class TransformedDataSafeHandle : SafeHandleZeroOrMinusOneIsInvalid { - private IntPtr _size; + private readonly IntPtr _size; public TransformedDataSafeHandle(IntPtr handle, IntPtr size) : base(true) { SetHandle(handle); @@ -728,7 +728,7 @@ public void Save(ModelSaveContext ctx) private sealed class Cursor : DataViewRowCursor { private readonly IChannelProvider _ch; - private DataViewRowCursor _input; + private readonly DataViewRowCursor _input; private long _position; private bool _isGood; private readonly Dictionary _allColumns; diff --git a/src/Microsoft.ML.ImageAnalytics/ImageGrayscale.cs b/src/Microsoft.ML.ImageAnalytics/ImageGrayscale.cs index 279d611ebd..7fc03117f1 100644 --- a/src/Microsoft.ML.ImageAnalytics/ImageGrayscale.cs +++ b/src/Microsoft.ML.ImageAnalytics/ImageGrayscale.cs @@ -160,7 +160,7 @@ private protected override void CheckInputColumn(DataViewSchema inputSchema, int private sealed class Mapper : OneToOneMapperBase { - private ImageGrayscalingTransformer _parent; + private readonly 
ImageGrayscalingTransformer _parent; public Mapper(ImageGrayscalingTransformer parent, DataViewSchema inputSchema) : base(parent.Host.Register(nameof(Mapper)), parent, inputSchema) diff --git a/src/Microsoft.ML.OnnxTransformer/OnnxMapType.cs b/src/Microsoft.ML.OnnxTransformer/OnnxMapType.cs index a7ae3cbfd8..034c13c042 100644 --- a/src/Microsoft.ML.OnnxTransformer/OnnxMapType.cs +++ b/src/Microsoft.ML.OnnxTransformer/OnnxMapType.cs @@ -49,8 +49,8 @@ public override int GetHashCode() /// public sealed class OnnxMapTypeAttribute : DataViewTypeAttribute { - private Type _keyType; - private Type _valueType; + private readonly Type _keyType; + private readonly Type _valueType; /// /// Create a map (aka dictionary) type. diff --git a/src/Microsoft.ML.OnnxTransformer/OnnxSequenceType.cs b/src/Microsoft.ML.OnnxTransformer/OnnxSequenceType.cs index a41fa2cd2c..9a57953094 100644 --- a/src/Microsoft.ML.OnnxTransformer/OnnxSequenceType.cs +++ b/src/Microsoft.ML.OnnxTransformer/OnnxSequenceType.cs @@ -55,7 +55,7 @@ public override int GetHashCode() /// public sealed class OnnxSequenceTypeAttribute : DataViewTypeAttribute { - private Type _elemType; + private readonly Type _elemType; // Make default constructor obsolete. // Use default constructor will left the _elemType field empty and cause exception in methods using _elemType. diff --git a/src/Microsoft.ML.OnnxTransformer/OnnxUtils.cs b/src/Microsoft.ML.OnnxTransformer/OnnxUtils.cs index c8c9aed970..154a3eeb6c 100644 --- a/src/Microsoft.ML.OnnxTransformer/OnnxUtils.cs +++ b/src/Microsoft.ML.OnnxTransformer/OnnxUtils.cs @@ -91,7 +91,7 @@ public OnnxVariableInfo GetOutput(string name) { var index = OutputNames.IndexOf(name); if (index < 0) - throw Contracts.ExceptParamValue(name, nameof(name), $"Onput tensor, {name}, does not exist in the ONNX model. " + + throw Contracts.ExceptParamValue(name, nameof(name), $"Output tensor, {name}, does not exist in the ONNX model. 
" + $"Available output names are [{string.Join(",", OutputNames)}]."); return OutputsInfo[index]; } @@ -423,7 +423,7 @@ private void Dispose(bool disposing) internal sealed class OnnxUtils { - private static HashSet _onnxTypeMap = + private static readonly HashSet _onnxTypeMap = new HashSet { typeof(Double), @@ -439,7 +439,7 @@ internal sealed class OnnxUtils typeof(SByte), typeof(Byte) }; - private static Dictionary _typeToKindMap = + private static readonly Dictionary _typeToKindMap = new Dictionary { { typeof(Single) , InternalDataKind.R4}, diff --git a/src/Microsoft.ML.Parquet/ParquetLoader.cs b/src/Microsoft.ML.Parquet/ParquetLoader.cs index 115a480e14..21707a3434 100644 --- a/src/Microsoft.ML.Parquet/ParquetLoader.cs +++ b/src/Microsoft.ML.Parquet/ParquetLoader.cs @@ -441,7 +441,7 @@ private sealed class Cursor : RootCursorBase private readonly ReaderOptions _readerOptions; private int _curDataSetRow; private IEnumerator _dataSetEnumerator; - private IEnumerator _blockEnumerator; + private readonly IEnumerator _blockEnumerator; private readonly IList[] _columnValues; private readonly Random _rand; diff --git a/src/Microsoft.ML.Parquet/PartitionedFileLoader.cs b/src/Microsoft.ML.Parquet/PartitionedFileLoader.cs index 77cf72f92e..59a2924e86 100644 --- a/src/Microsoft.ML.Parquet/PartitionedFileLoader.cs +++ b/src/Microsoft.ML.Parquet/PartitionedFileLoader.cs @@ -370,20 +370,20 @@ private sealed class Cursor : RootCursorBase private static readonly FuncInstanceMethodInfo1 _createGetterDelegateCoreMethodInfo = FuncInstanceMethodInfo1.Create(target => target.CreateGetterDelegateCore); - private PartitionedFileLoader _parent; + private readonly PartitionedFileLoader _parent; private readonly bool[] _active; private readonly bool[] _subActive; // Active columns of the sub-cursor. - private Delegate[] _getters; - private Delegate[] _subGetters; // Cached getters of the sub-cursor. + private readonly Delegate[] _getters; + private readonly Delegate[] _subGetters; // Cached getters of the sub-cursor. private readonly IEnumerable _columnsNeeded; private readonly IEnumerable _subActivecolumnsNeeded; - private ReadOnlyMemory[] _colValues; // Column values cached from the file path. + private readonly ReadOnlyMemory[] _colValues; // Column values cached from the file path. private DataViewRowCursor _subCursor; // Sub cursor of the current file. - private IEnumerator _fileOrder; + private readonly IEnumerator _fileOrder; public Cursor(IChannelProvider provider, PartitionedFileLoader parent, IMultiStreamSource files, IEnumerable columnsNeeded, Random rand) : base(provider) @@ -744,7 +744,7 @@ private bool TryParseValuesFromPath(string path, out List results) /// /// A base path. /// A list of files under the base path. - /// A realtive file path. + /// A relative file path. 
private string GetRelativePath(string basepath, IMultiStreamSource files) { Contracts.CheckNonEmpty(basepath, nameof(basepath)); diff --git a/src/Microsoft.ML.Parquet/PartitionedPathParser.cs b/src/Microsoft.ML.Parquet/PartitionedPathParser.cs index 96ec39a48d..444bcbbcca 100644 --- a/src/Microsoft.ML.Parquet/PartitionedPathParser.cs +++ b/src/Microsoft.ML.Parquet/PartitionedPathParser.cs @@ -98,8 +98,8 @@ private static VersionInfo GetVersionInfo() loaderAssemblyName: typeof(SimplePartitionedPathParser).Assembly.FullName); } - private IHost _host; - private PartitionedFileLoader.Column[] _columns; + private readonly IHost _host; + private readonly PartitionedFileLoader.Column[] _columns; public SimplePartitionedPathParser(IHostEnvironment env, Arguments args) { @@ -209,7 +209,7 @@ internal sealed class ParquetPartitionedPathParser : IPartitionedPathParser, ICa public const string LoadName = "ParquetPathParser"; public const string ShortName = "ParqPP"; - private IHost _host; + private readonly IHost _host; private PartitionedFileLoader.Column[] _columns; private static VersionInfo GetVersionInfo() diff --git a/src/Microsoft.ML.Recommender/MatrixFactorizationPredictor.cs b/src/Microsoft.ML.Recommender/MatrixFactorizationPredictor.cs index 82b066865d..69d2f3b470 100644 --- a/src/Microsoft.ML.Recommender/MatrixFactorizationPredictor.cs +++ b/src/Microsoft.ML.Recommender/MatrixFactorizationPredictor.cs @@ -193,7 +193,7 @@ void ICanSaveModel.Save(ModelSaveContext ctx) void ICanSaveInTextFormat.SaveAsText(TextWriter writer, RoleMappedSchema schema) { writer.WriteLine("# Imputed matrix is P * Q'"); - writer.WriteLine("# P in R^({0} x {1}), rows correpond to Y item", NumberOfRows, ApproximationRank); + writer.WriteLine("# P in R^({0} x {1}), rows correspond to Y item", NumberOfRows, ApproximationRank); for (int i = 0; i < _leftFactorMatrix.Length; ++i) { writer.Write(_leftFactorMatrix[i].ToString("G")); @@ -202,7 +202,7 @@ void ICanSaveInTextFormat.SaveAsText(TextWriter writer, RoleMappedSchema schema) else writer.Write('\t'); } - writer.WriteLine("# Q in R^({0} x {1}), rows correpond to X item", NumberOfColumns, ApproximationRank); + writer.WriteLine("# Q in R^({0} x {1}), rows correspond to X item", NumberOfColumns, ApproximationRank); for (int i = 0; i < _rightFactorMatrix.Length; ++i) { writer.Write(_rightFactorMatrix[i].ToString("G")); @@ -326,7 +326,7 @@ private sealed class RowMapper : ISchemaBoundRowMapper // The tail "ColumnName" means the column name in IDataView private readonly string _matrixColumnIndexColumnName; private readonly string _matrixRowIndexColumnName; - private IHostEnvironment _env; + private readonly IHostEnvironment _env; public DataViewSchema InputSchema => InputRoleMappedSchema.Schema; public DataViewSchema OutputSchema { get; } @@ -436,7 +436,7 @@ public sealed class MatrixFactorizationPredictionTransformer : PredictionTransfo /// columns specified by , , , and . /// The output column is "Score" by default but user can append a string to it. 
/// - /// Eviroment object for showing information + /// Environment object for showing information /// The model trained by one of the training functions in /// Targeted schema that containing columns named as xColumnName /// The name of the column used as role in matrix factorization world diff --git a/src/Microsoft.ML.Recommender/SafeTrainingAndModelBuffer.cs b/src/Microsoft.ML.Recommender/SafeTrainingAndModelBuffer.cs index a4aeee9dc2..59b23ac4f5 100644 --- a/src/Microsoft.ML.Recommender/SafeTrainingAndModelBuffer.cs +++ b/src/Microsoft.ML.Recommender/SafeTrainingAndModelBuffer.cs @@ -92,7 +92,7 @@ private struct MFParameter /// /// Number of blocks that the training matrix is divided into. The parallel stochastic gradient /// method in LIBMF processes assigns each thread a block at one time. The ratings in one block - /// would be sequentially accessed (not randomaly accessed like standard stochastic gradient methods). + /// would be sequentially accessed (not randomly accessed like standard stochastic gradient methods). /// public int NrBins; @@ -147,7 +147,7 @@ private struct MFParameter public byte Quiet; /// - /// Set to false so that LIBMF may reuse and modifiy the data passed in. + /// Set to false so that LIBMF may reuse and modify the data passed in. /// public byte CopyData; } @@ -194,19 +194,19 @@ private unsafe struct MFModel private const string NativePath = "MatrixFactorizationNative"; [DllImport(NativePath), SuppressUnmanagedCodeSecurity] - private static unsafe extern void MFDestroyModel(ref MFModel* model); + private static extern unsafe void MFDestroyModel(ref MFModel* model); [DllImport(NativePath), SuppressUnmanagedCodeSecurity] - private static unsafe extern MFModel* MFTrain(MFProblem* prob, MFParameter* param); + private static extern unsafe MFModel* MFTrain(MFProblem* prob, MFParameter* param); [DllImport(NativePath), SuppressUnmanagedCodeSecurity] - private static unsafe extern MFModel* MFTrainWithValidation(MFProblem* tr, MFProblem* va, MFParameter* param); + private static extern unsafe MFModel* MFTrainWithValidation(MFProblem* tr, MFProblem* va, MFParameter* param); [DllImport(NativePath), SuppressUnmanagedCodeSecurity] - private static unsafe extern float MFCrossValidation(MFProblem* prob, int nrFolds, MFParameter* param); + private static extern unsafe float MFCrossValidation(MFProblem* prob, int nrFolds, MFParameter* param); [DllImport(NativePath), SuppressUnmanagedCodeSecurity] - private static unsafe extern float MFPredict(MFModel* model, int pIdx, int qIdx); + private static extern unsafe float MFPredict(MFModel* model, int pIdx, int qIdx); private MFParameter _mfParam; private unsafe MFModel* _pMFModel; diff --git a/src/Microsoft.ML.StandardTrainers/FactorizationMachine/FieldAwareFactorizationMachineModelParameters.cs b/src/Microsoft.ML.StandardTrainers/FactorizationMachine/FieldAwareFactorizationMachineModelParameters.cs index 45791efe88..b53d8ec2d7 100644 --- a/src/Microsoft.ML.StandardTrainers/FactorizationMachine/FieldAwareFactorizationMachineModelParameters.cs +++ b/src/Microsoft.ML.StandardTrainers/FactorizationMachine/FieldAwareFactorizationMachineModelParameters.cs @@ -28,7 +28,7 @@ public sealed class FieldAwareFactorizationMachineModelParameters : ModelParamet { internal const string LoaderSignature = "FieldAwareFactMacPredict"; private protected override PredictionKind PredictionKind => PredictionKind.BinaryClassification; - private bool _norm; + private readonly bool _norm; /// /// Get the number of fields. 
It's the symbol `m` in the doc: https://github.com/wschin/fast-ffm/blob/master/fast-ffm.pdf @@ -41,7 +41,7 @@ public sealed class FieldAwareFactorizationMachineModelParameters : ModelParamet public int FeatureCount { get; } /// - /// Get the latent dimension. It's the tlngth of `v_{j, f}` in the doc: https://github.com/wschin/fast-ffm/blob/master/fast-ffm.pdf + /// Get the latent dimension. It's the length of `v_{j, f}` in the doc: https://github.com/wschin/fast-ffm/blob/master/fast-ffm.pdf /// public int LatentDimension { get; } @@ -65,7 +65,7 @@ private static VersionInfo GetVersionInfo() /// /// The host environment /// True if user wants to normalize feature vector to unit length. - /// The number of fileds, which is the symbol `m` in the doc: https://github.com/wschin/fast-ffm/blob/master/fast-ffm.pdf + /// The number of fields, which is the symbol `m` in the doc: https://github.com/wschin/fast-ffm/blob/master/fast-ffm.pdf /// The number of features, which is the symbol `n` in the doc: https://github.com/wschin/fast-ffm/blob/master/fast-ffm.pdf /// The latent dimensions, which is the length of `v_{j, f}` in the doc: https://github.com/wschin/fast-ffm/blob/master/fast-ffm.pdf /// The linear coefficients of the features, which is the symbol `w` in the doc: https://github.com/wschin/fast-ffm/blob/master/fast-ffm.pdf @@ -197,7 +197,7 @@ private protected override void SaveCore(ModelSaveContext ctx) // float[]: latent representation of features // REVIEW:FAFM needs to store the names of the features, so that they prediction data does not have the - // restriciton of the columns needing to be ordered the same as the training data. + // restriction of the columns needing to be ordered the same as the training data. Host.Assert(FieldCount > 0); Host.Assert(FeatureCount > 0); diff --git a/src/Microsoft.ML.StandardTrainers/Optimizer/DifferentiableFunction.cs b/src/Microsoft.ML.StandardTrainers/Optimizer/DifferentiableFunction.cs index 4806195531..c61019ab06 100644 --- a/src/Microsoft.ML.StandardTrainers/Optimizer/DifferentiableFunction.cs +++ b/src/Microsoft.ML.StandardTrainers/Optimizer/DifferentiableFunction.cs @@ -160,7 +160,7 @@ internal static class GradientTester // the optimal value of eps for the central difference approximation, Nocedal & Wright private const float Eps = (float)4.79e-6; - private static Random _r = new Random(5); + private static readonly Random _r = new Random(5); /// /// Tests the gradient reported by f. diff --git a/src/Microsoft.ML.StandardTrainers/Optimizer/LineSearch.cs b/src/Microsoft.ML.StandardTrainers/Optimizer/LineSearch.cs index 8fa4232508..2b6b59b4be 100644 --- a/src/Microsoft.ML.StandardTrainers/Optimizer/LineSearch.cs +++ b/src/Microsoft.ML.StandardTrainers/Optimizer/LineSearch.cs @@ -402,7 +402,7 @@ private float FindMinimum(Func func) internal sealed class BacktrackingLineSearch : IDiffLineSearch { private float _step; - private float _c1; + private readonly float _c1; /// /// Makes a backtracking line search @@ -439,9 +439,9 @@ public float Minimize(DiffFunc1D f, float initVal, float initDeriv) // possibly something we should put into our unit tests? 
internal static class Test { - private static VBuffer _c1; - private static VBuffer _c2; - private static VBuffer _c3; + private static readonly VBuffer _c1; + private static readonly VBuffer _c2; + private static readonly VBuffer _c3; private static float QuadTest(float x, out float deriv) { diff --git a/src/Microsoft.ML.StandardTrainers/Optimizer/OptimizationMonitor.cs b/src/Microsoft.ML.StandardTrainers/Optimizer/OptimizationMonitor.cs index 1b47a01d05..f8655b7369 100644 --- a/src/Microsoft.ML.StandardTrainers/Optimizer/OptimizationMonitor.cs +++ b/src/Microsoft.ML.StandardTrainers/Optimizer/OptimizationMonitor.cs @@ -195,7 +195,7 @@ internal sealed class MeanRelativeImprovementCriterion : ITerminationCriterion private readonly int _n; private readonly float _tol; private readonly int _maxIterations; - private Queue _pastValues; + private readonly Queue _pastValues; /// /// When criterion drops below this value, optimization is terminated @@ -371,7 +371,7 @@ public RelativeNormGradient(float tol = (float)1e-4) /// /// Returns true if the norm of the gradient, divided by the value, is less than the tolerance. /// - /// current state of the optimzer + /// current state of the optimizer /// the current value of the criterion /// true iff criterion is less than the tolerance public override bool Terminate(Optimizer.OptimizerState state, out string message) diff --git a/src/Microsoft.ML.StandardTrainers/Optimizer/Optimizer.cs b/src/Microsoft.ML.StandardTrainers/Optimizer/Optimizer.cs index ca6b677231..220138eed2 100644 --- a/src/Microsoft.ML.StandardTrainers/Optimizer/Optimizer.cs +++ b/src/Microsoft.ML.StandardTrainers/Optimizer/Optimizer.cs @@ -18,7 +18,7 @@ internal class Optimizer /// Based on Nocedal and Wright, "Numerical Optimization, Second Edition" protected readonly bool EnforceNonNegativity; - private ITerminationCriterion _staticTerm; + private readonly ITerminationCriterion _staticTerm; // Whether the optimizer state should keep its internal vectors dense or not. // Turning on dense internal vectors can relieve load on the garbage collector, diff --git a/src/Microsoft.ML.StandardTrainers/Optimizer/SgdOptimizer.cs b/src/Microsoft.ML.StandardTrainers/Optimizer/SgdOptimizer.cs index 285d0faf9a..a9e0a327ef 100644 --- a/src/Microsoft.ML.StandardTrainers/Optimizer/SgdOptimizer.cs +++ b/src/Microsoft.ML.StandardTrainers/Optimizer/SgdOptimizer.cs @@ -294,7 +294,7 @@ public GDOptimizer(DTerminate terminate, IDiffLineSearch lineSearch = null, bool private class LineFunc { - private bool _useCG; + private readonly bool _useCG; private VBuffer _point; private VBuffer _newPoint; @@ -309,7 +309,7 @@ private class LineFunc public float Value => _value; - private DifferentiableFunction _func; + private readonly DifferentiableFunction _func; public float Deriv => VectorUtils.DotProduct(in _dir, in _grad); diff --git a/src/Microsoft.ML.StandardTrainers/Standard/SdcaBinary.cs b/src/Microsoft.ML.StandardTrainers/Standard/SdcaBinary.cs index f0dd0b5e7c..a1e16ad283 100644 --- a/src/Microsoft.ML.StandardTrainers/Standard/SdcaBinary.cs +++ b/src/Microsoft.ML.StandardTrainers/Standard/SdcaBinary.cs @@ -416,7 +416,7 @@ private protected sealed override TModel TrainCore(IChannel ch, RoleMappedData d { // Note: At this point, 'count' may be less than the actual count of training examples. // We initialize the hash table with this partial size to avoid unnecessary rehashing. - // However, it does not mean there are exactly 'count' many trainining examples. 
+ // However, it does not mean there are exactly 'count' many training examples. // Necessary rehashing will still occur as the hash table grows. idToIdx = new IdToIdxLookup(count); // Resetting 'count' to zero. @@ -500,7 +500,7 @@ private protected sealed override TModel TrainCore(IChannel ch, RoleMappedData d if (dualsLength <= Utils.ArrayMaxSize) { // The dual variables fit into a standard float[]. - // Also storing invariants in a starndard float[]. + // Also storing invariants in a standard float[]. duals = new StandardArrayDualsTable((int)dualsLength); int invariantsLength = (int)idLoMax + 1; Contracts.Assert(invariantsLength <= Utils.ArrayMaxSize); @@ -581,7 +581,7 @@ private protected sealed override TModel TrainCore(IChannel ch, RoleMappedData d if (featureNormSquared != null) featureNormSquared[idx] = normSquared; - // REVIEW: For log-loss, the default loss function for binary classifiation, a large number + // REVIEW: For log-loss, the default loss function for binary classification, a large number // of the invariants are 1. Maybe worth to consider a more efficient way to store the invariants // for log-loss. invariants[idx] = Loss.ComputeDualUpdateInvariant(invariantCoeff * normSquared * lambdaNInv * GetInstanceWeight(cursor)); @@ -600,7 +600,7 @@ private protected sealed override TModel TrainCore(IChannel ch, RoleMappedData d // Note that P.Invoke does not ensure that the actions executes in order even if maximum number of threads is set to 1. if (numThreads == 1) { - // The synchorized SDCA procedure. + // The synchronized SDCA procedure. for (iter = 0; iter < maxIterations; iter++) { if (converged) @@ -833,7 +833,7 @@ private void InitializeConvergenceMetrics(out string[] names, out Double[] initi var output = WDot(in features, in weights[0], biasReg[0] + biasUnreg[0]); var dualUpdate = Loss.DualUpdate(output, label, dual, invariant, numThreads); - // The successive over-relaxation apporach to adjust the sum of dual variables (biasReg) to zero. + // The successive over-relaxation approach to adjust the sum of dual variables (biasReg) to zero. // Reference to details: http://stat.rutgers.edu/home/tzhang/papers/ml02_dual.pdf pp. 16-17. var adjustment = l1ThresholdZero ? lr * biasReg[0] : lr * l1IntermediateBias[0]; dualUpdate -= adjustment; @@ -1047,7 +1047,7 @@ private protected abstract class DualsTableBase /// private sealed class StandardArrayDualsTable : DualsTableBase { - private float[] _duals; + private readonly float[] _duals; public override long Length => _duals.Length; @@ -1074,7 +1074,7 @@ public override void ApplyAt(long index, Visitor manip) /// private sealed class BigArrayDualsTable : DualsTableBase { - private BigArray _duals; + private readonly BigArray _duals; public override long Length => _duals.Length; @@ -1196,7 +1196,7 @@ public Entry(long itNext, DataViewRowId value) private long _count; // The entries. - private BigArray _entries; + private readonly BigArray _entries; /// /// Gets the count of id entries. @@ -2161,7 +2161,7 @@ private protected override TModel TrainCore(IChannel ch, RoleMappedData data, Li int iter = 0; pch.SetHeader(new ProgressHeader(new[] { "Loss", "Improvement" }, new[] { "iterations" }), entry => entry.SetProgress(0, iter, _options.NumberOfIterations)); - // Synchorized SGD. + // Synchronized SGD. 
for (int i = 0; i < _options.NumberOfIterations; i++) { iter = i; @@ -2423,7 +2423,7 @@ internal LegacySgdBinaryTrainer(IHostEnvironment env, Options options) } /// - /// leads to logistic regression which naturally supports probablity output. For other loss functions, + /// leads to logistic regression which naturally supports probability output. For other loss functions, /// a calibrator would be added after /// finishing its job. Therefore, we always have three output columns in the legacy world. /// diff --git a/src/Microsoft.ML.Sweeper/Algorithms/NelderMead.cs b/src/Microsoft.ML.Sweeper/Algorithms/NelderMead.cs index 074f704625..9bf6424431 100644 --- a/src/Microsoft.ML.Sweeper/Algorithms/NelderMead.cs +++ b/src/Microsoft.ML.Sweeper/Algorithms/NelderMead.cs @@ -73,7 +73,7 @@ private enum OptimizationStage private OptimizationStage _stage; private readonly List> _pendingSweeps; - private Queue> _pendingSweepsNotSubmitted; + private readonly Queue> _pendingSweepsNotSubmitted; private KeyValuePair _lastReflectionResult; private KeyValuePair _worst; diff --git a/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs b/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs index a149daa1a6..fd556a175f 100644 --- a/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs +++ b/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs @@ -596,7 +596,7 @@ private sealed class Mapper : MapperBase if (typeValueCount % valCount != 0) throw Contracts.Except($"Input shape mismatch: Input '{_parent.Inputs[i]}' has shape {originalShape.ToString()}, but input data is of length {typeValueCount}."); - // This cover the 2-variable senario e.g. [?, ?, ?, C] where we can assume typeDims provides the information of [W, H, C] + // This covers the 2-variable scenario e.g. [?, ?, ?, C] where we can assume typeDims provides the information of [W, H, C] // The shape will become [?, W, H, C] var originalShapeDims = originalShape.dims; var originalShapeNdim = originalShape.ndim; @@ -869,7 +869,7 @@ private class TensorValueGetterVec : ITensorValueGetter private T[] _denseData; private T[] _bufferedData; private int _position; - private long[] _dims; + private readonly long[] _dims; private readonly long _bufferedDataSize; public TensorValueGetterVec(DataViewRow input, int colIndex, TensorShape tfShape) diff --git a/src/Microsoft.ML.TensorFlow/TensorflowUtils.cs b/src/Microsoft.ML.TensorFlow/TensorflowUtils.cs index 0c558634d5..faec243057 100644 --- a/src/Microsoft.ML.TensorFlow/TensorflowUtils.cs +++ b/src/Microsoft.ML.TensorFlow/TensorflowUtils.cs @@ -276,7 +276,7 @@ internal static bool IsSavedModel(IHostEnvironment env, string modelPath) } // Currently used in TensorFlowTransform to protect temporary folders used when working with TensorFlow's SavedModel format. - // Models are considered executable code, so we need to ACL tthe temp folders for high-rights process (so low-rights process can’t access it). + // Models are considered executable code, so we need to ACL the temp folders for high-rights process (so low-rights process can’t access it). /// /// Given a folder path, create it with proper ACL if it doesn't exist. /// Fails if the folder name is empty, or can't create the folder.
@@ -516,15 +516,15 @@ internal static Tensor CastDataAndReturnAsTensor(T data) /// public class Runner : IDisposable { - private TF_Output[] _inputs; - private TF_Output[] _outputs; - private IntPtr[] _outputValues; - private IntPtr[] _inputValues; - private Tensor[] _inputTensors; - private IntPtr[] _operations; - private Session _session; - private Tensor[] _outputTensors; - private Status _status; + private readonly TF_Output[] _inputs; + private readonly TF_Output[] _outputs; + private readonly IntPtr[] _outputValues; + private readonly IntPtr[] _inputValues; + private readonly Tensor[] _inputTensors; + private readonly IntPtr[] _operations; + private readonly Session _session; + private readonly Tensor[] _outputTensors; + private readonly Status _status; internal Runner(Session session, TF_Output[] inputs = null, TF_Output[] outputs = null, IntPtr[] operations = null) { diff --git a/src/Microsoft.ML.TimeSeries/MovingAverageTransform.cs b/src/Microsoft.ML.TimeSeries/MovingAverageTransform.cs index aaaa0d2765..90c1054fee 100644 --- a/src/Microsoft.ML.TimeSeries/MovingAverageTransform.cs +++ b/src/Microsoft.ML.TimeSeries/MovingAverageTransform.cs @@ -49,7 +49,7 @@ public sealed class Arguments public string Weights = null; } - private int _lag; + private readonly int _lag; private static VersionInfo GetVersionInfo() { @@ -175,7 +175,7 @@ internal static Single ComputeMovingAverageNonUniform(FixedSizeQueue oth /// /// Finite Value: no infinite value in the sliding window and at least a non NaN value /// NaN value: only NaN values in the sliding window or +/- Infinite - /// Inifinite value: one infinite value in the sliding window (sign is no relevant) + /// Infinite value: one infinite value in the sliding window (sign is not relevant) /// internal static Single ComputeMovingAverageUniform(FixedSizeQueue others, Single input, int lag, Single lastDropped, ref Single currentSum, diff --git a/src/Microsoft.ML.TimeSeries/RootCauseAnalyzer.cs b/src/Microsoft.ML.TimeSeries/RootCauseAnalyzer.cs index 037e6fcfca..2d54c4fe0f 100644 --- a/src/Microsoft.ML.TimeSeries/RootCauseAnalyzer.cs +++ b/src/Microsoft.ML.TimeSeries/RootCauseAnalyzer.cs @@ -12,13 +12,13 @@ namespace Microsoft.ML.TimeSeries { internal class RootCauseAnalyzer { - private static double _anomalyRatioThreshold = 0.5; - private static double _anomalyPreDeltaThreshold = 2; + private static readonly double _anomalyRatioThreshold = 0.5; + private static readonly double _anomalyPreDeltaThreshold = 2; - private RootCauseLocalizationInput _src; - private double _beta; - private double _rootCauseThreshold; - private List _preparedCauses; + private readonly RootCauseLocalizationInput _src; + private readonly double _beta; + private readonly double _rootCauseThreshold; + private readonly List _preparedCauses; public RootCauseAnalyzer(RootCauseLocalizationInput src, double beta, double rootCauseThreshold) { @@ -163,7 +163,7 @@ private void LocalizeRootCausesByDimension(PointTree anomalyTree, PointTree poin } else { - //Use leaves node informatin to get top anomalies + //Use leaves node information to get top anomalies children = GetTopAnomaly(anomalyTree.Leaves, anomalyTree.ParentNode, pointTree.Leaves, dimension.DimensionKey, true); } diff --git a/src/Microsoft.ML.TimeSeries/STL/InnerStl.cs b/src/Microsoft.ML.TimeSeries/STL/InnerStl.cs index 59a3518cdd..eed09a230b 100644 --- a/src/Microsoft.ML.TimeSeries/STL/InnerStl.cs +++ b/src/Microsoft.ML.TimeSeries/STL/InnerStl.cs @@ -278,7 +278,7 @@ private void TrendSmooth(double[] deseasonSeries, int
np, double[] t) /// internal class VirtualXValuesProvider { - private static Dictionary> _xValuesPool; + private static readonly Dictionary> _xValuesPool; static VirtualXValuesProvider() { diff --git a/src/Microsoft.ML.TimeSeries/SlidingWindowTransformBase.cs b/src/Microsoft.ML.TimeSeries/SlidingWindowTransformBase.cs index 78adaa4f9c..a766c1cc18 100644 --- a/src/Microsoft.ML.TimeSeries/SlidingWindowTransformBase.cs +++ b/src/Microsoft.ML.TimeSeries/SlidingWindowTransformBase.cs @@ -57,8 +57,8 @@ public sealed class Arguments : TransformInputBase } private readonly int _lag; - private BeginOptions _begin; - private TInput _nanValue; + private readonly BeginOptions _begin; + private readonly TInput _nanValue; protected SlidingWindowTransformBase(Arguments args, string loaderSignature, IHostEnvironment env, IDataView input) : base(args.WindowSize + args.Lag - 1, args.WindowSize + args.Lag - 1, args.Name, args.Source, loaderSignature, env, input) diff --git a/src/Microsoft.ML.TimeSeries/SrCnnAnomalyDetectionBase.cs b/src/Microsoft.ML.TimeSeries/SrCnnAnomalyDetectionBase.cs index ebcd914ab4..ce54fd3b0d 100644 --- a/src/Microsoft.ML.TimeSeries/SrCnnAnomalyDetectionBase.cs +++ b/src/Microsoft.ML.TimeSeries/SrCnnAnomalyDetectionBase.cs @@ -169,7 +169,7 @@ private protected override void LearnStateFromDataCore(FixedSizeQueue dat { } - private protected override sealed void SpectralResidual(Single input, FixedSizeQueue data, ref VBufferEditor result) + private protected sealed override void SpectralResidual(Single input, FixedSizeQueue data, ref VBufferEditor result) { // Step 1: Get backadd wave List backAddList = BackAdd(data); @@ -226,7 +226,7 @@ private protected override sealed void SpectralResidual(Single input, FixedSizeQ List filteredIfftMagList = AverageFilter(ifftMagList, Parent.JudgementWindowSize); // Step 7: Calculate score and set result - var score = CalculateSocre(ifftMagList[data.Count - 1], filteredIfftMagList[data.Count - 1]); + var score = CalculateScore(ifftMagList[data.Count - 1], filteredIfftMagList[data.Count - 1]); score /= 10.0f; result.Values[1] = score; @@ -283,7 +283,7 @@ private List AverageFilter(List data, int n) return cumSumList; } - private Single CalculateSocre(Single mag, Single avgMag) + private Single CalculateScore(Single mag, Single avgMag) { double safeDivisor = avgMag; if (safeDivisor < 1e-8) diff --git a/src/Microsoft.ML.TimeSeries/SrCnnEntireAnomalyDetector.cs b/src/Microsoft.ML.TimeSeries/SrCnnEntireAnomalyDetector.cs index 92c8b7c042..58b5442e13 100644 --- a/src/Microsoft.ML.TimeSeries/SrCnnEntireAnomalyDetector.cs +++ b/src/Microsoft.ML.TimeSeries/SrCnnEntireAnomalyDetector.cs @@ -35,7 +35,7 @@ public enum SrCnnDetectMode } /// - /// The Deseasonality modes of SrCnn models. The de-seasonality mode is envoked when the period of the series is greater than 0. + /// The Deseasonality modes of SrCnn models. The de-seasonality mode is invoked when the period of the series is greater than 0. 
/// public enum SrCnnDeseasonalityMode { @@ -267,7 +267,7 @@ internal sealed class Batch private List _previousBatch; private List _batch; private readonly int _outputLength; - private SrCnnEntireModeler _modeler; + private readonly SrCnnEntireModeler _modeler; private int _batchSize; private double[][] _results; private int _bLen; diff --git a/src/Microsoft.ML.TimeSeries/TrajectoryMatrix.cs b/src/Microsoft.ML.TimeSeries/TrajectoryMatrix.cs index 62d81cf6ec..a50898fa9f 100644 --- a/src/Microsoft.ML.TimeSeries/TrajectoryMatrix.cs +++ b/src/Microsoft.ML.TimeSeries/TrajectoryMatrix.cs @@ -62,10 +62,10 @@ internal sealed class TrajectoryMatrix private bool _isSeriesFftCached; private readonly bool _shouldFftUsed; - private IExceptionContext _ectx; + private readonly IExceptionContext _ectx; private readonly int _k; - private void ComputeBoundryIndices(int start, int end, out int us, out int ue, out int vs, out int ve) + private void ComputeBoundaryIndices(int start, int end, out int us, out int ue, out int vs, out int ve) { _ectx.Assert(0 <= end && end < _seriesLength, "The end index must be in [0, seriesLength)."); _ectx.Assert(0 <= start && start <= end, "The start index must be in [0, end index]."); @@ -385,7 +385,7 @@ private void NaiveMultiplyTranspose(Single[] vector, Single[] result, bool add = /// /// This function computes the the multiplication of the transpose of the trajectory matrix H by an arbitrary vector v, i.e. H' * v. /// Since the trajectory matrix is a Hankel matrix, using the Discrete Fourier Transform, - /// the multiplication is carried out in O(N.log(N)) instead of O(N^2), wheere N is the series length. + /// the multiplication is carried out in O(N.log(N)) instead of O(N^2), where N is the series length. /// For details, refer to Algorithm 3 in http://arxiv.org/pdf/0911.4498.pdf. /// /// The input vector @@ -485,7 +485,7 @@ public void MultiplyTranspose(Single[] vector, Single[] result, bool add = false s = start ?? 0; e = end ?? _seriesLength - 1; - ComputeBoundryIndices(s, e, out us, out ue, out vs, out ve); + ComputeBoundaryIndices(s, e, out us, out ue, out vs, out ve); _ectx.Assert(0 <= ue && ue < _windowSize); _ectx.Assert(0 <= us && us <= ue); _ectx.Assert(0 <= ve && ve < _k); @@ -558,7 +558,7 @@ public void MultiplyTranspose(Single[] vector, Single[] result, bool add = false s = start ?? 0; e = end ?? 
_seriesLength - 1; - ComputeBoundryIndices(s, e, out us, out ue, out vs, out ve); + ComputeBoundaryIndices(s, e, out us, out ue, out vs, out ve); _ectx.Assert(0 <= ue && ue < _windowSize); _ectx.Assert(0 <= us && us <= ue); _ectx.Assert(0 <= ve && ve < _k); diff --git a/src/Microsoft.ML.Transforms/Expression/CodeGen.cs b/src/Microsoft.ML.Transforms/Expression/CodeGen.cs index c97fb405bd..1703a63a7e 100644 --- a/src/Microsoft.ML.Transforms/Expression/CodeGen.cs +++ b/src/Microsoft.ML.Transforms/Expression/CodeGen.cs @@ -22,9 +22,9 @@ internal sealed partial class LambdaCompiler : IDisposable { public const int MaxParams = 16; - private LambdaNode _top; - private Type _delType; - private MethodGenerator _meth; + private readonly LambdaNode _top; + private readonly Type _delType; + private readonly MethodGenerator _meth; public static Delegate Compile(out List errors, LambdaNode node) { @@ -102,8 +102,8 @@ private sealed class Visitor : ExprVisitor private static readonly MethodInfo _methGetFalseBL = ((Func)BuiltinFunctions.False).GetMethodInfo(); private static readonly MethodInfo _methGetTrueBL = ((Func)BuiltinFunctions.True).GetMethodInfo(); - private MethodGenerator _meth; - private ILGenerator _gen; + private readonly MethodGenerator _meth; + private readonly ILGenerator _gen; private List _errors; private sealed class CachedWithLocal @@ -135,7 +135,7 @@ public CachedWithLocal(WithLocalNode node, LocalBuilder value, LocalBuilder flag // the value has been computed and stored yet. Lazy computed values avoid potentially // expensive computation that might not be needed, but result in code bloat since each // use tests the flag, and if false, computes and stores the value. - private List _cacheWith; + private readonly List _cacheWith; public Visitor(MethodGenerator meth) { diff --git a/src/Microsoft.ML.Transforms/Expression/KeyWordTable.cs b/src/Microsoft.ML.Transforms/Expression/KeyWordTable.cs index d8eaa016e2..14753f6125 100644 --- a/src/Microsoft.ML.Transforms/Expression/KeyWordTable.cs +++ b/src/Microsoft.ML.Transforms/Expression/KeyWordTable.cs @@ -26,8 +26,8 @@ public KeyWordKind(TokKind kind, bool isContextKeyWord) } private readonly NormStr.Pool _pool; - private Dictionary _mpnstrtidWord; - private Dictionary _mpnstrtidPunc; + private readonly Dictionary _mpnstrtidWord; + private readonly Dictionary _mpnstrtidPunc; public KeyWordTable(NormStr.Pool pool) { diff --git a/src/Microsoft.ML.Transforms/Expression/LambdaBinder.cs b/src/Microsoft.ML.Transforms/Expression/LambdaBinder.cs index b3b7980f5d..340cf37ba7 100644 --- a/src/Microsoft.ML.Transforms/Expression/LambdaBinder.cs +++ b/src/Microsoft.ML.Transforms/Expression/LambdaBinder.cs @@ -23,7 +23,7 @@ internal sealed partial class LambdaBinder : NodeVisitor { private readonly IHost _host; // The stack of active with nodes. - private List _rgwith; + private readonly List _rgwith; private List _errors; private LambdaNode _lambda; diff --git a/src/Microsoft.ML.Transforms/Expression/LexCharUtils.cs b/src/Microsoft.ML.Transforms/Expression/LexCharUtils.cs index 079e860234..c3b8996a51 100644 --- a/src/Microsoft.ML.Transforms/Expression/LexCharUtils.cs +++ b/src/Microsoft.ML.Transforms/Expression/LexCharUtils.cs @@ -97,7 +97,7 @@ public bool Is(LexCharKind kind) } // The mapping from character to CharInfo for characters less than 128. 
- private static LexCharInfo[] _rgchi; + private static readonly LexCharInfo[] _rgchi; static LexCharUtils() { diff --git a/src/Microsoft.ML.Transforms/Expression/Lexer.cs b/src/Microsoft.ML.Transforms/Expression/Lexer.cs index 495586158b..b6d13de0a3 100644 --- a/src/Microsoft.ML.Transforms/Expression/Lexer.cs +++ b/src/Microsoft.ML.Transforms/Expression/Lexer.cs @@ -47,9 +47,9 @@ private partial class LexerImpl private readonly Lexer _lex; private readonly CharCursor _cursor; - private StringBuilder _sb; // Used while building a token. + private readonly StringBuilder _sb; // Used while building a token. private int _ichMinTok; // The start of the current token. - private Queue _queue; // For multiple returns. + private readonly Queue _queue; // For multiple returns. #pragma warning disable 414 // This will be used by any pre-processor, so keep it around. private bool _fLineStart; diff --git a/src/Microsoft.ML.Transforms/Expression/MethodGenerator.cs b/src/Microsoft.ML.Transforms/Expression/MethodGenerator.cs index b2ebd1a21d..d4312663d6 100644 --- a/src/Microsoft.ML.Transforms/Expression/MethodGenerator.cs +++ b/src/Microsoft.ML.Transforms/Expression/MethodGenerator.cs @@ -68,7 +68,7 @@ public void Dispose() public struct Temporary : IDisposable { private Action _dispose; - private bool _isRef; + private readonly bool _isRef; // Should only be created by MethodGenerator. Too bad C# can't enforce this without // reversing the class nesting. diff --git a/src/Microsoft.ML.Transforms/MissingValueReplacingUtils.cs b/src/Microsoft.ML.Transforms/MissingValueReplacingUtils.cs index 55a51bd3ee..33bf4cad94 100644 --- a/src/Microsoft.ML.Transforms/MissingValueReplacingUtils.cs +++ b/src/Microsoft.ML.Transforms/MissingValueReplacingUtils.cs @@ -373,8 +373,8 @@ private class ModeStat private TType _modeSoFar; private int _maxCount; - private Dictionary _valueCounts; - private IsValid _validityCheck; + private readonly Dictionary _valueCounts; + private readonly IsValid _validityCheck; public ModeStat(IsValid valid) { _modeSoFar = default; diff --git a/src/Microsoft.ML.Transforms/MutualInformationFeatureSelection.cs b/src/Microsoft.ML.Transforms/MutualInformationFeatureSelection.cs index 2f5248909d..a0c6203025 100644 --- a/src/Microsoft.ML.Transforms/MutualInformationFeatureSelection.cs +++ b/src/Microsoft.ML.Transforms/MutualInformationFeatureSelection.cs @@ -104,7 +104,7 @@ internal sealed class Options : TransformInputBase public int NumBins = Defaults.NumBins; } - private IHost _host; + private readonly IHost _host; private readonly (string outputColumnName, string inputColumnName)[] _columns; private readonly string _labelColumnName; private readonly int _slotsInOutput; diff --git a/src/Microsoft.ML.Transforms/OneHotHashEncoding.cs b/src/Microsoft.ML.Transforms/OneHotHashEncoding.cs index d32d9c8e86..50a50db6a0 100644 --- a/src/Microsoft.ML.Transforms/OneHotHashEncoding.cs +++ b/src/Microsoft.ML.Transforms/OneHotHashEncoding.cs @@ -124,7 +124,7 @@ internal sealed class Options : TransformInputBase /// Name of the output column. /// Name of the column to be transformed. If this is null '' will be used. /// Number of bits to hash into. Must be between 1 and 30, inclusive. - /// During hashing we constuct mappings between original values and the produced hash values. + /// During hashing we construct mappings between original values and the produced hash values. 
/// Text representation of original values are stored in the slot names of the metadata for the new column.Hashing, as such, can map many initial values to one. /// specifies the upper bound of the number of distinct input values mapping to a hash that should be retained. /// 0 does not retain any input values. -1 retains all input values mapping to each hash. @@ -222,19 +222,19 @@ internal OneHotHashEncodingTransformer(HashingEstimator hash, IEstimator produces an [indicator vector](https://en.wikipedia.org/wiki/Indicator_vector). /// Each slot in this vector corresponds to a category in the dictionary, so its length is the size of the built dictionary. - /// If a value is not found in the dictioray, the output is the zero vector. + /// If a value is not found in the dictionary, the output is the zero vector. /// /// - produces one vector such that each slot stores the number - /// of occurances of the corresponding value in the input vector. + /// of occurrences of the corresponding value in the input vector. /// Each slot in this vector corresponds to a value in the dictionary, so its length is the size of the built dictionary. /// and /// differ simply in how the bit-vectors generated from individual slots in the input column are aggregated: /// for Indicator they are concatenated and for Bag they are added. When the source column is a Scalar, the Indicator and Bag options are identical. /// /// - produces keys in a column. - /// If the input column is a vector, the output contains a vectory [key](xref:Microsoft.ML.Data.KeyDataViewType) type, where each slot of the + /// If the input column is a vector, the output contains a vector [key](xref:Microsoft.ML.Data.KeyDataViewType) type, where each slot of the /// vector corresponds to the respective slot of the input vector. - /// If a category is not found in the bulit dictionary, it is assigned the value zero. + /// If a category is not found in the built dictionary, it is assigned the value zero. /// /// - produces a binary encoded vector to represent the values found in the dictionary /// that are present in the input column. If a value in the input column is not found in the dictionary, the output is the zero vector. @@ -278,7 +278,7 @@ internal sealed class ColumnOptions /// Number of bits to hash into. Must be between 1 and 31, inclusive. /// Hashing seed. /// Whether the position of each term should be included in the hash. - /// During hashing we constuct mappings between original values and the produced hash values. + /// During hashing we construct mappings between original values and the produced hash values. /// Text representation of original values are stored in the slot names of the metadata for the new column.Hashing, as such, can map many initial values to one. /// specifies the upper bound of the number of distinct input values mapping to a hash that should be retained. /// 0 does not retain any input values. -1 retains all input values mapping to each hash. @@ -296,7 +296,7 @@ internal sealed class ColumnOptions private readonly IHost _host; private readonly IEstimator _toSomething; - private HashingEstimator _hash; + private readonly HashingEstimator _hash; /// /// Instantiates a new instance of . @@ -306,7 +306,7 @@ internal sealed class ColumnOptions /// Name of the column to transform. /// If set to , the value of the will be used as source. /// Number of bits to hash into. Must be between 1 and 30, inclusive. - /// During hashing we constuct mappings between original values and the produced hash values. 
+ /// During hashing we construct mappings between original values and the produced hash values. /// Text representation of original values are stored in the slot names of the metadata for the new column.Hashing, as such, can map many initial values to one. /// specifies the upper bound of the number of distinct input values mapping to a hash that should be retained. /// 0 does not retain any input values. -1 retains all input values mapping to each hash. diff --git a/src/Microsoft.ML.Transforms/SvmLight/SvmLightLoader.cs b/src/Microsoft.ML.Transforms/SvmLight/SvmLightLoader.cs index 346d0acd8d..9d176e0115 100644 --- a/src/Microsoft.ML.Transforms/SvmLight/SvmLightLoader.cs +++ b/src/Microsoft.ML.Transforms/SvmLight/SvmLightLoader.cs @@ -442,7 +442,7 @@ private sealed class Cursor : RootCursorBase private int _fileIdx; private TextReader _currReader; private ReadOnlyMemory _text; - private ValueGetter> _getter; + private readonly ValueGetter> _getter; public override long Batch => 0; diff --git a/src/Microsoft.ML.Transforms/Text/LdaSingleBox.cs b/src/Microsoft.ML.Transforms/Text/LdaSingleBox.cs index e403992e1c..26a7d4105e 100644 --- a/src/Microsoft.ML.Transforms/Text/LdaSingleBox.cs +++ b/src/Microsoft.ML.Transforms/Text/LdaSingleBox.cs @@ -94,12 +94,12 @@ protected override bool ReleaseHandle() internal sealed class LdaSingleBox : IDisposable { - private LdaInterface.SafeLdaEngineHandle _engine; + private readonly LdaInterface.SafeLdaEngineHandle _engine; private bool _isDisposed; - private int[] _topics; - private int[] _probabilities; - private int[] _summaryTerm; - private float[] _summaryTermProb; + private readonly int[] _topics; + private readonly int[] _probabilities; + private readonly int[] _summaryTerm; + private readonly float[] _summaryTermProb; private readonly int _likelihoodInterval; private readonly float _alpha; private readonly float _beta; diff --git a/src/Microsoft.ML.Transforms/Text/LdaTransform.cs b/src/Microsoft.ML.Transforms/Text/LdaTransform.cs index cb6011c140..11c50d42ff 100644 --- a/src/Microsoft.ML.Transforms/Text/LdaTransform.cs +++ b/src/Microsoft.ML.Transforms/Text/LdaTransform.cs @@ -231,7 +231,7 @@ private sealed class LdaState : IDisposable private readonly object _preparationSyncRoot; private readonly object _testSyncRoot; private bool _predictionPreparationDone; - private LdaSingleBox _ldaTrainer; + private readonly LdaSingleBox _ldaTrainer; private LdaState() { diff --git a/src/Microsoft.ML.Transforms/Text/WordEmbeddingsExtractor.cs b/src/Microsoft.ML.Transforms/Text/WordEmbeddingsExtractor.cs index 8844e4391c..3c3e9a0a3a 100644 --- a/src/Microsoft.ML.Transforms/Text/WordEmbeddingsExtractor.cs +++ b/src/Microsoft.ML.Transforms/Text/WordEmbeddingsExtractor.cs @@ -88,11 +88,11 @@ internal static VersionInfo GetVersionInfo() private readonly WordEmbeddingEstimator.PretrainedModelKind? 
_modelKind; private readonly string _modelFileNameWithPath; - private static object _embeddingsLock = new object(); + private static readonly object _embeddingsLock = new object(); private readonly bool _customLookup; private readonly int _linesToSkip; private readonly Model _currentVocab; - private static Dictionary> _vocab = new Dictionary>(); + private static readonly Dictionary> _vocab = new Dictionary>(); private sealed class Model { @@ -602,7 +602,7 @@ private ValueGetter> GetGetterVec(DataViewRow input, int iinfo) } } - private static Dictionary _modelsMetaData = new Dictionary() + private static readonly Dictionary _modelsMetaData = new Dictionary() { { WordEmbeddingEstimator.PretrainedModelKind.GloVe50D, "glove.6B.50d.txt" }, { WordEmbeddingEstimator.PretrainedModelKind.GloVe100D, "glove.6B.100d.txt" }, @@ -616,7 +616,7 @@ private ValueGetter> GetGetterVec(DataViewRow input, int iinfo) { WordEmbeddingEstimator.PretrainedModelKind.SentimentSpecificWordEmbedding, "sentiment.emd" } }; - private static Dictionary _linesToSkipInModels = new Dictionary() + private static readonly Dictionary _linesToSkipInModels = new Dictionary() { { WordEmbeddingEstimator.PretrainedModelKind.FastTextWikipedia300D, 1 } }; private string EnsureModelFile(IHostEnvironment env, out int linesToSkip, WordEmbeddingEstimator.PretrainedModelKind kind) @@ -786,12 +786,12 @@ public sealed class WordEmbeddingEstimator : IEstimator /// Extracts word embeddings. /// Output three times more values than dimension of the model specified in - /// First set of values represent minumum encountered values (for each dimension), second set represent average (for each dimension) + /// First set of values represent minimum encountered values (for each dimension), second set represent average (for each dimension) /// and third one represent maximum encountered values (for each dimension). /// /// The local instance of /// Name of the column resulting from the transformation of . - /// The path of the pre-trained embeedings model to use. + /// The path of the pre-trained embeddings model to use. /// Name of the column to transform. internal WordEmbeddingEstimator(IHostEnvironment env, string outputColumnName, string customModelFile, string inputColumnName = null) : this(env, customModelFile, new ColumnOptions(outputColumnName, inputColumnName ?? outputColumnName)) @@ -801,12 +801,12 @@ internal WordEmbeddingEstimator(IHostEnvironment env, string outputColumnName, s /// /// Extracts word embeddings. /// Output three times more values than dimension of the model specified in - /// First set of values represent minumum encountered values (for each dimension), second set represent average (for each dimension) + /// First set of values represent minimum encountered values (for each dimension), second set represent average (for each dimension) /// and third one represent maximum encountered values (for each dimension). /// /// The local instance of /// The embeddings to use. - /// The array columns, and per-column configurations to extract embeedings from. + /// The array columns, and per-column configurations to extract embeddings from. 
internal WordEmbeddingEstimator(IHostEnvironment env, PretrainedModelKind modelKind = PretrainedModelKind.SentimentSpecificWordEmbedding, params ColumnOptions[] columns) diff --git a/src/Microsoft.ML.Transforms/UngroupTransform.cs b/src/Microsoft.ML.Transforms/UngroupTransform.cs index 6fdfa94bf4..ab09e325ce 100644 --- a/src/Microsoft.ML.Transforms/UngroupTransform.cs +++ b/src/Microsoft.ML.Transforms/UngroupTransform.cs @@ -474,7 +474,7 @@ private sealed class Cursor : LinkedRootCursorBase // As a side effect, getters also populate these actual sizes of the necessary pivot columns on MoveNext. // Parallel to columns. - private int[] _colSizes; + private readonly int[] _colSizes; public Cursor(IChannelProvider provider, DataViewRowCursor input, UngroupBinding schema, Func predicate) : base(provider, input) diff --git a/src/Microsoft.ML.Vision/DnnRetrainTransform.cs b/src/Microsoft.ML.Vision/DnnRetrainTransform.cs index 810e984155..d172633057 100644 --- a/src/Microsoft.ML.Vision/DnnRetrainTransform.cs +++ b/src/Microsoft.ML.Vision/DnnRetrainTransform.cs @@ -46,14 +46,14 @@ internal sealed class DnnRetrainTransformer : RowToRowTransformerBase, IDisposab private readonly string _modelLocation; private readonly bool _isTemporarySavedModel; private readonly bool _addBatchDimensionInput; - private Session _session; + private readonly Session _session; private readonly DataViewType[] _outputTypes; private readonly TF_DataType[] _tfOutputTypes; private readonly TF_DataType[] _tfInputTypes; private readonly TensorShape[] _tfInputShapes; private readonly (Operation, int)[] _tfInputOperations; private readonly (Operation, int)[] _tfOutputOperations; - private TF_Output[] _tfInputNodes; + private readonly TF_Output[] _tfInputNodes; private readonly TF_Output[] _tfOutputNodes; private Graph Graph => _session.graph; private readonly Dictionary _idvToTfMapping; @@ -975,7 +975,7 @@ private class TensorValueGetter : ITensorValueGetter private readonly TensorShape _tfShape; private int _position; private readonly bool _keyType; - private long[] _dims; + private readonly long[] _dims; public TensorValueGetter(DataViewRow input, int colIndex, TensorShape tfShape, bool keyType = false) { @@ -1056,7 +1056,7 @@ private class TensorValueGetterVec : ITensorValueGetter private T[] _denseData; private T[] _bufferedData; private int _position; - private long[] _dims; + private readonly long[] _dims; private readonly long _bufferedDataSize; public TensorValueGetterVec(DataViewRow input, int colIndex, TensorShape tfShape) diff --git a/src/Microsoft.ML.Vision/ImageClassificationTrainer.cs b/src/Microsoft.ML.Vision/ImageClassificationTrainer.cs index 40a63ca780..f6082c53a9 100644 --- a/src/Microsoft.ML.Vision/ImageClassificationTrainer.cs +++ b/src/Microsoft.ML.Vision/ImageClassificationTrainer.cs @@ -222,7 +222,7 @@ public sealed class EarlyStopping /// /// The metric to be monitored (eg Accuracy, Loss). /// - private EarlyStoppingMetric _metric; + private readonly EarlyStoppingMetric _metric; /// /// Minimum change in the monitored quantity to be considered as an improvement. 
@@ -482,7 +482,7 @@ public sealed class Options : TrainerInputBaseWithLabel private Tensor _resizedImage; private string _jpegDataTensorName; private string _resizedImageTensorName; - private string _inputTensorName; + private readonly string _inputTensorName; private string _softmaxTensorName; private readonly string _checkpointPath; private readonly string _bottleneckOperationName; @@ -784,7 +784,7 @@ private static Tensor EncodeByteAsString(VBuffer buffer) internal sealed class ImageProcessor { - private Runner _imagePreprocessingRunner; + private readonly Runner _imagePreprocessingRunner; public ImageProcessor(Session session, string jpegDataTensorName, string resizeImageTensorName) { @@ -1463,8 +1463,8 @@ private protected override void SaveCore(ModelSaveContext ctx) private class Classifier { - private Runner _runner; - private ImageClassificationTrainer.ImageProcessor _imageProcessor; + private readonly Runner _runner; + private readonly ImageClassificationTrainer.ImageProcessor _imageProcessor; public Classifier(ImageClassificationModelParameters model) { diff --git a/test/Microsoft.Data.Analysis.Interactive.Tests/DataFrameInteractiveTests.cs b/test/Microsoft.Data.Analysis.Interactive.Tests/DataFrameInteractiveTests.cs index 033893819b..468e0897f2 100644 --- a/test/Microsoft.Data.Analysis.Interactive.Tests/DataFrameInteractiveTests.cs +++ b/test/Microsoft.Data.Analysis.Interactive.Tests/DataFrameInteractiveTests.cs @@ -1,10 +1,10 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Linq; -using Xunit; using Microsoft.DotNet.Interactive.Formatting; +using Xunit; namespace Microsoft.Data.Analysis.Interactive.Tests { diff --git a/test/Microsoft.Data.Analysis.Tests/BufferTests.cs b/test/Microsoft.Data.Analysis.Tests/BufferTests.cs index 96f568b6a4..5f482955f6 100644 --- a/test/Microsoft.Data.Analysis.Tests/BufferTests.cs +++ b/test/Microsoft.Data.Analysis.Tests/BufferTests.cs @@ -188,94 +188,94 @@ public void TestArrowStringColumnClone() Assert.Null(clone[i]); } -//#if !NETFRAMEWORK // https://github.com/dotnet/corefxlab/issues/2796 -// [Fact] -// public void TestPrimitiveColumnGetReadOnlyBuffers() -// { -// RecordBatch recordBatch = new RecordBatch.Builder() -// .Append("Column1", false, col => col.Int32(array => array.AppendRange(Enumerable.Range(0, 10)))).Build(); -// DataFrame df = DataFrame.FromArrowRecordBatch(recordBatch); - -// PrimitiveDataFrameColumn column = df.Columns["Column1"] as PrimitiveDataFrameColumn; - -// IEnumerable> buffers = column.GetReadOnlyDataBuffers(); -// IEnumerable> nullBitMaps = column.GetReadOnlyNullBitMapBuffers(); - -// long i = 0; -// IEnumerator> bufferEnumerator = buffers.GetEnumerator(); -// IEnumerator> nullBitMapsEnumerator = nullBitMaps.GetEnumerator(); -// while (bufferEnumerator.MoveNext() && nullBitMapsEnumerator.MoveNext()) -// { -// ReadOnlyMemory dataBuffer = bufferEnumerator.Current; -// ReadOnlyMemory nullBitMap = nullBitMapsEnumerator.Current; - -// ReadOnlySpan span = dataBuffer.Span; -// for (int j = 0; j < span.Length; j++) -// { -// // Each buffer has a max length of int.MaxValue -// Assert.Equal(span[j], column[j + i * int.MaxValue]); -// } - -// bool GetBit(byte curBitMap, int index) -// { -// return ((curBitMap >> (index & 7)) & 1) != 0; -// } -// ReadOnlySpan bitMapSpan = nullBitMap.Span; -// // No 
nulls in this column, so each bit must be set -// for (int j = 0; j < bitMapSpan.Length; j++) -// { -// for (int k = 0; k < 8; k++) -// { -// if (j * 8 + k == column.Length) -// break; -// Assert.True(GetBit(bitMapSpan[j], k)); -// } -// } -// i++; -// } -// } - -// [Fact] -// public void TestArrowStringColumnGetReadOnlyBuffers() -// { -// // Test ArrowStringDataFrameColumn. -// StringArray strArray = new StringArray.Builder().Append("foo").Append("bar").Build(); -// Memory dataMemory = new byte[] { 102, 111, 111, 98, 97, 114 }; -// Memory nullMemory = new byte[] { 1 }; -// Memory offsetMemory = new byte[] { 0, 0, 0, 0, 3, 0, 0, 0, 6, 0, 0, 0 }; - -// ArrowStringDataFrameColumn column = new ArrowStringDataFrameColumn("String", dataMemory, offsetMemory, nullMemory, strArray.Length, strArray.NullCount); - -// IEnumerable> dataBuffers = column.GetReadOnlyDataBuffers(); -// IEnumerable> nullBitMaps = column.GetReadOnlyNullBitMapBuffers(); -// IEnumerable> offsetsBuffers = column.GetReadOnlyOffsetsBuffers(); - -// using (IEnumerator> bufferEnumerator = dataBuffers.GetEnumerator()) -// using (IEnumerator> offsetsEnumerator = offsetsBuffers.GetEnumerator()) -// using (IEnumerator> nullBitMapsEnumerator = nullBitMaps.GetEnumerator()) -// { -// while (bufferEnumerator.MoveNext() && nullBitMapsEnumerator.MoveNext() && offsetsEnumerator.MoveNext()) -// { -// ReadOnlyMemory dataBuffer = bufferEnumerator.Current; -// ReadOnlyMemory nullBitMap = nullBitMapsEnumerator.Current; -// ReadOnlyMemory offsets = offsetsEnumerator.Current; - -// ReadOnlySpan dataSpan = dataBuffer.Span; -// ReadOnlySpan offsetsSpan = offsets.Span; -// int dataStart = 0; -// for (int j = 1; j < offsetsSpan.Length; j++) -// { -// int length = offsetsSpan[j] - offsetsSpan[j - 1]; -// ReadOnlySpan str = dataSpan.Slice(dataStart, length); -// ReadOnlySpan columnStr = dataMemory.Span.Slice(dataStart, length); -// Assert.Equal(str.Length, columnStr.Length); -// for (int s = 0; s < str.Length; s++) -// Assert.Equal(str[s], columnStr[s]); -// dataStart = length; -// } -// } -// } -// } -//#endif //!NETFRAMEWORK + //#if !NETFRAMEWORK // https://github.com/dotnet/corefxlab/issues/2796 + // [Fact] + // public void TestPrimitiveColumnGetReadOnlyBuffers() + // { + // RecordBatch recordBatch = new RecordBatch.Builder() + // .Append("Column1", false, col => col.Int32(array => array.AppendRange(Enumerable.Range(0, 10)))).Build(); + // DataFrame df = DataFrame.FromArrowRecordBatch(recordBatch); + + // PrimitiveDataFrameColumn column = df.Columns["Column1"] as PrimitiveDataFrameColumn; + + // IEnumerable> buffers = column.GetReadOnlyDataBuffers(); + // IEnumerable> nullBitMaps = column.GetReadOnlyNullBitMapBuffers(); + + // long i = 0; + // IEnumerator> bufferEnumerator = buffers.GetEnumerator(); + // IEnumerator> nullBitMapsEnumerator = nullBitMaps.GetEnumerator(); + // while (bufferEnumerator.MoveNext() && nullBitMapsEnumerator.MoveNext()) + // { + // ReadOnlyMemory dataBuffer = bufferEnumerator.Current; + // ReadOnlyMemory nullBitMap = nullBitMapsEnumerator.Current; + + // ReadOnlySpan span = dataBuffer.Span; + // for (int j = 0; j < span.Length; j++) + // { + // // Each buffer has a max length of int.MaxValue + // Assert.Equal(span[j], column[j + i * int.MaxValue]); + // } + + // bool GetBit(byte curBitMap, int index) + // { + // return ((curBitMap >> (index & 7)) & 1) != 0; + // } + // ReadOnlySpan bitMapSpan = nullBitMap.Span; + // // No nulls in this column, so each bit must be set + // for (int j = 0; j < bitMapSpan.Length; j++) + // { + // 
for (int k = 0; k < 8; k++) + // { + // if (j * 8 + k == column.Length) + // break; + // Assert.True(GetBit(bitMapSpan[j], k)); + // } + // } + // i++; + // } + // } + + // [Fact] + // public void TestArrowStringColumnGetReadOnlyBuffers() + // { + // // Test ArrowStringDataFrameColumn. + // StringArray strArray = new StringArray.Builder().Append("foo").Append("bar").Build(); + // Memory dataMemory = new byte[] { 102, 111, 111, 98, 97, 114 }; + // Memory nullMemory = new byte[] { 1 }; + // Memory offsetMemory = new byte[] { 0, 0, 0, 0, 3, 0, 0, 0, 6, 0, 0, 0 }; + + // ArrowStringDataFrameColumn column = new ArrowStringDataFrameColumn("String", dataMemory, offsetMemory, nullMemory, strArray.Length, strArray.NullCount); + + // IEnumerable> dataBuffers = column.GetReadOnlyDataBuffers(); + // IEnumerable> nullBitMaps = column.GetReadOnlyNullBitMapBuffers(); + // IEnumerable> offsetsBuffers = column.GetReadOnlyOffsetsBuffers(); + + // using (IEnumerator> bufferEnumerator = dataBuffers.GetEnumerator()) + // using (IEnumerator> offsetsEnumerator = offsetsBuffers.GetEnumerator()) + // using (IEnumerator> nullBitMapsEnumerator = nullBitMaps.GetEnumerator()) + // { + // while (bufferEnumerator.MoveNext() && nullBitMapsEnumerator.MoveNext() && offsetsEnumerator.MoveNext()) + // { + // ReadOnlyMemory dataBuffer = bufferEnumerator.Current; + // ReadOnlyMemory nullBitMap = nullBitMapsEnumerator.Current; + // ReadOnlyMemory offsets = offsetsEnumerator.Current; + + // ReadOnlySpan dataSpan = dataBuffer.Span; + // ReadOnlySpan offsetsSpan = offsets.Span; + // int dataStart = 0; + // for (int j = 1; j < offsetsSpan.Length; j++) + // { + // int length = offsetsSpan[j] - offsetsSpan[j - 1]; + // ReadOnlySpan str = dataSpan.Slice(dataStart, length); + // ReadOnlySpan columnStr = dataMemory.Span.Slice(dataStart, length); + // Assert.Equal(str.Length, columnStr.Length); + // for (int s = 0; s < str.Length; s++) + // Assert.Equal(str[s], columnStr[s]); + // dataStart = length; + // } + // } + // } + // } + //#endif //!NETFRAMEWORK } } diff --git a/test/Microsoft.Data.Analysis.Tests/DataFrameGroupByTests.cs b/test/Microsoft.Data.Analysis.Tests/DataFrameGroupByTests.cs index 2319a9b64f..e1eba9da8d 100644 --- a/test/Microsoft.Data.Analysis.Tests/DataFrameGroupByTests.cs +++ b/test/Microsoft.Data.Analysis.Tests/DataFrameGroupByTests.cs @@ -17,9 +17,9 @@ public void TestGroupingWithTKeyTypeofString() { const int length = 11; - //Create test dataframe (numbers starting from 0 up to lenght) + //Create test dataframe (numbers starting from 0 up to length) DataFrame df = MakeTestDataFrameWithParityAndTensColumns(length); - + var grouping = df.GroupBy("Parity").Groupings; //Check groups count @@ -28,13 +28,13 @@ public void TestGroupingWithTKeyTypeofString() //Check number of elements in each group var oddGroup = grouping.Where(gr => gr.Key == "odd").FirstOrDefault(); Assert.NotNull(oddGroup); - Assert.Equal(length/2, oddGroup.Count()); + Assert.Equal(length / 2, oddGroup.Count()); var evenGroup = grouping.Where(gr => gr.Key == "even").FirstOrDefault(); Assert.NotNull(evenGroup); Assert.Equal(length / 2 + length % 2, evenGroup.Count()); - + } [Fact] @@ -45,7 +45,7 @@ public void TestGroupingWithTKey_CornerCases() var grouping = df.GroupBy("Parity").Groupings; Assert.Empty(grouping); - + df = MakeTestDataFrameWithParityAndTensColumns(1); grouping = df.GroupBy("Parity").Groupings; Assert.Single(grouping); @@ -58,7 +58,7 @@ public void TestGroupingWithTKeyPrimitiveType() { const int length = 55; - //Create test dataframe 
(numbers starting from 0 up to lenght) + //Create test dataframe (numbers starting from 0 up to length) DataFrame df = MakeTestDataFrameWithParityAndTensColumns(length); //Group elements by int column, that contain the amount of full tens in each int @@ -66,7 +66,7 @@ public void TestGroupingWithTKeyPrimitiveType() //Get the amount of all number based columns int numberColumnsCount = df.Columns.Count - 2; //except "Parity" and "Tens" columns - + //Check each group for (int i = 0; i < length / 10; i++) { @@ -90,10 +90,10 @@ public void TestGroupingWithTKeyPrimitiveType() [Fact] public void TestGroupingWithTKeyOfWrongType() - { + { var message = string.Empty; - //Create test dataframe (numbers starting from 0 up to lenght) + //Create test dataframe (numbers starting from 0 up to length) DataFrame df = MakeTestDataFrameWithParityAndTensColumns(1); //Use wrong type for grouping diff --git a/test/Microsoft.Data.Analysis.Tests/DataFrameIDataViewTests.cs b/test/Microsoft.Data.Analysis.Tests/DataFrameIDataViewTests.cs index dea8099876..3264815293 100644 --- a/test/Microsoft.Data.Analysis.Tests/DataFrameIDataViewTests.cs +++ b/test/Microsoft.Data.Analysis.Tests/DataFrameIDataViewTests.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/test/Microsoft.Data.Analysis.Tests/DataFrameTests.cs b/test/Microsoft.Data.Analysis.Tests/DataFrameTests.cs index b19b1ce5d9..e3794acb82 100644 --- a/test/Microsoft.Data.Analysis.Tests/DataFrameTests.cs +++ b/test/Microsoft.Data.Analysis.Tests/DataFrameTests.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
@@ -1113,7 +1113,7 @@ void Verify(DataFrame sortedDataFrame) { Assert.Null(value); } - + for (int i = 0; i < sortedDataFrame.Columns.Count; i++) { string columnName = sortedDataFrame.Columns[i].Name; @@ -1458,7 +1458,7 @@ public void TestGroupBy() } [Fact] - public void TestGoupByDifferentColumnTypes() + public void TestGroupByDifferentColumnTypes() { void GroupCountAndAssert(DataFrame frame) { @@ -1818,8 +1818,8 @@ public void TestMerge() DataFrame left = MakeDataFrameWithAllMutableColumnTypes(10); DataFrame right = MakeDataFrameWithAllMutableColumnTypes(5); - // Tests with right.Rows.Count < left.Rows.Count - // Left merge + // Tests with right.Rows.Count < left.Rows.Count + // Left merge DataFrame merge = left.Merge(right, "Int", "Int"); Assert.Equal(10, merge.Rows.Count); Assert.Equal(merge.Columns.Count, left.Columns.Count + right.Columns.Count); @@ -1827,7 +1827,7 @@ public void TestMerge() Assert.Null(merge.Columns["Int_left"][5]); VerifyMerge(merge, left, right, JoinAlgorithm.Left); - // Right merge + // Right merge merge = left.Merge(right, "Int", "Int", joinAlgorithm: JoinAlgorithm.Right); Assert.Equal(5, merge.Rows.Count); Assert.Equal(merge.Columns.Count, left.Columns.Count + right.Columns.Count); @@ -1835,14 +1835,14 @@ public void TestMerge() Assert.Null(merge.Columns["Int_right"][2]); VerifyMerge(merge, left, right, JoinAlgorithm.Right); - // Outer merge + // Outer merge merge = left.Merge(right, "Int", "Int", joinAlgorithm: JoinAlgorithm.FullOuter); Assert.Equal(merge.Rows.Count, left.Rows.Count); Assert.Equal(merge.Columns.Count, left.Columns.Count + right.Columns.Count); Assert.Null(merge.Columns["Int_right"][6]); VerifyMerge(merge, left, right, JoinAlgorithm.FullOuter); - // Inner merge + // Inner merge merge = left.Merge(right, "Int", "Int", joinAlgorithm: JoinAlgorithm.Inner); Assert.Equal(merge.Rows.Count, right.Rows.Count); Assert.Equal(merge.Columns.Count, left.Columns.Count + right.Columns.Count); @@ -1850,8 +1850,8 @@ public void TestMerge() Assert.Null(merge.Columns["Int_right"][4]); VerifyMerge(merge, left, right, JoinAlgorithm.Inner); - // Tests with right.Rows.Count > left.Rows.Count - // Left merge + // Tests with right.Rows.Count > left.Rows.Count + // Left merge right = MakeDataFrameWithAllMutableColumnTypes(15); merge = left.Merge(right, "Int", "Int"); Assert.Equal(merge.Rows.Count, left.Rows.Count); @@ -1859,7 +1859,7 @@ public void TestMerge() Assert.Equal(merge.Columns["Int_right"][6], right.Columns["Int"][6]); VerifyMerge(merge, left, right, JoinAlgorithm.Left); - // Right merge + // Right merge merge = left.Merge(right, "Int", "Int", joinAlgorithm: JoinAlgorithm.Right); Assert.Equal(merge.Rows.Count, right.Rows.Count); Assert.Equal(merge.Columns.Count, left.Columns.Count + right.Columns.Count); @@ -1867,7 +1867,7 @@ public void TestMerge() Assert.Null(merge.Columns["Int_left"][12]); VerifyMerge(merge, left, right, JoinAlgorithm.Right); - // Outer merge + // Outer merge merge = left.Merge(right, "Int", "Int", joinAlgorithm: JoinAlgorithm.FullOuter); Assert.Equal(16, merge.Rows.Count); Assert.Equal(merge.Columns.Count, left.Columns.Count + right.Columns.Count); @@ -1875,7 +1875,7 @@ public void TestMerge() Assert.Null(merge.Columns["Int_left"][15]); VerifyMerge(merge, left, right, JoinAlgorithm.FullOuter); - // Inner merge + // Inner merge merge = left.Merge(right, "Int", "Int", joinAlgorithm: JoinAlgorithm.Inner); Assert.Equal(9, merge.Rows.Count); Assert.Equal(merge.Columns.Count, left.Columns.Count + right.Columns.Count); @@ -2017,14 +2017,14 @@ 
public void TestMergeEdgeCases_Outer() * 0 0 0 * 1 5 1 * null null 2 - * null(3) null(3) 3 + * null(3) null(3) 3 * 4 6 4 */ /* * Merge will result in a DataFrame like: * Int_left: Int_right: Merged: Index: - * 0 0 0 - 0 0 + * 0 0 0 - 0 0 * 1 null 1 - N 1 * null null 2 - 2 2 * null null(3) 2 - 3 3 @@ -2038,7 +2038,7 @@ public void TestMergeEdgeCases_Outer() DataFrame merge = left.Merge(right, "Int", "Int", joinAlgorithm: JoinAlgorithm.FullOuter); Assert.Equal(9, merge.Rows.Count); Assert.Equal(merge.Columns.Count, left.Columns.Count + right.Columns.Count); - + int[] mergeRows = new int[] { 0, 2, 3, 4, 5 }; int[] leftRows = new int[] { 0, 2, 2, 3, 3 }; int[] rightRows = new int[] { 0, 2, 3, 2, 3 }; @@ -2077,8 +2077,8 @@ public void TestMerge_ByTwoColumns_Complex_LeftJoin() //Arrange var left = new DataFrame(); left.Columns.Add(new Int32DataFrameColumn("Index", new[] { 0, 1, 2, 3, 4, 5 })); - left.Columns.Add (new Int32DataFrameColumn("G1", new[] { 0, 1, 1, 2, 2, 3 })); - left.Columns.Add (new Int32DataFrameColumn("G2", new[] { 3, 1, 2, 1, 2, 1})); + left.Columns.Add(new Int32DataFrameColumn("G1", new[] { 0, 1, 1, 2, 2, 3 })); + left.Columns.Add(new Int32DataFrameColumn("G2", new[] { 3, 1, 2, 1, 2, 1 })); var right = new DataFrame(); right.Columns.Add(new Int32DataFrameColumn("Index", new[] { 0, 1, 2, 3 })); @@ -2090,11 +2090,11 @@ public void TestMerge_ByTwoColumns_Complex_LeftJoin() * Left | Right * I G1 G2 | I G1 G2 * ------------------------- - * 0 0 3 | 0 1 1 - * 1 1 1 | 1 1 2 + * 0 0 3 | 0 1 1 + * 1 1 1 | 1 1 2 * 2 1 2 | 2 1 1 * 3 2 1 | 3 2 1 - * 4 2 2 + * 4 2 2 * 5 3 1 */ @@ -2127,9 +2127,9 @@ public void TestMerge_ByTwoColumns_Complex_LeftJoin() Assert.Equal(expectedMerged.Length, merge.Rows.Count); Assert.Equal(merge.Columns.Count, left.Columns.Count + right.Columns.Count); - + for (long i = 0; i < expectedMerged.Length; i++) - { + { MatchRowsOnMergedDataFrame(merge, left, right, i, expectedMerged[i].Left, expectedMerged[i].Right); } @@ -2137,7 +2137,7 @@ public void TestMerge_ByTwoColumns_Complex_LeftJoin() [Fact] public void TestMerge_ByTwoColumns_Simple_ManyToMany_LeftJoin() - { + { //Test left merge by to int type columns //Arrange @@ -2145,7 +2145,7 @@ public void TestMerge_ByTwoColumns_Simple_ManyToMany_LeftJoin() left.Columns.Add(new Int32DataFrameColumn("Index", new[] { 0, 1, 2 })); left.Columns.Add(new Int32DataFrameColumn("G1", new[] { 1, 1, 3 })); left.Columns.Add(new Int32DataFrameColumn("G2", new[] { 1, 1, 3 })); - + var right = new DataFrame(); right.Columns.Add(new Int32DataFrameColumn("Index", new[] { 0, 1, 2 })); right.Columns.Add(new Int32DataFrameColumn("G1", new[] { 1, 1, 0 })); @@ -2154,11 +2154,11 @@ public void TestMerge_ByTwoColumns_Simple_ManyToMany_LeftJoin() // Creates this case: /* --------------------------- * Left | Right - * I G1 G2 | I G1 G2 + * I G1 G2 | I G1 G2 * --------------------------- - * 0 1 1 | 0 1 1 - * 1 1 1 | 1 1 1 - * 2 3 3 | 2 0 0 + * 0 1 1 | 0 1 1 + * 1 1 1 | 1 1 1 + * 2 3 3 | 2 0 0 */ /* @@ -2212,11 +2212,11 @@ public void TestMerge_ByTwoColumns_Simple_ManyToMany_RightJoin() // Creates this case: /* --------------------------- * Left | Right - * I G1 G2 | I G1 G2 + * I G1 G2 | I G1 G2 * --------------------------- - * 0 1 1 | 0 1 1 - * 1 1 1 | 1 1 1 - * 2 3 3 | 2 0 0 + * 0 1 1 | 0 1 1 + * 1 1 1 | 1 1 1 + * 2 3 3 | 2 0 0 */ /* @@ -2270,11 +2270,11 @@ public void TestMerge_ByTwoColumns_Simple_ManyToMany_InnerJoin() // Creates this case: /* --------------------------- * Left | Right - * I G1 G2 | I G1 G2 + * I G1 G2 | I G1 G2 * 
--------------------------- - * 0 1 1 | 0 1 1 - * 1 1 1 | 1 1 1 - * 2 3 3 | 2 0 0 + * 0 1 1 | 0 1 1 + * 1 1 1 | 1 1 1 + * 2 3 3 | 2 0 0 */ /* @@ -2326,11 +2326,11 @@ public void TestMerge_ByTwoColumns_Simple_ManyToMany_OuterJoin() // Creates this case: /* --------------------------- * Left | Right - * I G1 G2 | I G1 G2 + * I G1 G2 | I G1 G2 * --------------------------- - * 0 1 1 | 0 1 1 - * 1 1 1 | 1 1 1 - * 2 3 3 | 2 0 0 + * 0 1 1 | 0 1 1 + * 1 1 1 | 1 1 1 + * 2 3 3 | 2 0 0 */ /* @@ -2370,7 +2370,7 @@ public void TestMerge_ByTwoColumns_Simple_ManyToMany_OuterJoin() [Fact] public void TestMerge_ByThreeColumns_OneToOne_LeftJoin() { - //Test merge by LEFT join of int and string columns + //Test merge by LEFT join of int and string columns //Arrange var left = new DataFrame(); @@ -2390,7 +2390,7 @@ public void TestMerge_ByThreeColumns_OneToOne_LeftJoin() * Left | Right * I G1 G2 G3 | I G1 G2 G3 * ------------------------------ - * 0 1 1 A | 0 0 1 Z + * 0 1 1 A | 0 0 1 Z * 1 1 2 B | 1 1 1 Y * 2 2 1 C | 2 1 2 B */ @@ -2398,7 +2398,7 @@ public void TestMerge_ByThreeColumns_OneToOne_LeftJoin() /* * Merge will result in a DataFrame like: * IL G1 G2 G3 IR Merged: - * ------------------------- + * ------------------------- * 0 1 1 A 0 - N * 1 1 2 B 2 1 2 B 1 - 2 * 2 2 1 C 2 - N @@ -2426,7 +2426,7 @@ public void TestMerge_ByThreeColumns_OneToOne_LeftJoin() [Fact] public void TestMerge_ByThreeColumns_OneToOne_RightJoin() { - //Test merge by RIGHT join of int and string columns + //Test merge by RIGHT join of int and string columns //Arrange var left = new DataFrame(); @@ -2446,7 +2446,7 @@ public void TestMerge_ByThreeColumns_OneToOne_RightJoin() * Left | Right * I G1 G2 G3 | I G1 G2 G3 * ------------------------------ - * 0 1 1 A | 0 0 1 Z + * 0 1 1 A | 0 0 1 Z * 1 1 2 B | 1 1 1 Y * 2 2 1 C | 2 1 2 B */ @@ -2454,7 +2454,7 @@ public void TestMerge_ByThreeColumns_OneToOne_RightJoin() /* * Merge will result in a DataFrame like: * IL G1 G2 G3 IR Merged: - * ------------------------- + * ------------------------- * 0 0 1 Z N - 0 * 1 1 1 Y N - 1 * 1 1 2 B 2 1 2 B 1 - 2 @@ -2703,7 +2703,7 @@ public void TestClone(int dfLength, int intDfLength) } } } - + [Fact] public void TestColumnCreationFromExisitingColumn() { diff --git a/test/Microsoft.Data.Analysis.Tests/TextFieldParserTests.cs b/test/Microsoft.Data.Analysis.Tests/TextFieldParserTests.cs index c481d13b28..30511173fc 100644 --- a/test/Microsoft.Data.Analysis.Tests/TextFieldParserTests.cs +++ b/test/Microsoft.Data.Analysis.Tests/TextFieldParserTests.cs @@ -313,7 +313,7 @@ public void ReadFields_Delimiters_LineNumber() Assert.Equal(1, parser.LineNumber); parser.SetDelimiters(new[] { "," }); - Assert.Equal(new[] { "abc","123" }, parser.ReadFields()); + Assert.Equal(new[] { "abc", "123" }, parser.ReadFields()); Assert.Equal(2, parser.LineNumber); parser.SetDelimiters(new[] { ";", "," }); @@ -378,7 +378,7 @@ public void ReadFields_QuoteOnNewLine() Assert.Equal(1, parser.LineNumber); parser.SetDelimiters(new[] { "," }); - Assert.Equal(new[] { "abc",@"123 + Assert.Equal(new[] { "abc", @"123 123" }, parser.ReadFields()); Assert.Equal(3, parser.LineNumber); diff --git a/test/Microsoft.Extensions.ML.Tests/PredictionEnginePoolTests.cs b/test/Microsoft.Extensions.ML.Tests/PredictionEnginePoolTests.cs index 8851aefdf8..ac77ee9e4f 100644 --- a/test/Microsoft.Extensions.ML.Tests/PredictionEnginePoolTests.cs +++ b/test/Microsoft.Extensions.ML.Tests/PredictionEnginePoolTests.cs @@ -34,10 +34,10 @@ public void can_load_namedmodel() .FromFile(modelName: "model1", 
filePath: Path.Combine("TestModels", "SentimentModel.zip"), watchForChanges: false); var sp = services.BuildServiceProvider(); - + var pool = sp.GetRequiredService>(); var model = pool.GetModel("model1"); - + Assert.NotNull(model); } diff --git a/test/Microsoft.ML.AutoML.Tests/AutoFitTests.cs b/test/Microsoft.ML.AutoML.Tests/AutoFitTests.cs index d5af1f1be5..c914d522b5 100644 --- a/test/Microsoft.ML.AutoML.Tests/AutoFitTests.cs +++ b/test/Microsoft.ML.AutoML.Tests/AutoFitTests.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. @@ -98,7 +98,7 @@ public void AutoFitMultiTest(bool useNumberOfCVFolds) else { // When using this other API, if the trainset is under the - // crossValRowCounThreshold, AutoML will also perform CrossValidation + // crossValRowCountThreshold, AutoML will also perform CrossValidation // but through a very different path that the one above, // throw a CrossValSummaryRunner and will return // a different type of object as "result" which would now be @@ -143,7 +143,7 @@ public void AutoFitImageClassificationTrainTest() [Fact(Skip = "Takes too much time, ~10 minutes.")] public void AutoFitImageClassification() { - // This test executes the code path that model builder code will take to get a model using image + // This test executes the code path that model builder code will take to get a model using image // classification API. var context = new MLContext(1); @@ -338,7 +338,7 @@ public void AutoFitRecommendationTest() // STEP 2: Run AutoML experiment try { - ExperimentResult experimentResult = mlContext.Auto() + ExperimentResult experimentResult = mlContext.Auto() .CreateRecommendationExperiment(5) .Execute(trainDataView, testDataView, new ColumnInformation() @@ -351,7 +351,7 @@ public void AutoFitRecommendationTest() RunDetail bestRun = experimentResult.BestRun; Assert.True(experimentResult.RunDetails.Count() > 1); Assert.NotNull(bestRun.ValidationMetrics); - Assert.True(experimentResult.RunDetails.Max(i => i?.ValidationMetrics?.RSquared* i?.ValidationMetrics?.RSquared) > 0.5); + Assert.True(experimentResult.RunDetails.Max(i => i?.ValidationMetrics?.RSquared * i?.ValidationMetrics?.RSquared) > 0.5); var outputSchema = bestRun.Model.GetOutputSchema(trainDataView.Schema); var expectedOutputNames = new string[] { labelColumnName, userColumnName, userColumnName, itemColumnName, itemColumnName, scoreColumnName }; @@ -468,7 +468,7 @@ public void AutoFitMaxExperimentTimeTest() var lastException = experiment.RunDetails.Last().Exception; var containsMessage = lastException.Message.Contains(expectedExceptionMessage); - if(lastException is AggregateException lastAggregateException) + if (lastException is AggregateException lastAggregateException) { // Sometimes multiple threads might throw the same "Operation was cancelled" // exception and all of them are grouped inside an AggregateException @@ -487,7 +487,7 @@ public void AutoFitMaxExperimentTimeTest() Assert.True(containsMessage, $"Did not obtain '{expectedExceptionMessage}' error." + $"Obtained unexpected error of type {lastException.GetType()} with message: {lastException.Message}"); - + // Ensure that the best found model can still run after maximum experiment time was reached. 
IDataView predictions = experiment.BestRun.Model.Transform(trainData); } @@ -524,4 +524,4 @@ private TextLoader.Options GetLoaderArgsRank(string labelColumnName, string grou }; } } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.AutoML.Tests/BestResultUtilTests.cs b/test/Microsoft.ML.AutoML.Tests/BestResultUtilTests.cs index 3148a37f27..98d83d8cf0 100644 --- a/test/Microsoft.ML.AutoML.Tests/BestResultUtilTests.cs +++ b/test/Microsoft.ML.AutoML.Tests/BestResultUtilTests.cs @@ -10,7 +10,7 @@ namespace Microsoft.ML.AutoML.Test { - + public class BestResultUtilTests : BaseTestClass { public BestResultUtilTests(ITestOutputHelper output) : base(output) diff --git a/test/Microsoft.ML.AutoML.Tests/ColumnInferenceTests.cs b/test/Microsoft.ML.AutoML.Tests/ColumnInferenceTests.cs index 6a5c17911c..558e5d6788 100644 --- a/test/Microsoft.ML.AutoML.Tests/ColumnInferenceTests.cs +++ b/test/Microsoft.ML.AutoML.Tests/ColumnInferenceTests.cs @@ -1,4 +1,8 @@ -using System; +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using System; using System.Collections.Generic; using System.IO; using System.Linq; @@ -154,7 +158,7 @@ public void DefaultColumnNamesInferredCorrectly() UserIdColumnName = DefaultColumnNames.User, ItemIdColumnName = DefaultColumnNames.Item, }, - groupColumns : false); + groupColumns: false); Assert.Equal(DefaultColumnNames.Label, result.ColumnInformation.LabelColumnName); Assert.Equal(DefaultColumnNames.Weight, result.ColumnInformation.ExampleWeightColumnName); diff --git a/test/Microsoft.ML.AutoML.Tests/ColumnInferenceValidationUtilTests.cs b/test/Microsoft.ML.AutoML.Tests/ColumnInferenceValidationUtilTests.cs index b214706fca..eb22966ff8 100644 --- a/test/Microsoft.ML.AutoML.Tests/ColumnInferenceValidationUtilTests.cs +++ b/test/Microsoft.ML.AutoML.Tests/ColumnInferenceValidationUtilTests.cs @@ -10,7 +10,7 @@ namespace Microsoft.ML.AutoML.Test { - + public class ColumnInferenceValidationUtilTests : BaseTestClass { public ColumnInferenceValidationUtilTests(ITestOutputHelper output) : base(output) diff --git a/test/Microsoft.ML.AutoML.Tests/ColumnInformationUtilTests.cs b/test/Microsoft.ML.AutoML.Tests/ColumnInformationUtilTests.cs index 4b17e405ae..acda1af44c 100644 --- a/test/Microsoft.ML.AutoML.Tests/ColumnInformationUtilTests.cs +++ b/test/Microsoft.ML.AutoML.Tests/ColumnInformationUtilTests.cs @@ -9,7 +9,7 @@ namespace Microsoft.ML.AutoML.Test { - + public class ColumnInformationUtilTests : BaseTestClass { public ColumnInformationUtilTests(ITestOutputHelper output) : base(output) @@ -73,4 +73,4 @@ public void GetColumnNamesTest() Assert.Contains("Num", columnNames); } } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.AutoML.Tests/ConversionTests.cs b/test/Microsoft.ML.AutoML.Tests/ConversionTests.cs index 3dae4e5c21..0074ef24dc 100644 --- a/test/Microsoft.ML.AutoML.Tests/ConversionTests.cs +++ b/test/Microsoft.ML.AutoML.Tests/ConversionTests.cs @@ -10,7 +10,7 @@ namespace Microsoft.ML.AutoML.Test { - + public class ConversionTests : BaseTestClass { private readonly ITestOutputHelper _output; @@ -31,7 +31,7 @@ public void ConvertFloatMissingValues() "NA", "N/A", "NaN", "NAN" }; - foreach(var missingValue in missingValues) + foreach (var missingValue in missingValues) { float value; var success = Conversions.DefaultInstance.TryParse(missingValue.AsMemory(), out value); @@ -46,7 +46,7 @@ public 
void ConvertFloatParseFailure() { var values = new string[] { - "a", "aa", "nb", "aaa", "naa", "nba", "n/b" + "a", "aa", "nb", "aaa", "naa", "nba", "n/b" }; foreach (var value in values) diff --git a/test/Microsoft.ML.AutoML.Tests/DatasetDimensionsTests.cs b/test/Microsoft.ML.AutoML.Tests/DatasetDimensionsTests.cs index c53fc4d3f7..1b628f375e 100644 --- a/test/Microsoft.ML.AutoML.Tests/DatasetDimensionsTests.cs +++ b/test/Microsoft.ML.AutoML.Tests/DatasetDimensionsTests.cs @@ -9,7 +9,7 @@ namespace Microsoft.ML.AutoML.Test { - + public class DatasetDimensionsTests : BaseTestClass { public DatasetDimensionsTests(ITestOutputHelper output) : base(output) diff --git a/test/Microsoft.ML.AutoML.Tests/EstimatorExtensionTests.cs b/test/Microsoft.ML.AutoML.Tests/EstimatorExtensionTests.cs index 7c260e7385..507add7afb 100644 --- a/test/Microsoft.ML.AutoML.Tests/EstimatorExtensionTests.cs +++ b/test/Microsoft.ML.AutoML.Tests/EstimatorExtensionTests.cs @@ -11,7 +11,7 @@ namespace Microsoft.ML.AutoML.Test { - + public class EstimatorExtensionTests : BaseTestClass { public EstimatorExtensionTests(ITestOutputHelper output) : base(output) diff --git a/test/Microsoft.ML.AutoML.Tests/GetNextPipelineTests.cs b/test/Microsoft.ML.AutoML.Tests/GetNextPipelineTests.cs index cec77327f1..13f9a49c4d 100644 --- a/test/Microsoft.ML.AutoML.Tests/GetNextPipelineTests.cs +++ b/test/Microsoft.ML.AutoML.Tests/GetNextPipelineTests.cs @@ -5,15 +5,15 @@ using System; using System.Collections.Generic; using System.Linq; -using Xunit; -using Newtonsoft.Json; +using Microsoft.ML.Runtime; using Microsoft.ML.TestFramework; +using Newtonsoft.Json; +using Xunit; using Xunit.Abstractions; -using Microsoft.ML.Runtime; namespace Microsoft.ML.AutoML.Test { - + public class GetNextPipelineTests : BaseTestClass { public GetNextPipelineTests(ITestOutputHelper output) : base(output) diff --git a/test/Microsoft.ML.AutoML.Tests/InferredPipelineTests.cs b/test/Microsoft.ML.AutoML.Tests/InferredPipelineTests.cs index f446b842e2..52e5658b12 100644 --- a/test/Microsoft.ML.AutoML.Tests/InferredPipelineTests.cs +++ b/test/Microsoft.ML.AutoML.Tests/InferredPipelineTests.cs @@ -9,7 +9,7 @@ namespace Microsoft.ML.AutoML.Test { - + public class InferredPipelineTests : BaseTestClass { public InferredPipelineTests(ITestOutputHelper output) : base(output) diff --git a/test/Microsoft.ML.AutoML.Tests/MetricsAgentsTests.cs b/test/Microsoft.ML.AutoML.Tests/MetricsAgentsTests.cs index 69e70f0465..94128daad5 100644 --- a/test/Microsoft.ML.AutoML.Tests/MetricsAgentsTests.cs +++ b/test/Microsoft.ML.AutoML.Tests/MetricsAgentsTests.cs @@ -61,7 +61,7 @@ public void BinaryMetricsPerfectTest() [Fact] public void MulticlassMetricsGetScoreTest() { - var metrics = MetricsUtil.CreateMulticlassClassificationMetrics(0.1, 0.2, 0.3, 0.4, 0, new double[] {0.5}, new double[] {}); + var metrics = MetricsUtil.CreateMulticlassClassificationMetrics(0.1, 0.2, 0.3, 0.4, 0, new double[] { 0.5 }, new double[] { }); Assert.Equal(0.1, GetScore(metrics, MulticlassClassificationMetric.MicroAccuracy)); Assert.Equal(0.2, GetScore(metrics, MulticlassClassificationMetric.MacroAccuracy)); Assert.Equal(0.3, GetScore(metrics, MulticlassClassificationMetric.LogLoss)); diff --git a/test/Microsoft.ML.AutoML.Tests/MetricsUtil.cs b/test/Microsoft.ML.AutoML.Tests/MetricsUtil.cs index 3f306fdcbe..6effe65346 100644 --- a/test/Microsoft.ML.AutoML.Tests/MetricsUtil.cs +++ b/test/Microsoft.ML.AutoML.Tests/MetricsUtil.cs @@ -29,14 +29,14 @@ internal static class MetricsUtil topKAccuracy, perClassLogLoss); } 
- public static RegressionMetrics CreateRegressionMetrics(double l1, + public static RegressionMetrics CreateRegressionMetrics(double l1, double l2, double rms, double lossFn, double rSquared) { return CreateInstance(l1, l2, rms, lossFn, rSquared); } - public static RankingMetrics CreateRankingMetrics(double[] dcg, + public static RankingMetrics CreateRankingMetrics(double[] dcg, double[] ndcg) { return CreateInstance(dcg, ndcg); diff --git a/test/Microsoft.ML.AutoML.Tests/PurposeInferenceTests.cs b/test/Microsoft.ML.AutoML.Tests/PurposeInferenceTests.cs index 3955dae854..8541c2eef3 100644 --- a/test/Microsoft.ML.AutoML.Tests/PurposeInferenceTests.cs +++ b/test/Microsoft.ML.AutoML.Tests/PurposeInferenceTests.cs @@ -1,4 +1,8 @@ -using System.Linq; +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using System.Linq; using Microsoft.ML.Data; using Microsoft.ML.TestFramework; using Xunit; diff --git a/test/Microsoft.ML.AutoML.Tests/SplitUtilTests.cs b/test/Microsoft.ML.AutoML.Tests/SplitUtilTests.cs index 0bc9490417..3178463c58 100644 --- a/test/Microsoft.ML.AutoML.Tests/SplitUtilTests.cs +++ b/test/Microsoft.ML.AutoML.Tests/SplitUtilTests.cs @@ -11,7 +11,7 @@ namespace Microsoft.ML.AutoML.Test { - + public class SplitUtilTests : BaseTestClass { public SplitUtilTests(ITestOutputHelper output) : base(output) diff --git a/test/Microsoft.ML.AutoML.Tests/SuggestedPipelineBuilderTests.cs b/test/Microsoft.ML.AutoML.Tests/SuggestedPipelineBuilderTests.cs index 7eef38255e..be8d75ff8a 100644 --- a/test/Microsoft.ML.AutoML.Tests/SuggestedPipelineBuilderTests.cs +++ b/test/Microsoft.ML.AutoML.Tests/SuggestedPipelineBuilderTests.cs @@ -9,7 +9,7 @@ namespace Microsoft.ML.AutoML.Test { - + public class SuggestedPipelineBuilderTests : BaseTestClass { private static MLContext _context = new MLContext(1); diff --git a/test/Microsoft.ML.AutoML.Tests/SweeperTests.cs b/test/Microsoft.ML.AutoML.Tests/SweeperTests.cs index 3875295b75..87561dbed5 100644 --- a/test/Microsoft.ML.AutoML.Tests/SweeperTests.cs +++ b/test/Microsoft.ML.AutoML.Tests/SweeperTests.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
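Editor's note (illustrative, not part of the patch): the AutoFitTests hunks earlier in this section drive the public AutoML experiment surface (`mlContext.Auto()`, `Execute`, `BestRun.ValidationMetrics`). A minimal regression-flavoured sketch of that surface follows; the `data.csv` path and the `HousingRow` column layout are placeholder assumptions, not anything taken from the tests.

```csharp
using System;
using Microsoft.ML;
using Microsoft.ML.AutoML;
using Microsoft.ML.Data;

class AutoMLSketch
{
    private class HousingRow
    {
        [LoadColumn(0)] public float Label { get; set; }
        [LoadColumn(1)] public float Size { get; set; }
    }

    static void Main()
    {
        var mlContext = new MLContext(seed: 1);
        IDataView trainData = mlContext.Data.LoadFromTextFile<HousingRow>(
            "data.csv", separatorChar: ',', hasHeader: true);

        // Bound the search by wall-clock time, then inspect the best run the sweep found.
        ExperimentResult<RegressionMetrics> result = mlContext.Auto()
            .CreateRegressionExperiment(maxExperimentTimeInSeconds: 30)
            .Execute(trainData, labelColumnName: "Label");

        RunDetail<RegressionMetrics> bestRun = result.BestRun;
        Console.WriteLine($"{bestRun.TrainerName}: R^2 = {bestRun.ValidationMetrics.RSquared}");
    }
}
```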
@@ -11,7 +11,7 @@ namespace Microsoft.ML.AutoML.Test { - + public class SweeperTests : BaseTestClass { public SweeperTests(ITestOutputHelper output) : base(output) @@ -27,7 +27,7 @@ public void SmacQuickRunTest() var floatLogValueGenerator = new FloatValueGenerator(new FloatParamArguments() { Name = "floatLog", Min = 1, Max = 1000, LogBase = true }); var longValueGenerator = new LongValueGenerator(new LongParamArguments() { Name = "long", Min = 1, Max = 1000 }); var longLogValueGenerator = new LongValueGenerator(new LongParamArguments() { Name = "longLog", Min = 1, Max = 1000, LogBase = true }); - var discreteValueGeneator = new DiscreteValueGenerator(new DiscreteParamArguments() { Name = "discrete", Values = new[] { "200", "400", "600", "800" } }); + var discreteValueGenerator = new DiscreteValueGenerator(new DiscreteParamArguments() { Name = "discrete", Values = new[] { "200", "400", "600", "800" } }); var sweeper = new SmacSweeper(new MLContext(1), new SmacSweeper.Arguments() { @@ -36,7 +36,7 @@ public void SmacQuickRunTest() floatLogValueGenerator, longValueGenerator, longLogValueGenerator, - discreteValueGeneator + discreteValueGenerator }, NumberInitialPopulation = numInitialPopulation }); @@ -46,7 +46,7 @@ public void SmacQuickRunTest() Assert.NotNull(floatLogValueGenerator[0].ValueText); Assert.NotNull(longValueGenerator[0].ValueText); Assert.NotNull(longLogValueGenerator[0].ValueText); - Assert.NotNull(discreteValueGeneator[0].ValueText); + Assert.NotNull(discreteValueGenerator[0].ValueText); List results = new List(); @@ -95,7 +95,7 @@ public void Smac4ParamsConvergenceTest() new FloatValueGenerator(new FloatParamArguments() { Name = "x4", Min = 1, Max = 1000}), }, }); - + List results = new List(); RunResult bestResult = null; @@ -154,7 +154,7 @@ public void Smac2ParamsConvergenceTest() while (true) { ParameterSet[] pars = sweeper.ProposeSweeps(1, results); - if(pars == null) + if (pars == null) { break; } diff --git a/test/Microsoft.ML.AutoML.Tests/TextFileSampleTests.cs b/test/Microsoft.ML.AutoML.Tests/TextFileSampleTests.cs index 96e373e5bb..79eb45ce89 100644 --- a/test/Microsoft.ML.AutoML.Tests/TextFileSampleTests.cs +++ b/test/Microsoft.ML.AutoML.Tests/TextFileSampleTests.cs @@ -10,7 +10,7 @@ namespace Microsoft.ML.AutoML.Test { - + public class TextFileSampleTests : BaseTestClass { public TextFileSampleTests(ITestOutputHelper output) : base(output) @@ -36,7 +36,7 @@ public void CanParseLargeRandomStream() // think file is encoded with UTF-16 or UTF-32 without a BOM for (var k = 0; k < row.Length; k++) { - if(row[k] == 0) + if (row[k] == 0) { row[k] = 1; } diff --git a/test/Microsoft.ML.AutoML.Tests/TrainerExtensionsTests.cs b/test/Microsoft.ML.AutoML.Tests/TrainerExtensionsTests.cs index 87e060ddd5..f700fe0cb1 100644 --- a/test/Microsoft.ML.AutoML.Tests/TrainerExtensionsTests.cs +++ b/test/Microsoft.ML.AutoML.Tests/TrainerExtensionsTests.cs @@ -12,7 +12,7 @@ namespace Microsoft.ML.AutoML.Test { - + public class TrainerExtensionsTests : BaseTestClass { public TrainerExtensionsTests(ITestOutputHelper output) : base(output) @@ -177,7 +177,7 @@ public void BuildSdcaPipelineNode() public void BuildLightGbmPipelineNodeDefaultParams() { var pipelineNode = new LightGbmBinaryExtension().CreatePipelineNode( - new List(), + new List(), new ColumnInformation()); var expectedJson = @"{ ""Name"": ""LightGbmBinary"", diff --git a/test/Microsoft.ML.AutoML.Tests/TransformInferenceTests.cs b/test/Microsoft.ML.AutoML.Tests/TransformInferenceTests.cs index 640e370ba0..0265eec794 100644 --- 
a/test/Microsoft.ML.AutoML.Tests/TransformInferenceTests.cs +++ b/test/Microsoft.ML.AutoML.Tests/TransformInferenceTests.cs @@ -11,7 +11,7 @@ namespace Microsoft.ML.AutoML.Test { - + public class TransformInferenceTests : BaseTestClass { public TransformInferenceTests(ITestOutputHelper output) : base(output) diff --git a/test/Microsoft.ML.AutoML.Tests/TransformPostTrainerInferenceTests.cs b/test/Microsoft.ML.AutoML.Tests/TransformPostTrainerInferenceTests.cs index dbce8dfa22..bead839c72 100644 --- a/test/Microsoft.ML.AutoML.Tests/TransformPostTrainerInferenceTests.cs +++ b/test/Microsoft.ML.AutoML.Tests/TransformPostTrainerInferenceTests.cs @@ -11,7 +11,7 @@ namespace Microsoft.ML.AutoML.Test { - + public class TransformPostTrainerInferenceTests : BaseTestClass { public TransformPostTrainerInferenceTests(ITestOutputHelper output) : base(output) @@ -68,7 +68,7 @@ public void TransformPostTrainerMulticlassKeyLabel() DatasetColumnInfo[] columns, string expectedJson) { - var transforms = TransformInferenceApi.InferTransformsPostTrainer (new MLContext(1), task, columns); + var transforms = TransformInferenceApi.InferTransformsPostTrainer(new MLContext(1), task, columns); var pipelineNodes = transforms.Select(t => t.PipelineNode); Util.AssertObjectMatchesJson(expectedJson, pipelineNodes); } diff --git a/test/Microsoft.ML.AutoML.Tests/UserInputValidationTests.cs b/test/Microsoft.ML.AutoML.Tests/UserInputValidationTests.cs index 259acede05..c42de28a8c 100644 --- a/test/Microsoft.ML.AutoML.Tests/UserInputValidationTests.cs +++ b/test/Microsoft.ML.AutoML.Tests/UserInputValidationTests.cs @@ -14,7 +14,7 @@ namespace Microsoft.ML.AutoML.Test { - + public class UserInputValidationTests : BaseTestClass { private static readonly IDataView _data = DatasetUtil.GetUciAdultDataView(); diff --git a/test/Microsoft.ML.AutoML.Tests/Util.cs b/test/Microsoft.ML.AutoML.Tests/Util.cs index c23046814f..a7853cdf3c 100644 --- a/test/Microsoft.ML.AutoML.Tests/Util.cs +++ b/test/Microsoft.ML.AutoML.Tests/Util.cs @@ -6,9 +6,9 @@ using System.Collections.Generic; using System.Linq; using Microsoft.ML.Data; -using Xunit; using Newtonsoft.Json; using Newtonsoft.Json.Converters; +using Xunit; namespace Microsoft.ML.AutoML.Test { @@ -16,7 +16,7 @@ internal static class Util { public static void AssertObjectMatchesJson(string expectedJson, T obj) { - var actualJson = JsonConvert.SerializeObject(obj, + var actualJson = JsonConvert.SerializeObject(obj, Formatting.Indented, new JsonConverter[] { new StringEnumConverter() }); Assert.Equal(expectedJson, actualJson); } diff --git a/test/Microsoft.ML.AutoML.Tests/Utils/MLNetUtils/DataViewTestFixture.cs b/test/Microsoft.ML.AutoML.Tests/Utils/MLNetUtils/DataViewTestFixture.cs index 42f04daa70..c5c6528afb 100644 --- a/test/Microsoft.ML.AutoML.Tests/Utils/MLNetUtils/DataViewTestFixture.cs +++ b/test/Microsoft.ML.AutoML.Tests/Utils/MLNetUtils/DataViewTestFixture.cs @@ -59,4 +59,4 @@ public static IDataView BuildDummyDataView(IEnumerable<(string name, DataViewTyp return dataBuilder.GetDataView(); } } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.AutoML.Tests/Utils/TaskAgnosticAutoFit.cs b/test/Microsoft.ML.AutoML.Tests/Utils/TaskAgnosticAutoFit.cs index 49e91e4382..d28edc0e30 100644 --- a/test/Microsoft.ML.AutoML.Tests/Utils/TaskAgnosticAutoFit.cs +++ b/test/Microsoft.ML.AutoML.Tests/Utils/TaskAgnosticAutoFit.cs @@ -2,10 +2,10 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
-using Microsoft.ML.Data; using System; using System.Collections.Generic; using System.Linq; +using Microsoft.ML.Data; namespace Microsoft.ML.AutoML.Test { diff --git a/test/Microsoft.ML.AutoML.Tests/Utils/TaskAgnosticIterationResult.cs b/test/Microsoft.ML.AutoML.Tests/Utils/TaskAgnosticIterationResult.cs index 30ec1522f5..e574f71bc0 100644 --- a/test/Microsoft.ML.AutoML.Tests/Utils/TaskAgnosticIterationResult.cs +++ b/test/Microsoft.ML.AutoML.Tests/Utils/TaskAgnosticIterationResult.cs @@ -2,10 +2,10 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. -using Microsoft.ML.Data; using System; using System.Collections.Generic; using System.Linq; +using Microsoft.ML.Data; namespace Microsoft.ML.AutoML.Test { diff --git a/test/Microsoft.ML.Benchmarks.Tests/BenchmarksTest.cs b/test/Microsoft.ML.Benchmarks.Tests/BenchmarksTest.cs index 285aad7845..4c19dc5d48 100644 --- a/test/Microsoft.ML.Benchmarks.Tests/BenchmarksTest.cs +++ b/test/Microsoft.ML.Benchmarks.Tests/BenchmarksTest.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/test/Microsoft.ML.CodeAnalyzer.Tests/Code/NameTest.cs b/test/Microsoft.ML.CodeAnalyzer.Tests/Code/NameTest.cs index fdc19d925a..68d93a3d02 100644 --- a/test/Microsoft.ML.CodeAnalyzer.Tests/Code/NameTest.cs +++ b/test/Microsoft.ML.CodeAnalyzer.Tests/Code/NameTest.cs @@ -196,4 +196,4 @@ class BeClass { } struct Marco { public int Polo; } }"; } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.CodeGenerator.Tests/ApprovalTests/ConsoleCodeGeneratorTests.cs b/test/Microsoft.ML.CodeGenerator.Tests/ApprovalTests/ConsoleCodeGeneratorTests.cs index 9bb8332f2d..b2f56b51ec 100644 --- a/test/Microsoft.ML.CodeGenerator.Tests/ApprovalTests/ConsoleCodeGeneratorTests.cs +++ b/test/Microsoft.ML.CodeGenerator.Tests/ApprovalTests/ConsoleCodeGeneratorTests.cs @@ -229,7 +229,7 @@ public void AzureImageCodeGeneratorTest() StablePackageVersion = "stableversion", UnstablePackageVersion = "unstableversion", OnnxModelName = @"/path/to/onnxModel", - OnnxRuntimePacakgeVersion = "1.2.3", + OnnxRuntimePackageVersion = "1.2.3", IsAzureAttach = true, IsObjectDetection = false, IsImage = true, @@ -265,7 +265,7 @@ public void AzureObjectDetectionCodeGeneratorTest() StablePackageVersion = "stableversion", UnstablePackageVersion = "unstableversion", OnnxModelName = @"/path/to/onnxModel", - OnnxRuntimePacakgeVersion = @"1.2.3", + OnnxRuntimePackageVersion = @"1.2.3", IsAzureAttach = true, IsImage = false, IsObjectDetection = true, @@ -305,7 +305,7 @@ public void AzureCodeGeneratorTest() StablePackageVersion = "StablePackageVersion", UnstablePackageVersion = "UnstablePackageVersion", OnnxModelName = @"\path\to\onnx", - OnnxRuntimePacakgeVersion = "1.2.3", + OnnxRuntimePackageVersion = "1.2.3", IsAzureAttach = true, IsImage = false, IsObjectDetection = false, @@ -582,7 +582,8 @@ private CodeGenerator PrepareForRecommendationTask() this._columnInference = new ColumnInferenceResults() { TextLoaderOptions = textLoaderArgs, - ColumnInformation = new ColumnInformation() { + ColumnInformation = new ColumnInformation() + { LabelColumnName = "Label", UserIdColumnName = "userId", ItemIdColumnName = "movieId" @@ -751,7 +752,7 @@ private CodeGenerator 
PrepareForRecommendationTask() this._columnInference = new ColumnInferenceResults() { TextLoaderOptions = textLoaderArgs, - ColumnInformation = new ColumnInformation() { LabelColumnName = "Label" , GroupIdColumnName = "GroupId"} + ColumnInformation = new ColumnInformation() { LabelColumnName = "Label", GroupIdColumnName = "GroupId" } }; } @@ -1139,7 +1140,7 @@ private static CodeGeneratorSettings CreateCodeGeneratorSettingsFor(TaskKind tas ModelName = "x:\\models\\model.zip", StablePackageVersion = StablePackageVersion, UnstablePackageVersion = UnstablePackageVersion, - OnnxRuntimePacakgeVersion = "1.2.3", + OnnxRuntimePackageVersion = "1.2.3", }; } } diff --git a/test/Microsoft.ML.CodeGenerator.Tests/ApprovalTests/TemplateTest.cs b/test/Microsoft.ML.CodeGenerator.Tests/ApprovalTests/TemplateTest.cs index a2b96b9abd..af2ce6382e 100644 --- a/test/Microsoft.ML.CodeGenerator.Tests/ApprovalTests/TemplateTest.cs +++ b/test/Microsoft.ML.CodeGenerator.Tests/ApprovalTests/TemplateTest.cs @@ -1,4 +1,8 @@ -using System; +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using System; using System.Collections.Generic; using System.Runtime.CompilerServices; using System.Text; diff --git a/test/Microsoft.ML.CodeGenerator.Tests/TrainerGeneratorTests.cs b/test/Microsoft.ML.CodeGenerator.Tests/TrainerGeneratorTests.cs index 6d1560e751..8c939f41be 100644 --- a/test/Microsoft.ML.CodeGenerator.Tests/TrainerGeneratorTests.cs +++ b/test/Microsoft.ML.CodeGenerator.Tests/TrainerGeneratorTests.cs @@ -316,7 +316,7 @@ public void MatrixFactorizationAdvancedTest() var actual = codeGenerator.GenerateTrainerAndUsings(); string expectedTrainerString = "MatrixFactorization(new MatrixFactorizationTrainer.Options(){MatrixColumnIndexColumnName=\"userId\",MatrixRowIndexColumnName=\"movieId\",LabelColumnName=\"rating\",NumberOfIterations=10,LearningRate=0.01f,ApproximationRank=8,Lambda=0.01f,LossFunction=MatrixFactorizationTrainer.LossFunctionType.SquareLossRegression,Alpha=1f,C=1E-05f})"; Assert.Equal(expectedTrainerString, actual.Item1); - Assert.Equal(new string[] { "using Microsoft.ML.Trainers;\r\n" },actual.Item2); + Assert.Equal(new string[] { "using Microsoft.ML.Trainers;\r\n" }, actual.Item2); } [Fact] diff --git a/test/Microsoft.ML.CodeGenerator.Tests/UtilTest.cs b/test/Microsoft.ML.CodeGenerator.Tests/UtilTest.cs index 3c82273f58..f970f83ced 100644 --- a/test/Microsoft.ML.CodeGenerator.Tests/UtilTest.cs +++ b/test/Microsoft.ML.CodeGenerator.Tests/UtilTest.cs @@ -1,4 +1,8 @@ -using System; +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. 
+ +using System; using System.Collections.Generic; using System.IO; using System.Linq; @@ -124,7 +128,7 @@ public async Task TestGenerateSampleDataAsync() await file.FlushAsync(); file.Close(); var context = new MLContext(); - var dataView = context.Data.LoadFromTextFile(filePath,separatorChar:',', hasHeader: true); + var dataView = context.Data.LoadFromTextFile(filePath, separatorChar: ',', hasHeader: true); var columnInference = new ColumnInferenceResults() { ColumnInformation = new ColumnInformation() @@ -189,7 +193,7 @@ public async Task TestGenerateSampleDataAsyncDuplicateColumnNames() [Fact] public void NormalizeTest() { - var testStrArray = new string[] { "Abc Abc", "abc ABC", "12", "12.3", "1AB .C"}; + var testStrArray = new string[] { "Abc Abc", "abc ABC", "12", "12.3", "1AB .C" }; var expectedStrArray = new string[] { "Abc_Abc", "Abc_ABC", "_12", "_12_3", "_1AB__C" }; for (int i = 0; i != expectedStrArray.Count(); ++i) { diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/ColumnTypes.cs b/test/Microsoft.ML.Core.Tests/UnitTests/ColumnTypes.cs index 6f774a7588..e50fa4ddee 100644 --- a/test/Microsoft.ML.Core.Tests/UnitTests/ColumnTypes.cs +++ b/test/Microsoft.ML.Core.Tests/UnitTests/ColumnTypes.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/CoreBaseTestClass.cs b/test/Microsoft.ML.Core.Tests/UnitTests/CoreBaseTestClass.cs index 6c25102de3..f3cf55cb45 100644 --- a/test/Microsoft.ML.Core.Tests/UnitTests/CoreBaseTestClass.cs +++ b/test/Microsoft.ML.Core.Tests/UnitTests/CoreBaseTestClass.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
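Editor's note (illustrative, not part of the patch): `MatrixFactorizationAdvancedTest` in the TrainerGeneratorTests hunk a little further up asserts the code string the generator emits. For orientation, this is roughly the hand-written ML.NET call that string corresponds to — a sketch only, assuming the Microsoft.ML.Recommender package, with the option values and column names taken from the test's expected string.

```csharp
using Microsoft.ML;
using Microsoft.ML.Trainers;

class MatrixFactorizationSketch
{
    static void Main()
    {
        var mlContext = new MLContext(seed: 1);

        // Option values mirror the expectedTrainerString checked by the test.
        var options = new MatrixFactorizationTrainer.Options
        {
            MatrixColumnIndexColumnName = "userId",
            MatrixRowIndexColumnName = "movieId",
            LabelColumnName = "rating",
            NumberOfIterations = 10,
            LearningRate = 0.01f,
            ApproximationRank = 8,
            Lambda = 0.01f,
            LossFunction = MatrixFactorizationTrainer.LossFunctionType.SquareLossRegression,
            Alpha = 1f,
            C = 1E-05f
        };

        // Building the estimator needs no data; Fit would require a view with key-typed
        // "userId"/"movieId" columns and a "rating" label column.
        var trainer = mlContext.Recommendation().Trainers.MatrixFactorization(options);
    }
}
```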
diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/DataTypes.cs b/test/Microsoft.ML.Core.Tests/UnitTests/DataTypes.cs index 21384c7721..50a48ac4b0 100644 --- a/test/Microsoft.ML.Core.Tests/UnitTests/DataTypes.cs +++ b/test/Microsoft.ML.Core.Tests/UnitTests/DataTypes.cs @@ -18,14 +18,14 @@ public DataTypesTest(ITestOutputHelper helper) { } - private readonly static Conversions _conv = Conversions.DefaultInstance; + private static readonly Conversions _conv = Conversions.DefaultInstance; [Fact] public void R4ToSBtoR4() { var r4ToSB = Conversions.DefaultInstance.GetStringConversion(NumberDataViewType.Single); - var txToR4 = Conversions.DefaultInstance.GetStandardConversion< ReadOnlyMemory, float>( + var txToR4 = Conversions.DefaultInstance.GetStandardConversion, float>( TextDataViewType.Instance, NumberDataViewType.Single, out bool identity2); Assert.NotNull(r4ToSB); diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/FileSource.cs b/test/Microsoft.ML.Core.Tests/UnitTests/FileSource.cs index 5fb108cb84..ab535d554a 100644 --- a/test/Microsoft.ML.Core.Tests/UnitTests/FileSource.cs +++ b/test/Microsoft.ML.Core.Tests/UnitTests/FileSource.cs @@ -31,7 +31,7 @@ public void MultiFileSourceUnitTest() var file1 = Path.Combine(dirName, "a.txt"); var file2 = Path.Combine(dirName, "b.txt"); - + File.WriteAllText(file1, "Unit Test"); File.WriteAllText(file2, "Unit Test"); @@ -70,7 +70,7 @@ public void MultiFileSourceUnitTest() File.WriteAllText(fileDataSA, "Unit Test"); File.WriteAllText(fileDataSB, "Unit Test"); - fileSource = new MultiFileSource(dataDir+"/*"); + fileSource = new MultiFileSource(dataDir + "/*"); Assert.True(fileSource.Count == 2, $"Error passing concatenated paths to {nameof(MultiFileSource)}"); fileSource = new MultiFileSource(dataFolderDir + "/.../*"); diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/ScoreSchemaTest.cs b/test/Microsoft.ML.Core.Tests/UnitTests/ScoreSchemaTest.cs index 79a76d6a3a..27cc168da6 100644 --- a/test/Microsoft.ML.Core.Tests/UnitTests/ScoreSchemaTest.cs +++ b/test/Microsoft.ML.Core.Tests/UnitTests/ScoreSchemaTest.cs @@ -1,4 +1,8 @@ -using System; +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using System; using Microsoft.ML.Data; using Xunit; using Xunit.Abstractions; diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/TestContracts.cs b/test/Microsoft.ML.Core.Tests/UnitTests/TestContracts.cs index 240ac075d9..80e9e763ae 100644 --- a/test/Microsoft.ML.Core.Tests/UnitTests/TestContracts.cs +++ b/test/Microsoft.ML.Core.Tests/UnitTests/TestContracts.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/TestEarlyStoppingCriteria.cs b/test/Microsoft.ML.Core.Tests/UnitTests/TestEarlyStoppingCriteria.cs index 728ce7c193..62ab15bf5f 100644 --- a/test/Microsoft.ML.Core.Tests/UnitTests/TestEarlyStoppingCriteria.cs +++ b/test/Microsoft.ML.Core.Tests/UnitTests/TestEarlyStoppingCriteria.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. 
// See the LICENSE file in the project root for more information. @@ -31,7 +31,7 @@ public void TolerantEarlyStoppingCriterionTest() bool isBestCandidate; bool shouldStop; - for (int i=0; i<100; i++) + for (int i = 0; i < 100; i++) { float score = 0.001f * i; shouldStop = cr.CheckScore(score, 0, out isBestCandidate); @@ -61,15 +61,15 @@ public void GLEarlyStoppingCriterionTest() float score = 0.001f * i; shouldStop = cr.CheckScore(score, 0, out isBestCandidate); Assert.True(isBestCandidate); - Assert.False(shouldStop); + Assert.False(shouldStop); } shouldStop = cr.CheckScore(1.0f, 0, out isBestCandidate); Assert.True(isBestCandidate); - Assert.False(shouldStop); + Assert.False(shouldStop); shouldStop = cr.CheckScore(0.98f, 0, out isBestCandidate); - Assert.False(isBestCandidate); + Assert.False(isBestCandidate); Assert.True(shouldStop); } @@ -86,26 +86,26 @@ public void LPEarlyStoppingCriterionTest() float score = 0.001f * i; shouldStop = cr.CheckScore(score, score, out isBestCandidate); Assert.True(isBestCandidate); - Assert.False(shouldStop); + Assert.False(shouldStop); } for (int i = 1; i <= 10; i++) { shouldStop = cr.CheckScore(i, i, out isBestCandidate); Assert.True(isBestCandidate); - Assert.False(shouldStop); + Assert.False(shouldStop); } // At this point, average of score should be 8 and the best score should be 10. - for (int i = 0; i < 3; i++ ) + for (int i = 0; i < 3; i++) { shouldStop = cr.CheckScore(0, 10f, out isBestCandidate); - Assert.False(isBestCandidate); - Assert.False(shouldStop); + Assert.False(isBestCandidate); + Assert.False(shouldStop); } shouldStop = cr.CheckScore(0, 10f, out isBestCandidate); - Assert.False(isBestCandidate); + Assert.False(isBestCandidate); Assert.True(shouldStop); } @@ -122,26 +122,26 @@ public void PQEarlyStoppingCriterionTest() float score = 0.001f * i; shouldStop = cr.CheckScore(score, score, out isBestCandidate); Assert.True(isBestCandidate); - Assert.False(shouldStop); + Assert.False(shouldStop); } for (int i = 1; i <= 10; i++) { shouldStop = cr.CheckScore(i, i, out isBestCandidate); Assert.True(isBestCandidate); - Assert.False(shouldStop); + Assert.False(shouldStop); } // At this point, average of score should be 8 and the best score should be 10. 
for (int i = 0; i < 3; i++) { shouldStop = cr.CheckScore(10f, 10f, out isBestCandidate); - Assert.False(isBestCandidate); - Assert.False(shouldStop); + Assert.False(isBestCandidate); + Assert.False(shouldStop); } shouldStop = cr.CheckScore(0, 10f, out isBestCandidate); - Assert.False(isBestCandidate); + Assert.False(isBestCandidate); Assert.True(shouldStop); } @@ -159,20 +159,20 @@ public void UPEarlyStoppingCriterionTest() float score = 0.001f * i; shouldStop = cr.CheckScore(score, 0, out isBestCandidate); Assert.True(isBestCandidate); - Assert.False(shouldStop); + Assert.False(shouldStop); } for (int i = 0; i < windowSize - 1; i++) { float score = 0.09f - 0.001f * i; shouldStop = cr.CheckScore(score, 0, out isBestCandidate); - Assert.False(isBestCandidate); - Assert.False(shouldStop); + Assert.False(isBestCandidate); + Assert.False(shouldStop); } shouldStop = cr.CheckScore(0.0f, 0, out isBestCandidate); Assert.True(shouldStop); - Assert.False(isBestCandidate); + Assert.False(isBestCandidate); } } } diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs b/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs index 0f3300f8a0..d2300e7b12 100644 --- a/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs +++ b/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. @@ -1805,7 +1805,8 @@ public void EntryPointPipelineEnsembleText() if (i % 2 == 0) { data = new TextFeaturizingEstimator(Env, "Features", new List { "Text" }, - new TextFeaturizingEstimator.Options { + new TextFeaturizingEstimator.Options + { StopWordsRemoverOptions = new StopWordsRemovingEstimator.Options(), }).Fit(data).Transform(data); } @@ -6877,7 +6878,7 @@ void RankingWithColumnIdEntryPoint() runner.RunAll(); var data = runner.GetOutput("overall_metrics"); - using(var cursor = data.GetRowCursorForAllColumns()) + using (var cursor = data.GetRowCursorForAllColumns()) { var ndcgGetter = cursor.GetGetter>(data.Schema["NDCG"]); VBuffer ndcgBuffer = default; diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/TestHosts.cs b/test/Microsoft.ML.Core.Tests/UnitTests/TestHosts.cs index db27f7bb62..b3be67f208 100644 --- a/test/Microsoft.ML.Core.Tests/UnitTests/TestHosts.cs +++ b/test/Microsoft.ML.Core.Tests/UnitTests/TestHosts.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
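Editor's note (illustrative, not part of the patch): `TestCancellationApi` in the hunk that follows drives cancellation through a chain of registered hosts; from user code the same signal is raised with `MLContext.CancelExecution()`. The sketch below uses an arbitrary toy pipeline as an assumption — on a real workload the in-flight `Fit` is expected to stop with an "Operation was canceled"-style exception (as the AutoML max-experiment-time test earlier in this section checks), while this tiny job may simply finish first.

```csharp
using System;
using System.Threading.Tasks;
using Microsoft.ML;

class CancellationSketch
{
    private class Row
    {
        public float Label { get; set; }
        public float Feature { get; set; }
    }

    static void Main()
    {
        var mlContext = new MLContext(seed: 1);
        IDataView data = mlContext.Data.LoadFromEnumerable(new[]
        {
            new Row { Label = 1f, Feature = 2f },
            new Row { Label = 2f, Feature = 4f }
        });

        var pipeline = mlContext.Transforms.Concatenate("Features", nameof(Row.Feature))
            .Append(mlContext.Regression.Trainers.Sdca());

        var training = Task.Run(() => pipeline.Fit(data));

        // Ask every component created from this context to stop as soon as it can.
        mlContext.CancelExecution();

        try { training.Wait(); }
        catch (AggregateException ex) { Console.WriteLine(ex.InnerException?.Message); }
    }
}
```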
@@ -101,9 +101,9 @@ public void TestCancellationApi() for (int i = 0; i < 5; i++) { - var tupple = hosts.ElementAt(i); - var newHost = tupple.Item1.Register((tupple.Item2 + 1).ToString()); - hosts.Add(new Tuple(newHost, tupple.Item2 + 1)); + var tuple = hosts.ElementAt(i); + var newHost = tuple.Item1.Register((tuple.Item2 + 1).ToString()); + hosts.Add(new Tuple(newHost, tuple.Item2 + 1)); } ((MLContext)env).CancelExecution(); diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/TestLoss.cs b/test/Microsoft.ML.Core.Tests/UnitTests/TestLoss.cs index 28d79a406d..d7428dbed3 100644 --- a/test/Microsoft.ML.Core.Tests/UnitTests/TestLoss.cs +++ b/test/Microsoft.ML.Core.Tests/UnitTests/TestLoss.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/TestLruCache.cs b/test/Microsoft.ML.Core.Tests/UnitTests/TestLruCache.cs index e2c4c7ba35..ef92227a2c 100644 --- a/test/Microsoft.ML.Core.Tests/UnitTests/TestLruCache.cs +++ b/test/Microsoft.ML.Core.Tests/UnitTests/TestLruCache.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. @@ -73,4 +73,4 @@ public void EntryLruCache() Assert.Equal("bar", keys[1]); } } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/TestModelLoad.cs b/test/Microsoft.ML.Core.Tests/UnitTests/TestModelLoad.cs index a6d3fc1dac..9159e4f4b3 100644 --- a/test/Microsoft.ML.Core.Tests/UnitTests/TestModelLoad.cs +++ b/test/Microsoft.ML.Core.Tests/UnitTests/TestModelLoad.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. @@ -56,4 +56,4 @@ public void LoadOldConcatTransformModel() } } } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/TestResourceDownload.cs b/test/Microsoft.ML.Core.Tests/UnitTests/TestResourceDownload.cs index bf83d58908..a473abd265 100644 --- a/test/Microsoft.ML.Core.Tests/UnitTests/TestResourceDownload.cs +++ b/test/Microsoft.ML.Core.Tests/UnitTests/TestResourceDownload.cs @@ -1,4 +1,8 @@ -using System; +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using System; using System.Collections.Generic; using System.IO; using System.Linq; @@ -118,7 +122,7 @@ public async Task TestDownloadError() if (results.ErrorMessage != null) Fail(String.Format("Expected zero length error string. Received error: {0}", results.ErrorMessage)); if (t.Status != TaskStatus.RanToCompletion) - Fail("Download did not complete succesfully"); + Fail("Download did not complete successfully"); if (!File.Exists(GetOutputPath("copyto", "sentiment.emd"))) { Fail($"File '{GetOutputPath("copyto", "sentiment.emd")}' does not exist. 
" + diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/TestUtilities.cs b/test/Microsoft.ML.Core.Tests/UnitTests/TestUtilities.cs index e94de49320..112aca85ea 100644 --- a/test/Microsoft.ML.Core.Tests/UnitTests/TestUtilities.cs +++ b/test/Microsoft.ML.Core.Tests/UnitTests/TestUtilities.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. @@ -45,7 +45,7 @@ public void CheckIsMonotonicallyIncreasingInt() int[] nullX = null; Assert.True(Utils.IsMonotonicallyIncreasing(nullX)); } - + [Fact] [TestCategory("Utilities")] public void CheckIsMonotonicallyIncreasingFloat() @@ -67,7 +67,7 @@ public void CheckIsMonotonicallyIncreasingFloat() x[1] = x[6]; Assert.False(Utils.IsMonotonicallyIncreasing(x)); x[1] = x1Temp; - + // NaN: `Array.Sort()` will put NaNs into the first position, // but we want to guarantee that NaNs aren't allowed in these arrays. var x0Temp = x[0]; @@ -233,7 +233,7 @@ public void CheckAreEqualBool() public void CheckAreEqualFloat() { // A sorted (increasing) array - float[] x = Enumerable.Range(0, 10).Select(i => (float) i).ToArray(); + float[] x = Enumerable.Range(0, 10).Select(i => (float)i).ToArray(); float[] y = Enumerable.Range(0, 10).Select(i => (float)i).ToArray(); Assert.True(Utils.AreEqual(x, y)); diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/TestVBuffer.cs b/test/Microsoft.ML.Core.Tests/UnitTests/TestVBuffer.cs index 837be7975e..be2af7f5a4 100644 --- a/test/Microsoft.ML.Core.Tests/UnitTests/TestVBuffer.cs +++ b/test/Microsoft.ML.Core.Tests/UnitTests/TestVBuffer.cs @@ -109,15 +109,15 @@ public void VBufferOpMath() float d = 0; switch (trial % 3) { - case 0: - d = 0; - break; - case 1: - d = 1; - break; - case 2: - d = rgen.NextDouble().ToFloat(); - break; + case 0: + d = 0; + break; + case 1: + d = 1; + break; + case 2: + d = rgen.NextDouble().ToFloat(); + break; } VectorUtils.ScaleBy(ref a, d); var editor = VBufferEditor.CreateFromBuffer(ref aOrig); @@ -952,14 +952,14 @@ private static float ScaleFactor(int trial, Random rgen) { switch (trial % 4) { - case 0: - return 0; - case 1: - return 1; - case 2: - return -1; - default: - return rgen.NextDouble().ToFloat() * 10 - 5; + case 0: + return 0; + case 1: + return 1; + case 2: + return -1; + default: + return rgen.NextDouble().ToFloat() * 10 - 5; } } @@ -1101,62 +1101,62 @@ private static void GeneratePair(Random rgen, int len, out VBuffer a, out VBufferEditor bEditor; switch (subcase) { - case GenLogic.BothDense: - // Both dense. 
- GenerateVBuffer(rgen, len, len, out a); - GenerateVBuffer(rgen, len, len, out b); - break; - case GenLogic.ASparseBDense: - case GenLogic.ADenseBSparse: - GenerateVBuffer(rgen, len, len, out a); - GenerateVBuffer(rgen, len, rgen.Next(len), out b); - if (subcase == GenLogic.ASparseBDense) - Utils.Swap(ref a, ref b); - break; - case GenLogic.BothSparseASameB: - GenerateVBuffer(rgen, len, rgen.Next(len), out a); - GenerateVBuffer(rgen, len, a.GetValues().Length, out b); - bEditor = VBufferEditor.CreateFromBuffer(ref b); - for (int i = 0; i < a.GetIndices().Length; ++i) - bEditor.Indices[i] = a.GetIndices()[i]; - b = bEditor.Commit(); - break; - case GenLogic.BothSparseASubsetB: - case GenLogic.BothSparseBSubsetA: - GenerateVBuffer(rgen, len, rgen.Next(len), out a); - GenerateVBuffer(rgen, a.GetValues().Length, rgen.Next(a.GetValues().Length), out b); - bEditor = VBufferEditor.Create(ref b, len, b.GetValues().Length); - for (int i = 0; i < bEditor.Values.Length; ++i) - bEditor.Indices[i] = a.GetIndices()[bEditor.Indices[i]]; - b = bEditor.Commit(); - if (subcase == GenLogic.BothSparseASubsetB) - Utils.Swap(ref a, ref b); - break; - case GenLogic.BothSparseAUnrelatedB: - GenerateVBuffer(rgen, len, rgen.Next(len), out a); - GenerateVBuffer(rgen, len, rgen.Next(len), out b); - break; - case GenLogic.BothSparseADisjointB: - GenerateVBuffer(rgen, len, rgen.Next(len), out a); - int boundary = rgen.Next(a.GetValues().Length + 1); - GenerateVBuffer(rgen, len, a.GetValues().Length - boundary, out b); - if (a.GetValues().Length != 0 && b.GetValues().Length != 0 && a.GetValues().Length != b.GetValues().Length) - { - var aEditor = VBufferEditor.CreateFromBuffer(ref a); + case GenLogic.BothDense: + // Both dense. + GenerateVBuffer(rgen, len, len, out a); + GenerateVBuffer(rgen, len, len, out b); + break; + case GenLogic.ASparseBDense: + case GenLogic.ADenseBSparse: + GenerateVBuffer(rgen, len, len, out a); + GenerateVBuffer(rgen, len, rgen.Next(len), out b); + if (subcase == GenLogic.ASparseBDense) + Utils.Swap(ref a, ref b); + break; + case GenLogic.BothSparseASameB: + GenerateVBuffer(rgen, len, rgen.Next(len), out a); + GenerateVBuffer(rgen, len, a.GetValues().Length, out b); bEditor = VBufferEditor.CreateFromBuffer(ref b); - Utils.Shuffle(rgen, aEditor.Indices); - aEditor.Indices.Slice(boundary).CopyTo(bEditor.Indices); - - GenericSpanSortHelper.Sort(aEditor.Indices, 0, boundary); - GenericSpanSortHelper.Sort(bEditor.Indices, 0, bEditor.Indices.Length); - a = aEditor.CommitTruncated(boundary); + for (int i = 0; i < a.GetIndices().Length; ++i) + bEditor.Indices[i] = a.GetIndices()[i]; b = bEditor.Commit(); - } - if (rgen.Next(2) == 0) - Utils.Swap(ref a, ref b); - break; - default: - throw Contracts.Except("Whoops, did you miss a case?"); + break; + case GenLogic.BothSparseASubsetB: + case GenLogic.BothSparseBSubsetA: + GenerateVBuffer(rgen, len, rgen.Next(len), out a); + GenerateVBuffer(rgen, a.GetValues().Length, rgen.Next(a.GetValues().Length), out b); + bEditor = VBufferEditor.Create(ref b, len, b.GetValues().Length); + for (int i = 0; i < bEditor.Values.Length; ++i) + bEditor.Indices[i] = a.GetIndices()[bEditor.Indices[i]]; + b = bEditor.Commit(); + if (subcase == GenLogic.BothSparseASubsetB) + Utils.Swap(ref a, ref b); + break; + case GenLogic.BothSparseAUnrelatedB: + GenerateVBuffer(rgen, len, rgen.Next(len), out a); + GenerateVBuffer(rgen, len, rgen.Next(len), out b); + break; + case GenLogic.BothSparseADisjointB: + GenerateVBuffer(rgen, len, rgen.Next(len), out a); + int boundary = 
rgen.Next(a.GetValues().Length + 1); + GenerateVBuffer(rgen, len, a.GetValues().Length - boundary, out b); + if (a.GetValues().Length != 0 && b.GetValues().Length != 0 && a.GetValues().Length != b.GetValues().Length) + { + var aEditor = VBufferEditor.CreateFromBuffer(ref a); + bEditor = VBufferEditor.CreateFromBuffer(ref b); + Utils.Shuffle(rgen, aEditor.Indices); + aEditor.Indices.Slice(boundary).CopyTo(bEditor.Indices); + + GenericSpanSortHelper.Sort(aEditor.Indices, 0, boundary); + GenericSpanSortHelper.Sort(bEditor.Indices, 0, bEditor.Indices.Length); + a = aEditor.CommitTruncated(boundary); + b = bEditor.Commit(); + } + if (rgen.Next(2) == 0) + Utils.Swap(ref a, ref b); + break; + default: + throw Contracts.Except("Whoops, did you miss a case?"); } Contracts.Assert(a.Length == len); Contracts.Assert(b.Length == len); @@ -1246,4 +1246,4 @@ private static void TestEquivalent(ref VBuffer expected, ref VBuffer ac } } } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.CpuMath.PerformanceTests/AvxPerformanceTests.cs b/test/Microsoft.ML.CpuMath.PerformanceTests/AvxPerformanceTests.cs index af89929c71..96f41d6699 100644 --- a/test/Microsoft.ML.CpuMath.PerformanceTests/AvxPerformanceTests.cs +++ b/test/Microsoft.ML.CpuMath.PerformanceTests/AvxPerformanceTests.cs @@ -70,9 +70,9 @@ public float SumSqU() public float SumSqDiffU() => AvxIntrinsics.SumSqDiffU(DefaultScale, new Span(src, 0, Length)); - [Benchmark] + [Benchmark] public float SumAbsU() - => AvxIntrinsics.SumAbsU(new Span(src, 0, Length)); + => AvxIntrinsics.SumAbsU(new Span(src, 0, Length)); [Benchmark] public float SumAbsDiffU() @@ -115,7 +115,7 @@ public void SdcaL1UpdateSU() [BenchmarkCategory("Fma")] public void MatMul() => AvxIntrinsics.MatMul(testMatrixAligned, testSrcVectorAligned, testDstVectorAligned, matrixLength, matrixLength); - + [Benchmark] public void MatMulTran() => AvxIntrinsics.MatMulTran(testMatrixAligned, testSrcVectorAligned, testDstVectorAligned, matrixLength, matrixLength); diff --git a/test/Microsoft.ML.CpuMath.PerformanceTests/PerformanceTests.cs b/test/Microsoft.ML.CpuMath.PerformanceTests/PerformanceTests.cs index 37cbb9142c..847f5a3bf2 100644 --- a/test/Microsoft.ML.CpuMath.PerformanceTests/PerformanceTests.cs +++ b/test/Microsoft.ML.CpuMath.PerformanceTests/PerformanceTests.cs @@ -105,7 +105,7 @@ public void Setup() } testMatrixAligned = new AlignedArray(matrixLength * matrixLength, align); - testMatrixAligned.CopyFrom(src.AsSpan(0, (matrixLength - 1) * ( matrixLength - 1))); + testMatrixAligned.CopyFrom(src.AsSpan(0, (matrixLength - 1) * (matrixLength - 1))); testSrcVectorAligned = new AlignedArray(matrixLength, align); testSrcVectorAligned.CopyFrom(src1.AsSpan(0, matrixLength - 1)); // odd input @@ -121,4 +121,4 @@ public void GlobalCleanup() original.CopyTo(result, 0); } } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.CpuMath.PerformanceTests/Program.cs b/test/Microsoft.ML.CpuMath.PerformanceTests/Program.cs index f405e2aa44..566ec8e7ba 100644 --- a/test/Microsoft.ML.CpuMath.PerformanceTests/Program.cs +++ b/test/Microsoft.ML.CpuMath.PerformanceTests/Program.cs @@ -10,7 +10,7 @@ namespace Microsoft.ML.CpuMath.PerformanceTests { class Program { - public static void Main(string[] args) + public static void Main(string[] args) => BenchmarkSwitcher .FromAssembly(typeof(Program).Assembly) .Run(args, CreateCustomConfig()); diff --git a/test/Microsoft.ML.CpuMath.PerformanceTests/SmallInputCpuMathPerformanceTests.cs 
b/test/Microsoft.ML.CpuMath.PerformanceTests/SmallInputCpuMathPerformanceTests.cs index 619bd7948f..c890f69d93 100644 --- a/test/Microsoft.ML.CpuMath.PerformanceTests/SmallInputCpuMathPerformanceTests.cs +++ b/test/Microsoft.ML.CpuMath.PerformanceTests/SmallInputCpuMathPerformanceTests.cs @@ -7,7 +7,7 @@ namespace Microsoft.ML.CpuMath.PerformanceTests { - public class SmallInputCpuMathPerformanceTests: PerformanceTests + public class SmallInputCpuMathPerformanceTests : PerformanceTests { private int _smallInputLength = 10; diff --git a/test/Microsoft.ML.CpuMath.PerformanceTests/SsePerformanceTests.cs b/test/Microsoft.ML.CpuMath.PerformanceTests/SsePerformanceTests.cs index 653ed3da7b..4006589ab5 100644 --- a/test/Microsoft.ML.CpuMath.PerformanceTests/SsePerformanceTests.cs +++ b/test/Microsoft.ML.CpuMath.PerformanceTests/SsePerformanceTests.cs @@ -13,11 +13,11 @@ public class SsePerformanceTests : PerformanceTests [Benchmark] public void AddScalarU() => SseIntrinsics.AddScalarU(DefaultScale, new Span(dst, 0, Length)); - + [Benchmark] public void Scale() => SseIntrinsics.Scale(DefaultScale, new Span(dst, 0, Length)); - + [Benchmark] public void ScaleSrcU() => SseIntrinsics.ScaleSrcU(DefaultScale, src, dst, Length); @@ -25,7 +25,7 @@ public void ScaleSrcU() [Benchmark] public void ScaleAddU() => SseIntrinsics.ScaleAddU(DefaultScale, DefaultScale, new Span(dst, 0, Length)); - + [Benchmark] public void AddScaleU() => SseIntrinsics.AddScaleU(DefaultScale, src, dst, Length); @@ -57,11 +57,11 @@ public float Sum() [Benchmark] public float SumSqU() => SseIntrinsics.SumSqU(new Span(src, 0, Length)); - + [Benchmark] public float SumSqDiffU() => SseIntrinsics.SumSqDiffU(DefaultScale, new Span(src, 0, Length)); - + [Benchmark] public float SumAbsU() => SseIntrinsics.SumAbsU(new Span(src, 0, Length)); @@ -69,23 +69,23 @@ public float SumAbsU() [Benchmark] public float SumAbsDiffU() => SseIntrinsics.SumAbsDiffU(DefaultScale, new Span(src, 0, Length)); - + [Benchmark] public float MaxAbsU() => SseIntrinsics.MaxAbsU(new Span(src, 0, Length)); - + [Benchmark] public float MaxAbsDiffU() => SseIntrinsics.MaxAbsDiffU(DefaultScale, new Span(src, 0, Length)); - + [Benchmark] public float DotU() => SseIntrinsics.DotU(src, dst, Length); - + [Benchmark] public float DotSU() => SseIntrinsics.DotSU(src, dst, idx, IndexLength); - + [Benchmark] public float Dist2() => SseIntrinsics.Dist2(src, dst, Length); diff --git a/test/Microsoft.ML.CpuMath.UnitTests/UnitTests.cs b/test/Microsoft.ML.CpuMath.UnitTests/UnitTests.cs index 3f507f1e5d..c71a4fabc2 100644 --- a/test/Microsoft.ML.CpuMath.UnitTests/UnitTests.cs +++ b/test/Microsoft.ML.CpuMath.UnitTests/UnitTests.cs @@ -516,7 +516,7 @@ public void AddUTest(string mode, string test, Dictionary enviro var actual = dst; Assert.Equal(expected, actual, _comparer); return RemoteExecutor.SuccessExitCode; - }, mode, test, new RemoteInvokeOptions (environmentVariables)); + }, mode, test, new RemoteInvokeOptions(environmentVariables)); } [Theory] @@ -927,4 +927,4 @@ public int GetHashCode(float a) throw new NotImplementedException(); } } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.IntegrationTests/Common.cs b/test/Microsoft.ML.IntegrationTests/Common.cs index be60b71c3e..9f3a042046 100644 --- a/test/Microsoft.ML.IntegrationTests/Common.cs +++ b/test/Microsoft.ML.IntegrationTests/Common.cs @@ -15,7 +15,7 @@ namespace Microsoft.ML.IntegrationTests internal static class Common { /// - /// Asssert that an rows are of . + /// Assert that an rows are of . /// /// An . 
public static void AssertTypeTestDataset(IDataView testTypeDataset) @@ -316,7 +316,7 @@ public static void AssertNotEqual(float[] array1, float[] array2) // to roll our own float equality checker. Assert.Equal(array1[i], array2[i]); } - catch(EqualException) + catch (EqualException) { mismatch = true; break; diff --git a/test/Microsoft.ML.IntegrationTests/DataIO.cs b/test/Microsoft.ML.IntegrationTests/DataIO.cs index 431b1ff2f7..20b1607b87 100644 --- a/test/Microsoft.ML.IntegrationTests/DataIO.cs +++ b/test/Microsoft.ML.IntegrationTests/DataIO.cs @@ -21,7 +21,7 @@ public class DataIO : IntegrationTestBaseClass public DataIO(ITestOutputHelper output) : base(output) { // SaveAsText expects a "space, tab, comma, semicolon, or bar". - _separators = new char[] { ' ', '\t', ',', ';', '|', }; + _separators = new char[] { ' ', '\t', ',', ';', '|', }; } /// @@ -66,7 +66,7 @@ public void ExportToIEnumerable() public void WriteToAndReadFromADelimetedFile() { var mlContext = new MLContext(seed: 1); - + var dataBefore = mlContext.Data.LoadFromEnumerable(TypeTestData.GenerateDataset()); foreach (var separator in _separators) diff --git a/test/Microsoft.ML.IntegrationTests/DataTransformation.cs b/test/Microsoft.ML.IntegrationTests/DataTransformation.cs index 445680f7d5..b073c3df1b 100644 --- a/test/Microsoft.ML.IntegrationTests/DataTransformation.cs +++ b/test/Microsoft.ML.IntegrationTests/DataTransformation.cs @@ -174,7 +174,7 @@ public void ExtensibilityNormalizeColumns() // Compose the transformation. var pipeline = mlContext.Transforms.Concatenate("Features", Iris.Features) .Append(mlContext.Transforms.NormalizeMinMax("Features")); - + // Transform the data. var transformedData = pipeline.Fit(data).Transform(data); @@ -221,4 +221,4 @@ private float GetRandomNumber(float number) return (float)rng.NextDouble(); } } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.IntegrationTests/Debugging.cs b/test/Microsoft.ML.IntegrationTests/Debugging.cs index 3e89f594ef..4456f0cddf 100644 --- a/test/Microsoft.ML.IntegrationTests/Debugging.cs +++ b/test/Microsoft.ML.IntegrationTests/Debugging.cs @@ -53,7 +53,7 @@ public void InspectIntermediatePipelineSteps() KeepPunctuations = false, OutputTokensColumnName = "FeaturizeTextTokens", CharFeatureExtractor = null, // new WordBagEstimator.Options { NgramLength = 0, SkipLength = -1 }, - WordFeatureExtractor = new WordBagEstimator.Options { NgramLength = 1}, + WordFeatureExtractor = new WordBagEstimator.Options { NgramLength = 1 }, Norm = TextFeaturizingEstimator.NormFunction.None }, "SentimentText"); @@ -139,7 +139,7 @@ public void InspectSchemaUponLoadingData() if (i == 0) Assert.Equal("Label", column.Name); else - Assert.Equal(HousingRegression.Features[i-1], column.Name); + Assert.Equal(HousingRegression.Features[i - 1], column.Name); i++; } @@ -149,7 +149,7 @@ public void InspectSchemaUponLoadingData() // Validate there was data in the row by checking that some values were not zero since zero is the default. 
var rowSum = row.MedianHomeValue; foreach (var property in HousingRegression.Features) - rowSum += (float) row.GetType().GetProperty(property).GetValue(row, null); + rowSum += (float)row.GetType().GetProperty(property).GetValue(row, null); Assert.NotEqual(0, rowSum); } @@ -191,7 +191,8 @@ public void ViewTrainingOutput() } } - internal class LogWatcher { + internal class LogWatcher + { public readonly ConcurrentDictionary Lines; @@ -199,7 +200,7 @@ public LogWatcher() { Lines = new ConcurrentDictionary(); } - + public void ObserveEvent(object sender, LoggingEventArgs e) { Lines.AddOrUpdate(e.Message, 1, (key, oldValue) => oldValue + 1); diff --git a/test/Microsoft.ML.IntegrationTests/Evaluation.cs b/test/Microsoft.ML.IntegrationTests/Evaluation.cs index 3f92fa98b7..53c7db1ce3 100644 --- a/test/Microsoft.ML.IntegrationTests/Evaluation.cs +++ b/test/Microsoft.ML.IntegrationTests/Evaluation.cs @@ -15,7 +15,7 @@ namespace Microsoft.ML.IntegrationTests { public class Evaluation : IntegrationTestBaseClass { - public Evaluation(ITestOutputHelper output): base(output) + public Evaluation(ITestOutputHelper output) : base(output) { } @@ -235,7 +235,8 @@ public void TrainAndEvaluateRecommendation() // Create a pipeline to train on the sentiment data. var pipeline = mlContext.Recommendation().Trainers.MatrixFactorization( - new MatrixFactorizationTrainer.Options{ + new MatrixFactorizationTrainer.Options + { MatrixColumnIndexColumnName = "MatrixColumnIndex", MatrixRowIndexColumnName = "MatrixRowIndex", LabelColumnName = "Label", @@ -325,4 +326,4 @@ public void TrainAndEvaluateWithPrecisionRecallCurves() // c. Append the Precision and Recall to an IList. } } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.IntegrationTests/Explainability.cs b/test/Microsoft.ML.IntegrationTests/Explainability.cs index 124137a0e7..58f5bfde64 100644 --- a/test/Microsoft.ML.IntegrationTests/Explainability.cs +++ b/test/Microsoft.ML.IntegrationTests/Explainability.cs @@ -44,7 +44,7 @@ public void GlobalFeatureImportanceWithPermutationFeatureImportance(bool saveMod IDataView transformedData; RegressionPredictionTransformer linearPredictor; - if(saveModel) + if (saveModel) { ITransformer loadedModel; diff --git a/test/Microsoft.ML.IntegrationTests/IntrospectiveTraining.cs b/test/Microsoft.ML.IntegrationTests/IntrospectiveTraining.cs index 77eebc24fa..c69880a175 100644 --- a/test/Microsoft.ML.IntegrationTests/IntrospectiveTraining.cs +++ b/test/Microsoft.ML.IntegrationTests/IntrospectiveTraining.cs @@ -81,7 +81,7 @@ public void InspectFastTreeModelParameters() var pipeline = mlContext.Transforms.Text.FeaturizeText("Features", "SentimentText") .AppendCacheCheckpoint(mlContext) .Append(mlContext.BinaryClassification.Trainers.FastTree( - new FastTreeBinaryTrainer.Options{ NumberOfLeaves = 5, NumberOfTrees= 3, NumberOfThreads = 1 })); + new FastTreeBinaryTrainer.Options { NumberOfLeaves = 5, NumberOfTrees = 3, NumberOfThreads = 1 })); // Fit the pipeline. var model = pipeline.Fit(data); @@ -182,7 +182,7 @@ public void InspectLdaModelParameters() // Define the pipeline. var pipeline = mlContext.Transforms.Text.ProduceWordBags("SentimentBag", "SentimentText") - .Append(mlContext.Transforms.Text.LatentDirichletAllocation("Features", "SentimentBag", + .Append(mlContext.Transforms.Text.LatentDirichletAllocation("Features", "SentimentBag", numberOfTopics: numTopics, maximumNumberOfIterations: 10)); // Fit the pipeline. 
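Note: the LogWatcher type reformatted in the Debugging.cs hunk above counts how often each log line is emitted by subscribing to MLContext.Log. A minimal standalone sketch of that subscription pattern (hypothetical helper name and wiring, not the test's exact code) might look like:

    using System.Collections.Concurrent;
    using Microsoft.ML;

    static ConcurrentDictionary<string, int> WatchLogLines(MLContext mlContext)
    {
        var lines = new ConcurrentDictionary<string, int>();
        // Count every distinct message; AddOrUpdate stays correct even if components log concurrently.
        mlContext.Log += (sender, e) => lines.AddOrUpdate(e.Message, 1, (key, oldValue) => oldValue + 1);
        return lines;
    }

Every message routed through the context then shows up as a key with its repeat count, which is the shape of data the test asserts against.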
@@ -194,7 +194,7 @@ public void InspectLdaModelParameters() // Get the topics and summaries from the model. var ldaDetails = ldaTransform.GetLdaDetails(0); Assert.False(ldaDetails.ItemScoresPerTopic == null && ldaDetails.WordScoresPerTopic == null); - if(ldaDetails.ItemScoresPerTopic != null) + if (ldaDetails.ItemScoresPerTopic != null) Assert.Equal(numTopics, ldaDetails.ItemScoresPerTopic.Count); if (ldaDetails.WordScoresPerTopic != null) Assert.Equal(numTopics, ldaDetails.WordScoresPerTopic.Count); @@ -212,7 +212,7 @@ public void InspectLdaModelParameters() /// Introspective Training: Linear model parameters may be inspected. /// [Fact] - public void InpsectLinearModelParameters() + public void InspectLinearModelParameters() { var mlContext = new MLContext(seed: 1); @@ -278,7 +278,7 @@ public void IntrospectNormalization() Common.AssertFiniteNumbers(config.Scale); } /// - /// Introspective Training: I can inspect a pipeline to determine which transformers were included. + /// Introspective Training: I can inspect a pipeline to determine which transformers were included. /// [Fact] public void InspectPipelineContents() @@ -308,7 +308,7 @@ public void InspectPipelineContents() { // It is possible to get the type at runtime. Assert.IsType(expectedTypes[i], transformer); - + // It's also possible to inspect the schema output from the transform. currentSchema = transformer.GetOutputSchema(currentSchema); foreach (var expectedColumn in expectedColumns[i]) @@ -345,7 +345,7 @@ public void InspectSlotNamesForReversibleHash() // Transform the data. var transformedData = model.Transform(data); - // Verify that the slotnames can be used to backtrack to the original values by confirming that + // Verify that the slotnames can be used to backtrack to the original values by confirming that // all unique values in the input data are in the output data slot names. // First get a list of the unique values. VBuffer> categoricalSlotNames = new VBuffer>(); @@ -372,12 +372,12 @@ public void InspectSlotNamesForReversibleHash() for (int i = 0; i < Adult.CategoricalFeatures.Length; i++) { // Fetch the categorical value. - string value = (string) row.GetType().GetProperty(Adult.CategoricalFeatures[i]).GetValue(row, null); + string value = (string)row.GetType().GetProperty(Adult.CategoricalFeatures[i]).GetValue(row, null); Assert.Contains($"{i}:{value}", uniqueValues); } } } - + /// /// Introspective Training: I can create nested pipelines, and extract individual components. 
/// @@ -431,9 +431,11 @@ private IEstimator { new TestDataXY() { A = new float[InputSize] } }; - var stringData = new List { new TestDataDifferntType() { data_0 = new string[InputSize] } }; + var stringData = new List { new TestDataDifferentType() { data_0 = new string[InputSize] } }; var sizeData = new List { new TestDataSize() { data_0 = new float[2] } }; var pipe = ML.Transforms.DnnFeaturizeImage("output_1", m => m.ModelSelector.ResNet18(m.Environment, m.OutputColumn, m.InputColumn), "data_0"); @@ -132,7 +132,7 @@ public void OnnxFeaturizerWorkout() [OnnxFact] public void TestOldSavingAndLoading() { - //skip running for x86 as this test using too much memory (over 2GB limit on x86) + //skip running for x86 as this test using too much memory (over 2GB limit on x86) //and very like to hit memory related issue when running on CI //TODO: optimized memory usage in related code and enable x86 run if (!Environment.Is64BitProcess) @@ -218,14 +218,14 @@ public void TestLoadFromDiskAndPredictionEngine() allowQuoting: true, allowSparse: false); - var dataProcessPipeline = ML.Transforms.Conversion.MapValueToKey("Label", "Label") - .Append(ML.Transforms.LoadImages("ImagePath_featurized", imageFolder, "ImagePath")) - .Append(ML.Transforms.ResizeImages("ImagePath_featurized", 224, 224, "ImagePath_featurized")) - .Append(ML.Transforms.ExtractPixels("ImagePath_featurized", "ImagePath_featurized")) - .Append(ML.Transforms.DnnFeaturizeImage("ImagePath_featurized", m => m.ModelSelector.ResNet18(m.Environment, m.OutputColumn, m.InputColumn), "ImagePath_featurized")) - .Append(ML.Transforms.Concatenate("Features", new[] { "ImagePath_featurized" })) - .Append(ML.Transforms.NormalizeMinMax("Features", "Features")) - .AppendCacheCheckpoint(ML); + var dataProcessPipeline = ML.Transforms.Conversion.MapValueToKey("Label", "Label") + .Append(ML.Transforms.LoadImages("ImagePath_featurized", imageFolder, "ImagePath")) + .Append(ML.Transforms.ResizeImages("ImagePath_featurized", 224, 224, "ImagePath_featurized")) + .Append(ML.Transforms.ExtractPixels("ImagePath_featurized", "ImagePath_featurized")) + .Append(ML.Transforms.DnnFeaturizeImage("ImagePath_featurized", m => m.ModelSelector.ResNet18(m.Environment, m.OutputColumn, m.InputColumn), "ImagePath_featurized")) + .Append(ML.Transforms.Concatenate("Features", new[] { "ImagePath_featurized" })) + .Append(ML.Transforms.NormalizeMinMax("Features", "Features")) + .AppendCacheCheckpoint(ML); var trainer = ML.MulticlassClassification.Trainers.OneVersusAll(ML.BinaryClassification.Trainers.AveragedPerceptron(labelColumnName: "Label", numberOfIterations: 10, featureColumnName: "Features"), labelColumnName: "Label") .Append(ML.Transforms.Conversion.MapKeyToValue("PredictedLabel", "PredictedLabel")); diff --git a/test/Microsoft.ML.OnnxTransformerTest/OnnxTransformTests.cs b/test/Microsoft.ML.OnnxTransformerTest/OnnxTransformTests.cs index 1bc8097644..706a2a755e 100644 --- a/test/Microsoft.ML.OnnxTransformerTest/OnnxTransformTests.cs +++ b/test/Microsoft.ML.OnnxTransformerTest/OnnxTransformTests.cs @@ -12,12 +12,12 @@ using Microsoft.ML.RunTests; using Microsoft.ML.Runtime; using Microsoft.ML.TestFramework.Attributes; +using Microsoft.ML.TestFrameworkCommon.Attributes; using Microsoft.ML.Tools; using Microsoft.ML.Transforms.Image; +using Microsoft.ML.Transforms.Onnx; using Xunit; using Xunit.Abstractions; -using Microsoft.ML.Transforms.Onnx; -using Microsoft.ML.TestFrameworkCommon.Attributes; namespace Microsoft.ML.Tests { @@ -66,7 +66,7 @@ private class TestDataXY public 
float[] A; } - private class TestDataDifferntType + private class TestDataDifferentType { [VectorType(InputSize)] public string[] data_0; @@ -139,12 +139,12 @@ public void TestSimpleCase(bool useOptionsCtor) }); var xyData = new List { new TestDataXY() { A = new float[InputSize] } }; - var stringData = new List { new TestDataDifferntType() { data_0 = new string[InputSize] } }; + var stringData = new List { new TestDataDifferentType() { data_0 = new string[InputSize] } }; var sizeData = new List { new TestDataSize() { data_0 = new float[2] } }; var options = new OnnxOptions() { OutputColumns = new[] { "softmaxout_1" }, - InputColumns = new[] {"data_0" }, + InputColumns = new[] { "data_0" }, ModelFile = modelFile, GpuDeviceId = _gpuDeviceId, FallbackToCpu = _fallbackToCpu, @@ -540,7 +540,7 @@ public void OnnxModelInMemoryImage() // Convert training data to IDataView, the general data type used in ML.NET. var dataView = ML.Data.LoadFromEnumerable(dataPoints); - // Create a ML.NET pipeline which contains two steps. First, ExtractPixle is used to convert the 224x224 image to a 3x224x224 float tensor. + // Create a ML.NET pipeline which contains two steps. First, ExtractPixel is used to convert the 224x224 image to a 3x224x224 float tensor. // Then the float tensor is fed into a ONNX model with an input called "data_0" and an output called "softmaxout_1". Note that "data_0" and // "softmaxout_1" are model input and output names stored in the used ONNX model file. Users may need to inspect their own models to // get the right input and output column names. diff --git a/test/Microsoft.ML.PerformanceTests/CacheDataViewBench.cs b/test/Microsoft.ML.PerformanceTests/CacheDataViewBench.cs index 15b2ab6ea6..f409fb33e7 100644 --- a/test/Microsoft.ML.PerformanceTests/CacheDataViewBench.cs +++ b/test/Microsoft.ML.PerformanceTests/CacheDataViewBench.cs @@ -4,8 +4,8 @@ using System; using BenchmarkDotNet.Attributes; -using Microsoft.ML.PerformanceTests.Harness; using Microsoft.ML.Data; +using Microsoft.ML.PerformanceTests.Harness; namespace Microsoft.ML.PerformanceTests { @@ -69,14 +69,14 @@ public void Setup() [Benchmark] public void CacheWithCursor() { - // This setup takes very less time to execute as compared to the actual _cursorGetter. + // This setup takes very less time to execute as compared to the actual _cursorGetter. // The most preferable position for this setup will be in GlobalSetup. _cursor = _cacheDataView.GetRowCursor(_col); _cursorGetter = _cursor.GetGetter(_col); int val = 0; while (_cursor.MoveNext()) - _cursorGetter(ref val); + _cursorGetter(ref val); } [Benchmark] diff --git a/test/Microsoft.ML.PerformanceTests/FeaturizeTextBench.cs b/test/Microsoft.ML.PerformanceTests/FeaturizeTextBench.cs index 5916de4082..c342d8a883 100644 --- a/test/Microsoft.ML.PerformanceTests/FeaturizeTextBench.cs +++ b/test/Microsoft.ML.PerformanceTests/FeaturizeTextBench.cs @@ -3,11 +3,11 @@ // See the LICENSE file in the project root for more information. 
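Note: the OnnxModelInMemoryImage comment corrected above describes a two-step pipeline: ExtractPixels turns each 224x224 image into a 3x224x224 float tensor named "data_0", which is then fed to an ONNX model whose output column is "softmaxout_1". A minimal sketch of a pipeline with that shape (assuming an in-memory image column named "Image" and a local model path; not the test's exact code) could be:

    var mlContext = new MLContext();
    var pipeline = mlContext.Transforms.ExtractPixels("data_0", "Image")
        .Append(mlContext.Transforms.ApplyOnnxModel(
            outputColumnName: "softmaxout_1",
            inputColumnName: "data_0",
            modelFile: "model.onnx")); // hypothetical path; the test supplies its own model file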
using System; -using System.IO; using System.Collections.Generic; +using System.IO; using System.Linq; -using Microsoft.ML.Data; using BenchmarkDotNet.Attributes; +using Microsoft.ML.Data; using Microsoft.ML.Transforms.Text; using Xunit; @@ -31,7 +31,7 @@ public void SetupData() path = RandomFile.CreateRandomFile(path, _numRows, _numColumns, _maxWordLength); var columns = new List(); - for(int i = 0; i < _numColumns; i++) + for (int i = 0; i < _numColumns; i++) { columns.Add(new TextLoader.Column($"Column{i}", DataKind.String, i)); } @@ -71,7 +71,7 @@ public ITransformer TrainFeaturizeText() featurizers.Add(featurizer); } - IEstimator pipeline = featurizers.First(); + IEstimator pipeline = featurizers.First(); foreach (var featurizer in featurizers.Skip(1)) { pipeline = pipeline.Append(featurizer); diff --git a/test/Microsoft.ML.PerformanceTests/Harness/ProjectGenerator.cs b/test/Microsoft.ML.PerformanceTests/Harness/ProjectGenerator.cs index f5a0b96769..55f2036e75 100644 --- a/test/Microsoft.ML.PerformanceTests/Harness/ProjectGenerator.cs +++ b/test/Microsoft.ML.PerformanceTests/Harness/ProjectGenerator.cs @@ -60,7 +60,7 @@ protected override void GenerateProject(BuildPartition buildPartition, Artifacts "); // This overrides the .exe path to also involve the runtimeIdentifier for .NET Framework - protected override string GetBinariesDirectoryPath(string buildArtifactsDirectoryPath, string configuration) + protected override string GetBinariesDirectoryPath(string buildArtifactsDirectoryPath, string configuration) => Path.Combine(buildArtifactsDirectoryPath, "bin", configuration, TargetFrameworkMoniker, _runtimeIdentifier); private string GenerateNativeReferences(BuildPartition buildPartition, ILogger logger) diff --git a/test/Microsoft.ML.PerformanceTests/HashBench.cs b/test/Microsoft.ML.PerformanceTests/HashBench.cs index afb00dc511..bbc032d68f 100644 --- a/test/Microsoft.ML.PerformanceTests/HashBench.cs +++ b/test/Microsoft.ML.PerformanceTests/HashBench.cs @@ -5,8 +5,8 @@ using System; using System.Linq; using BenchmarkDotNet.Attributes; -using Microsoft.ML.PerformanceTests.Harness; using Microsoft.ML.Data; +using Microsoft.ML.PerformanceTests.Harness; using Microsoft.ML.Runtime; using Microsoft.ML.Transforms; diff --git a/test/Microsoft.ML.PerformanceTests/ImageClassificationBench.cs b/test/Microsoft.ML.PerformanceTests/ImageClassificationBench.cs index 2b4f267b69..1aedf19d69 100644 --- a/test/Microsoft.ML.PerformanceTests/ImageClassificationBench.cs +++ b/test/Microsoft.ML.PerformanceTests/ImageClassificationBench.cs @@ -3,17 +3,17 @@ // See the LICENSE file in the project root for more information. 
using System; +using System.Collections.Generic; using System.IO; using System.IO.Compression; -using System.Collections.Generic; using System.Linq; +using System.Net.Http; using System.Threading.Tasks; +using BenchmarkDotNet.Attributes; using Microsoft.ML.Data; using Microsoft.ML.Transforms; -using BenchmarkDotNet.Attributes; -using static Microsoft.ML.DataOperationsCatalog; -using System.Net.Http; using Microsoft.ML.Vision; +using static Microsoft.ML.DataOperationsCatalog; namespace Microsoft.ML.PerformanceTests { diff --git a/test/Microsoft.ML.PerformanceTests/KMeansAndLogisticRegressionBench.cs b/test/Microsoft.ML.PerformanceTests/KMeansAndLogisticRegressionBench.cs index 82c0da7022..b23d89593f 100644 --- a/test/Microsoft.ML.PerformanceTests/KMeansAndLogisticRegressionBench.cs +++ b/test/Microsoft.ML.PerformanceTests/KMeansAndLogisticRegressionBench.cs @@ -3,9 +3,9 @@ // See the LICENSE file in the project root for more information. using BenchmarkDotNet.Attributes; -using Microsoft.ML.PerformanceTests.Harness; using Microsoft.ML.Calibrators; using Microsoft.ML.Data; +using Microsoft.ML.PerformanceTests.Harness; using Microsoft.ML.Trainers; namespace Microsoft.ML.PerformanceTests @@ -46,4 +46,4 @@ public class KMeansAndLogisticRegressionBench : BenchmarkBase return model.LastTransformer.Model; } } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.PerformanceTests/Numeric/Ranking.cs b/test/Microsoft.ML.PerformanceTests/Numeric/Ranking.cs index 78504fe752..a7a4a9f292 100644 --- a/test/Microsoft.ML.PerformanceTests/Numeric/Ranking.cs +++ b/test/Microsoft.ML.PerformanceTests/Numeric/Ranking.cs @@ -5,11 +5,11 @@ using System.IO; using BenchmarkDotNet.Attributes; using Microsoft.ML.Data; -using Microsoft.ML.Trainers.LightGbm; +using Microsoft.ML.TestFrameworkCommon; using Microsoft.ML.Trainers; using Microsoft.ML.Trainers.FastTree; +using Microsoft.ML.Trainers.LightGbm; using Microsoft.ML.Transforms; -using Microsoft.ML.TestFrameworkCommon; namespace Microsoft.ML.PerformanceTests { diff --git a/test/Microsoft.ML.PerformanceTests/PredictionEngineBench.cs b/test/Microsoft.ML.PerformanceTests/PredictionEngineBench.cs index 0e2f2823f3..4b8ffccfbe 100644 --- a/test/Microsoft.ML.PerformanceTests/PredictionEngineBench.cs +++ b/test/Microsoft.ML.PerformanceTests/PredictionEngineBench.cs @@ -1,10 +1,10 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
using BenchmarkDotNet.Attributes; -using Microsoft.ML.PerformanceTests.Harness; using Microsoft.ML.Data; +using Microsoft.ML.PerformanceTests.Harness; using Microsoft.ML.Trainers; using Microsoft.ML.Transforms; diff --git a/test/Microsoft.ML.PerformanceTests/RffTransform.cs b/test/Microsoft.ML.PerformanceTests/RffTransform.cs index 81f6507b95..8cb1f36f5e 100644 --- a/test/Microsoft.ML.PerformanceTests/RffTransform.cs +++ b/test/Microsoft.ML.PerformanceTests/RffTransform.cs @@ -4,8 +4,8 @@ using System.IO; using BenchmarkDotNet.Attributes; -using Microsoft.ML.PerformanceTests.Harness; using Microsoft.ML.Data; +using Microsoft.ML.PerformanceTests.Harness; using Microsoft.ML.TestFrameworkCommon; using Microsoft.ML.Transforms; @@ -37,7 +37,7 @@ public void CV_Multiclass_Digits_RffTransform_OVAAveragedPerceptron() new TextLoader.Column("Features", DataKind.Single, new[] {new TextLoader.Range() {Min = 0, Max = 63}}) }, HasHeader = false, - Separators = new[] {','} + Separators = new[] { ',' } }); var data = loader.Load(_dataPathDigits); diff --git a/test/Microsoft.ML.PerformanceTests/ShuffleRowsBench.cs b/test/Microsoft.ML.PerformanceTests/ShuffleRowsBench.cs index 710923bad6..a4f007fc49 100644 --- a/test/Microsoft.ML.PerformanceTests/ShuffleRowsBench.cs +++ b/test/Microsoft.ML.PerformanceTests/ShuffleRowsBench.cs @@ -3,8 +3,8 @@ // See the LICENSE file in the project root for more information. using BenchmarkDotNet.Attributes; -using Microsoft.ML.PerformanceTests.Harness; using Microsoft.ML.Data; +using Microsoft.ML.PerformanceTests.Harness; namespace Microsoft.ML.PerformanceTests { @@ -13,7 +13,7 @@ public class ShuffleRowsBench : BenchmarkBase { private TrainRow[] _rows; private MLContext _context; - + [GlobalSetup] public void Setup() { diff --git a/test/Microsoft.ML.PerformanceTests/StochasticDualCoordinateAscentClassifierBench.cs b/test/Microsoft.ML.PerformanceTests/StochasticDualCoordinateAscentClassifierBench.cs index e54df47295..33bd2e5982 100644 --- a/test/Microsoft.ML.PerformanceTests/StochasticDualCoordinateAscentClassifierBench.cs +++ b/test/Microsoft.ML.PerformanceTests/StochasticDualCoordinateAscentClassifierBench.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. @@ -6,8 +6,8 @@ using System.Globalization; using BenchmarkDotNet.Attributes; using BenchmarkDotNet.Engines; -using Microsoft.ML.PerformanceTests.Harness; using Microsoft.ML.Data; +using Microsoft.ML.PerformanceTests.Harness; using Microsoft.ML.Trainers; using Microsoft.ML.Transforms; using Microsoft.ML.Transforms.Text; diff --git a/test/Microsoft.ML.PerformanceTests/Text/MultiClassClassification.cs b/test/Microsoft.ML.PerformanceTests/Text/MultiClassClassification.cs index 87234138c2..14d9f8f69c 100644 --- a/test/Microsoft.ML.PerformanceTests/Text/MultiClassClassification.cs +++ b/test/Microsoft.ML.PerformanceTests/Text/MultiClassClassification.cs @@ -1,14 +1,14 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
using System.IO; using BenchmarkDotNet.Attributes; using Microsoft.ML.Data; -using Microsoft.ML.Trainers.LightGbm; +using Microsoft.ML.TestFrameworkCommon; using Microsoft.ML.Trainers; +using Microsoft.ML.Trainers.LightGbm; using Microsoft.ML.Transforms; -using Microsoft.ML.TestFrameworkCommon; namespace Microsoft.ML.PerformanceTests { diff --git a/test/Microsoft.ML.PerformanceTests/TextLoaderBench.cs b/test/Microsoft.ML.PerformanceTests/TextLoaderBench.cs index 2aabb2f91b..a82d4f2e47 100644 --- a/test/Microsoft.ML.PerformanceTests/TextLoaderBench.cs +++ b/test/Microsoft.ML.PerformanceTests/TextLoaderBench.cs @@ -3,11 +3,11 @@ // See the LICENSE file in the project root for more information. using System; -using System.IO; using System.Collections.Generic; +using System.IO; using System.Linq; -using Microsoft.ML.Data; using BenchmarkDotNet.Attributes; +using Microsoft.ML.Data; using Microsoft.ML.Transforms.Text; using Xunit; @@ -34,7 +34,7 @@ public void SetupData() path = RandomFile.CreateRandomFile(path, _numRows, _numColumns, _maxWordLength); _columns = new List(); - for(int i = 0; i < _numColumns; i++) + for (int i = 0; i < _numColumns; i++) { _columns.Add(new TextLoader.Column($"Column{i}", DataKind.String, i)); } @@ -55,7 +55,7 @@ public void SetupData() [Benchmark] public void TestTextLoaderGetters() { - using(var rowCursor = _dataView.GetRowCursorForAllColumns()) + using (var rowCursor = _dataView.GetRowCursorForAllColumns()) { var getters = new List>>(); for (int i = 0; i < _numColumnsToGet; i++) diff --git a/test/Microsoft.ML.PerformanceTests/TextPredictionEngineCreation.cs b/test/Microsoft.ML.PerformanceTests/TextPredictionEngineCreation.cs index 91e1019273..dde5d21cf1 100644 --- a/test/Microsoft.ML.PerformanceTests/TextPredictionEngineCreation.cs +++ b/test/Microsoft.ML.PerformanceTests/TextPredictionEngineCreation.cs @@ -55,4 +55,4 @@ public void Setup() return _context.Model.CreatePredictionEngine(_trainedModelOldFormat); } } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.Predictor.Tests/CmdLine/CmdIndenterTest.cs b/test/Microsoft.ML.Predictor.Tests/CmdLine/CmdIndenterTest.cs index 5976a1f793..b04e51e5f2 100644 --- a/test/Microsoft.ML.Predictor.Tests/CmdLine/CmdIndenterTest.cs +++ b/test/Microsoft.ML.Predictor.Tests/CmdLine/CmdIndenterTest.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/test/Microsoft.ML.Predictor.Tests/CmdLine/CmdLine.cs b/test/Microsoft.ML.Predictor.Tests/CmdLine/CmdLine.cs index 5ee5d05823..474798e208 100644 --- a/test/Microsoft.ML.Predictor.Tests/CmdLine/CmdLine.cs +++ b/test/Microsoft.ML.Predictor.Tests/CmdLine/CmdLine.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
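Note: the TestTextLoaderGetters benchmark reformatted above exercises the row-cursor getter pattern: open a cursor, create one getter per active column, and pull each value while iterating. A minimal sketch of that pattern over a loaded IDataView with string columns (hypothetical dataView and column count; not the benchmark itself):

    using System;
    using System.Collections.Generic;
    using Microsoft.ML;
    using Microsoft.ML.Data;

    static void ReadStringColumns(IDataView dataView, int columnsToRead)
    {
        using (var rowCursor = dataView.GetRowCursorForAllColumns())
        {
            // One getter per column we intend to read; getters are only valid for active columns.
            var getters = new List<ValueGetter<ReadOnlyMemory<char>>>();
            for (int i = 0; i < columnsToRead; i++)
                getters.Add(rowCursor.GetGetter<ReadOnlyMemory<char>>(dataView.Schema[i]));

            ReadOnlyMemory<char> value = default;
            while (rowCursor.MoveNext())
                foreach (var getter in getters)
                    getter(ref value);
        }
    }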
@@ -198,7 +198,7 @@ public override string ToString() [Argument(ArgumentType.Multiple, SignatureType = typeof(SignatureDataSaver))] public IComponentFactory sub = (IComponentFactory)CmdParser.CreateComponentFactory( typeof(IComponentFactory), - typeof(SignatureDataSaver), + typeof(SignatureDataSaver), "Text"); [Argument(ArgumentType.Multiple, SignatureType = typeof(SignatureDataSaver))] @@ -405,4 +405,4 @@ private string GetResText(string resName) Done(); } } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.Predictor.Tests/CmdLine/CmdLineReverseTest.cs b/test/Microsoft.ML.Predictor.Tests/CmdLine/CmdLineReverseTest.cs index 573dbdc333..88091e5081 100644 --- a/test/Microsoft.ML.Predictor.Tests/CmdLine/CmdLineReverseTest.cs +++ b/test/Microsoft.ML.Predictor.Tests/CmdLine/CmdLineReverseTest.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. @@ -97,7 +97,7 @@ public void NewTest() var calibrator = ((IComponentFactory)factory).CreateComponent(ml); } } - + private delegate void SignatureSimpleComponent(); private class SimpleArg @@ -134,7 +134,7 @@ private class SimpleArg //public SubComponent[] sub4 = new SubComponent[] { new SubComponent("sub4", "settings4"), new SubComponent("sub5", "settings5") }; /// - /// ToString is overrided by CmdParser.GetSettings which is of primary for this test + /// ToString is overridden by CmdParser.GetSettings which is of primary for this test /// /// public string ToString(IHostEnvironment env) diff --git a/test/Microsoft.ML.Predictor.Tests/CompareBaselines.cs b/test/Microsoft.ML.Predictor.Tests/CompareBaselines.cs index 56981d34e4..9deca9d739 100644 --- a/test/Microsoft.ML.Predictor.Tests/CompareBaselines.cs +++ b/test/Microsoft.ML.Predictor.Tests/CompareBaselines.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. @@ -176,7 +176,7 @@ private void CompareFiles(TextWriter log, string root1, string root2, string rel string line2 = rdr2.ReadLine(); pos++; - LRestart: +LRestart: if (line1 == null && line2 == null) break; if (line1 == null) @@ -374,4 +374,4 @@ private static bool Eq(string line, int ich, string val) return true; } } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.Predictor.Tests/Global.cs b/test/Microsoft.ML.Predictor.Tests/Global.cs index be9455917b..de8406abfb 100644 --- a/test/Microsoft.ML.Predictor.Tests/Global.cs +++ b/test/Microsoft.ML.Predictor.Tests/Global.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
diff --git a/test/Microsoft.ML.Predictor.Tests/ResultProcessor/TestResultProcessor.cs b/test/Microsoft.ML.Predictor.Tests/ResultProcessor/TestResultProcessor.cs index d01075e000..1a0e3cb737 100644 --- a/test/Microsoft.ML.Predictor.Tests/ResultProcessor/TestResultProcessor.cs +++ b/test/Microsoft.ML.Predictor.Tests/ResultProcessor/TestResultProcessor.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. @@ -52,7 +52,7 @@ private string[] SaveResourcesAsFiles(string[] resourceNames) } /// - /// A test for Processing the Result of a single Classfier + /// A test for Processing the Result of a single Classifier /// [Fact(Skip = "Need CoreTLC specific baseline update")] [TestCategory("Results Processor")] @@ -69,7 +69,7 @@ public void RPSingleClassifierTest() } /// - /// A test for Processing the Result of a single Classfier + /// A test for Processing the Result of a single Classifier /// [Fact(Skip = "Need CoreTLC specific baseline update")] [TestCategory("Results Processor")] @@ -103,7 +103,7 @@ public void RPSingleClassifierTestWIthEmptyLines() } /// - /// A test for Processing the Result of a Multiple Classfiers + /// A test for Processing the Result of a Multiple Classifiers /// [Fact(Skip = "Need CoreTLC specific baseline update")] [TestCategory("Results Processor")] @@ -120,7 +120,7 @@ public void RPMulticlassifierTest() } /// - /// A test for Processing the Result of a Multiple Classfiers + /// A test for Processing the Result of a Multiple Classifiers /// [Fact(Skip = "Need CoreTLC specific baseline update")] [TestCategory("Results Processor")] diff --git a/test/Microsoft.ML.Predictor.Tests/Test-API.cs b/test/Microsoft.ML.Predictor.Tests/Test-API.cs index f51444d27d..4033158a75 100644 --- a/test/Microsoft.ML.Predictor.Tests/Test-API.cs +++ b/test/Microsoft.ML.Predictor.Tests/Test-API.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. @@ -422,4 +422,4 @@ private void WeightedMetricTest(Instances noWeights, Instances weights1, Instanc } } #endif -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.Predictor.Tests/TestConcurrency.cs b/test/Microsoft.ML.Predictor.Tests/TestConcurrency.cs index 04388d6a4e..4b20ee8f36 100644 --- a/test/Microsoft.ML.Predictor.Tests/TestConcurrency.cs +++ b/test/Microsoft.ML.Predictor.Tests/TestConcurrency.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/test/Microsoft.ML.Predictor.Tests/TestCreateInstances.cs b/test/Microsoft.ML.Predictor.Tests/TestCreateInstances.cs index fed580a70e..e1d7244409 100644 --- a/test/Microsoft.ML.Predictor.Tests/TestCreateInstances.cs +++ b/test/Microsoft.ML.Predictor.Tests/TestCreateInstances.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. 
// The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/test/Microsoft.ML.Predictor.Tests/TestCrossValidation.cs b/test/Microsoft.ML.Predictor.Tests/TestCrossValidation.cs index 300ae3d235..5a4c474a3a 100644 --- a/test/Microsoft.ML.Predictor.Tests/TestCrossValidation.cs +++ b/test/Microsoft.ML.Predictor.Tests/TestCrossValidation.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/test/Microsoft.ML.Predictor.Tests/TestGamPublicInterfaces.cs b/test/Microsoft.ML.Predictor.Tests/TestGamPublicInterfaces.cs index 8895cdb44f..8984e96882 100644 --- a/test/Microsoft.ML.Predictor.Tests/TestGamPublicInterfaces.cs +++ b/test/Microsoft.ML.Predictor.Tests/TestGamPublicInterfaces.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. @@ -19,7 +19,7 @@ public TestGamPublicInterfaces(ITestOutputHelper output) : base(output) [Fact] [TestCategory("FastTree")] - public void TestGamDirectInstatiation() + public void TestGamDirectInstantiation() { var mlContext = new MLContext(seed: 1); diff --git a/test/Microsoft.ML.Predictor.Tests/TestIniModels.cs b/test/Microsoft.ML.Predictor.Tests/TestIniModels.cs index 578f92978c..46bca5b6c2 100644 --- a/test/Microsoft.ML.Predictor.Tests/TestIniModels.cs +++ b/test/Microsoft.ML.Predictor.Tests/TestIniModels.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. @@ -285,7 +285,7 @@ public class IniModelTestInformation } /// - /// Run INI test for a collection of combinationss of predictors and datasets. + /// Run INI test for a collection of combinations of predictors and datasets. /// /// /// diff --git a/test/Microsoft.ML.Predictor.Tests/TestParallelFasttreeInterface.cs b/test/Microsoft.ML.Predictor.Tests/TestParallelFasttreeInterface.cs index d3ea38b794..c0b0b3ea26 100644 --- a/test/Microsoft.ML.Predictor.Tests/TestParallelFasttreeInterface.cs +++ b/test/Microsoft.ML.Predictor.Tests/TestParallelFasttreeInterface.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs b/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs index 6663343ae4..b8b6524d10 100644 --- a/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs +++ b/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
@@ -16,16 +16,16 @@ namespace Microsoft.ML.RunTests using Microsoft.ML.Data; using Microsoft.ML.EntryPoints; using Microsoft.ML.Internal.Utilities; - using Microsoft.ML.Trainers.LightGbm; using Microsoft.ML.Runtime; using Microsoft.ML.TestFramework; + using Microsoft.ML.TestFrameworkCommon; + using Microsoft.ML.TestFrameworkCommon.Attributes; using Microsoft.ML.Trainers; using Microsoft.ML.Trainers.FastTree; + using Microsoft.ML.Trainers.LightGbm; using Xunit; using Xunit.Abstractions; using TestLearners = TestLearnersBase; - using Microsoft.ML.TestFrameworkCommon; - using Microsoft.ML.TestFrameworkCommon.Attributes; /// /// Tests using maml commands (IDV) functionality. @@ -697,7 +697,7 @@ private void CombineAndTestTreeEnsembles(IDataView idv, PredictorModel[] fastTre } var cursors = new DataViewRowCursor[predCount]; - var cols = scored.Schema.Where( c => c.Name.Equals("Score") || c.Name.Equals("Probability") || c.Name.Equals("PredictedLabel")); + var cols = scored.Schema.Where(c => c.Name.Equals("Score") || c.Name.Equals("Probability") || c.Name.Equals("PredictedLabel")); for (int i = 0; i < predCount; i++) cursors[i] = scoredArray[i].GetRowCursor(cols); @@ -1215,7 +1215,7 @@ public void RegressorSdcaTest() Done(); } -#region "Regressor" + #region "Regressor" #if OLD_TESTS // REVIEW: Port these tests? /// @@ -1526,7 +1526,7 @@ public void RegressorSyntheticDuplicatedOlsTest() } #endif -#endregion + #endregion /// ///A test for FR ranker diff --git a/test/Microsoft.ML.Predictor.Tests/TestTransposer.cs b/test/Microsoft.ML.Predictor.Tests/TestTransposer.cs index 5c1adbb957..8f92a758f8 100644 --- a/test/Microsoft.ML.Predictor.Tests/TestTransposer.cs +++ b/test/Microsoft.ML.Predictor.Tests/TestTransposer.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/test/Microsoft.ML.Predictor.Tests/TestTrivialPredictors.cs b/test/Microsoft.ML.Predictor.Tests/TestTrivialPredictors.cs index d66a2a9b12..decfa5ae62 100644 --- a/test/Microsoft.ML.Predictor.Tests/TestTrivialPredictors.cs +++ b/test/Microsoft.ML.Predictor.Tests/TestTrivialPredictors.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/test/Microsoft.ML.Sweeper.Tests/SweeperTest.cs b/test/Microsoft.ML.Sweeper.Tests/SweeperTest.cs index 1e5006b381..002e47fa80 100644 --- a/test/Microsoft.ML.Sweeper.Tests/SweeperTest.cs +++ b/test/Microsoft.ML.Sweeper.Tests/SweeperTest.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
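Note: CombineAndTestTreeEnsembles above selects just the scoring columns by name before opening cursors, so only those columns become active. A minimal sketch of that selection pattern over a scored IDataView (hypothetical "scored" variable; not the test's ensemble logic):

    using System.Linq;
    using Microsoft.ML;
    using Microsoft.ML.Data;

    static void ReadScores(IDataView scored)
    {
        // Activate only the columns the check needs.
        var cols = scored.Schema.Where(c =>
            c.Name == "Score" || c.Name == "Probability" || c.Name == "PredictedLabel");

        using (var cursor = scored.GetRowCursor(cols))
        {
            var scoreGetter = cursor.GetGetter<float>(scored.Schema["Score"]);
            float score = default;
            while (cursor.MoveNext())
                scoreGetter(ref score); // Probability/PredictedLabel getters work the same way.
        }
    }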
diff --git a/test/Microsoft.ML.TestFramework/Attributes/BenchmarkTheoryAttribute.cs b/test/Microsoft.ML.TestFramework/Attributes/BenchmarkTheoryAttribute.cs index fae467176e..982ec4e580 100644 --- a/test/Microsoft.ML.TestFramework/Attributes/BenchmarkTheoryAttribute.cs +++ b/test/Microsoft.ML.TestFramework/Attributes/BenchmarkTheoryAttribute.cs @@ -26,4 +26,4 @@ public BenchmarkTheoryAttribute() : base(SkipMessage) protected override bool IsEnvironmentSupported() => _isEnvironmentSupported; } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.TestFramework/Attributes/FeaturizersFactAttribute.cs b/test/Microsoft.ML.TestFramework/Attributes/FeaturizersFactAttribute.cs index 3d0c4e5a93..a6f1f34156 100644 --- a/test/Microsoft.ML.TestFramework/Attributes/FeaturizersFactAttribute.cs +++ b/test/Microsoft.ML.TestFramework/Attributes/FeaturizersFactAttribute.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. @@ -30,7 +30,7 @@ protected override bool IsEnvironmentSupported() using (Process process = new Process()) { process.StartInfo.FileName = "/bin/bash"; - process.StartInfo.Arguments= "-c \"cat /etc/*-release\""; + process.StartInfo.Arguments = "-c \"cat /etc/*-release\""; process.StartInfo.UseShellExecute = false; process.StartInfo.RedirectStandardOutput = true; process.StartInfo.CreateNoWindow = true; @@ -48,4 +48,4 @@ protected override bool IsEnvironmentSupported() return true; } } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.TestFramework/Attributes/LightGBMFactAttribute.cs b/test/Microsoft.ML.TestFramework/Attributes/LightGBMFactAttribute.cs index df40e5da0f..634b8a5251 100644 --- a/test/Microsoft.ML.TestFramework/Attributes/LightGBMFactAttribute.cs +++ b/test/Microsoft.ML.TestFramework/Attributes/LightGBMFactAttribute.cs @@ -23,4 +23,4 @@ protected override bool IsEnvironmentSupported() return Environment.Is64BitProcess && NativeLibrary.NativeLibraryExists("lib_lightgbm"); } } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.TestFramework/Attributes/OnnxTheoryAttribute.cs b/test/Microsoft.ML.TestFramework/Attributes/OnnxTheoryAttribute.cs index 42958a6f75..38841bf895 100644 --- a/test/Microsoft.ML.TestFramework/Attributes/OnnxTheoryAttribute.cs +++ b/test/Microsoft.ML.TestFramework/Attributes/OnnxTheoryAttribute.cs @@ -25,4 +25,4 @@ protected override bool IsEnvironmentSupported() && Microsoft.ML.TestFrameworkCommon.Utility.NativeLibrary.NativeLibraryExists("onnxruntime"); } } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.TestFramework/Attributes/TensorflowFactAttribute.cs b/test/Microsoft.ML.TestFramework/Attributes/TensorflowFactAttribute.cs index f2c3ebb86b..2455d61381 100644 --- a/test/Microsoft.ML.TestFramework/Attributes/TensorflowFactAttribute.cs +++ b/test/Microsoft.ML.TestFramework/Attributes/TensorflowFactAttribute.cs @@ -21,7 +21,7 @@ public TensorFlowFactAttribute() : base("TensorFlow is 64-bit only and is not su protected override bool IsEnvironmentSupported() { return (Environment.Is64BitProcess && - ( RuntimeInformation.IsOSPlatform(OSPlatform.Windows) || + (RuntimeInformation.IsOSPlatform(OSPlatform.Windows) || AttributeHelpers.CheckLibcVersionGreaterThanMinimum(new Version(2, 23)))) && 
Microsoft.ML.TestFrameworkCommon.Utility.NativeLibrary.NativeLibraryExists("tensorflow"); diff --git a/test/Microsoft.ML.TestFramework/Attributes/TensorflowTheoryAttribute.cs b/test/Microsoft.ML.TestFramework/Attributes/TensorflowTheoryAttribute.cs index ff282187a3..9c6de5fd4c 100644 --- a/test/Microsoft.ML.TestFramework/Attributes/TensorflowTheoryAttribute.cs +++ b/test/Microsoft.ML.TestFramework/Attributes/TensorflowTheoryAttribute.cs @@ -26,4 +26,4 @@ protected override bool IsEnvironmentSupported() && Microsoft.ML.TestFrameworkCommon.Utility.NativeLibrary.NativeLibraryExists("tensorflow"); } } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.TestFramework/Attributes/X64FactAttribute.cs b/test/Microsoft.ML.TestFramework/Attributes/X64FactAttribute.cs index 6b00d475bd..fcc015a73b 100644 --- a/test/Microsoft.ML.TestFramework/Attributes/X64FactAttribute.cs +++ b/test/Microsoft.ML.TestFramework/Attributes/X64FactAttribute.cs @@ -22,4 +22,4 @@ protected override bool IsEnvironmentSupported() return Environment.Is64BitProcess; } } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.TestFramework/Attributes/X86X64FactAttribute.cs b/test/Microsoft.ML.TestFramework/Attributes/X86X64FactAttribute.cs index 9a24f0afaa..c4f6c2553e 100644 --- a/test/Microsoft.ML.TestFramework/Attributes/X86X64FactAttribute.cs +++ b/test/Microsoft.ML.TestFramework/Attributes/X86X64FactAttribute.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. @@ -23,4 +23,4 @@ protected override bool IsEnvironmentSupported() return RuntimeInformation.ProcessArchitecture == Architecture.X86 || RuntimeInformation.ProcessArchitecture == Architecture.X64; } } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.TestFramework/BaseTestBaseline.cs b/test/Microsoft.ML.TestFramework/BaseTestBaseline.cs index aa353cfd1d..2ebdaf4ac7 100644 --- a/test/Microsoft.ML.TestFramework/BaseTestBaseline.cs +++ b/test/Microsoft.ML.TestFramework/BaseTestBaseline.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. @@ -127,9 +127,9 @@ private IEnumerable GetConfigurationDirs() if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) { - if(RuntimeInformation.ProcessArchitecture == Architecture.X64) + if (RuntimeInformation.ProcessArchitecture == Architecture.X64) configurationDirs.Add("osx-x64"); - else if(RuntimeInformation.ProcessArchitecture == Architecture.Arm64) + else if (RuntimeInformation.ProcessArchitecture == Architecture.Arm64) configurationDirs.Add("osx-arm64"); } if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) @@ -144,9 +144,9 @@ private IEnumerable GetConfigurationDirs() // The small difference comes from CPUMath using different instruction set: // 1. net framework and net core 2.1 uses CpuMathUtils.netstandard that uses SSE instruction set; // 2. net core 3.1 uses CpuMathUtils.netcoreapp that uses AVX, SSE or direct floating point calculation - // depending on hardward avaibility. + // depending on hardward availability. // AVX and SSE generates slightly different result due to nature of floating point math. 
- // So Ideally we should adding AVX support at CPUMath native library, + // So Ideally we should adding AVX support at CPUMath native library, // use below issue to track: https://github.com/dotnet/machinelearning/issues/5044 // don't need netcoreapp21 as this is the default case if (AppDomain.CurrentDomain.GetData("FX_PRODUCT_VERSION") != null) @@ -468,7 +468,7 @@ private IEnumerator LineEnumerator(TextReader reader, Func /// /// Checks that 's contents are a suffix of 's - /// contents, assuming one skips lines from , and + /// contents, assuming one skips lines from , and /// the file is read up to the line (or to the end, if it's not provided). /// protected bool CheckOutputIsSuffix(string basePath, string outPath, int skip = 0, string tailSignature = null) @@ -609,7 +609,7 @@ protected bool CheckOutputIsSuffix(string basePath, string outPath, int skip = 0 return true; } - public bool CompareNumbersWithTolerance(double expected, double actual, int? iterationOnCollection = null, + public bool CompareNumbersWithTolerance(double expected, double actual, int? iterationOnCollection = null, int digitsOfPrecision = DigitsOfPrecision, bool logFailure = true) { if (double.IsNaN(expected) && double.IsNaN(actual)) @@ -618,17 +618,17 @@ protected bool CheckOutputIsSuffix(string basePath, string outPath, int skip = 0 // this follows the IEEE recommendations for how to compare floating point numbers double allowedVariance = Math.Pow(10, -digitsOfPrecision); double delta = Round(expected, digitsOfPrecision) - Round(actual, digitsOfPrecision); - // limitting to the digits we care about. + // limitting to the digits we care about. delta = Math.Round(delta, digitsOfPrecision); bool inRange = delta >= -allowedVariance && delta <= allowedVariance; // for some cases, rounding up is not beneficial - // so checking on whether the difference is significant prior to rounding, and failing only then. - // example, for 5 digits of precision. + // so checking on whether the difference is significant prior to rounding, and failing only then. + // example, for 5 digits of precision. // F1 = 1.82844949 Rounds to 1.8284 // F2 = 1.8284502 Rounds to 1.8285 - // would fail the inRange == true check, but would suceed the following, and we doconsider those two numbers + // would fail the inRange == true check, but would succeed the following, and we do consider those two numbers // (1.82844949 - 1.8284502) = -0.00000071 double delta2 = 0; @@ -642,7 +642,7 @@ protected bool CheckOutputIsSuffix(string basePath, string outPath, int skip = 0 { var message = iterationOnCollection != null ? "" : $"Output and baseline mismatch at line {iterationOnCollection}." + Environment.NewLine; - if(logFailure) + if (logFailure) Fail(message + $"Values to compare are {expected} and {actual}" + Environment.NewLine + $"\t AllowedVariance: {allowedVariance}" + Environment.NewLine + @@ -668,7 +668,7 @@ private static double Round(double value, int digitsOfPrecision) } /// - /// Takes in 2 IDataViews and compares the specified column. + /// Takes in 2 IDataViews and compares the specified column. /// /// The name of the left column to compare. /// The name of the right column to compare. @@ -737,7 +737,7 @@ private void CompareSelectedColumns(string leftColumnName, string rightColumn { expectedScalarGetter = expectedCursor.GetGetter(leftColumn); - // If the right column is from onxx it will still be a VBuffer, just has a length of 1. + // If the right column is from onnx it will still be a VBuffer, just has a length of 1. 
if (isRightColumnOnnxScalar) actualVectorGetter = actualCursor.GetGetter>(rightColumn); else @@ -760,7 +760,7 @@ private void CompareSelectedColumns(string leftColumnName, string rightColumn { expectedScalarGetter(ref expectedScalar); - // If the right column is from onxx get a VBuffer instead and just use the first value. + // If the right column is from onnx get a VBuffer instead and just use the first value. if (isRightColumnOnnxScalar) { actualVectorGetter(ref actualVector); @@ -967,12 +967,12 @@ public void RunMTAThread(ThreadStart fn) /// /// Opens a stream writer for the specified file using the specified encoding and buffer size. - /// If the file exists, it can be either overwritten or appended to. + /// If the file exists, it can be either overwritten or appended to. /// If the file does not exist, a new file is created. /// /// The complete file path to write to. /// - /// true to append data to the file; false to overwrite the file. + /// true to append data to the file; false to overwrite the file. /// If the specified file does not exist, this parameter has no effect and a new file is created. /// /// The character encoding to use. @@ -998,7 +998,7 @@ protected static StreamReader OpenReader(string path) } /// - /// Invoke MAML with specified arguments without output baseline. + /// Invoke MAML with specified arguments without output baseline. /// This method is used in unit tests when the output is not baselined. /// If the output is to be baselined and compared, the other overload should be used. /// diff --git a/test/Microsoft.ML.TestFramework/BaseTestClass.cs b/test/Microsoft.ML.TestFramework/BaseTestClass.cs index 38c7496a3c..102fa205ed 100644 --- a/test/Microsoft.ML.TestFramework/BaseTestClass.cs +++ b/test/Microsoft.ML.TestFramework/BaseTestClass.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. @@ -128,4 +128,4 @@ protected string DeleteOutputPath(string name) return TestCommon.DeleteOutputPath(OutDir, name); } } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.TestFramework/BaseTestPredictorsMaml.cs b/test/Microsoft.ML.TestFramework/BaseTestPredictorsMaml.cs index 20734fa4e2..0cff1a5fb3 100644 --- a/test/Microsoft.ML.TestFramework/BaseTestPredictorsMaml.cs +++ b/test/Microsoft.ML.TestFramework/BaseTestPredictorsMaml.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. @@ -210,7 +210,7 @@ protected void Run(RunContext ctx, int digitsOfPrecision = DigitsOfPrecision, Nu // Run result processor on the console output. RunResultProcessorTest(new string[] { consOutPath.Path }, rpOutPath, rpArgs); - CheckEqualityNormalized(dir, rpName, digitsOfPrecision:digitsOfPrecision, parseOption: parseOption); + CheckEqualityNormalized(dir, rpName, digitsOfPrecision: digitsOfPrecision, parseOption: parseOption); } // Check the prediction output against its baseline. 
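Note: the comments tidied in CompareNumbersWithTolerance above describe a two-stage check: compare the values after rounding to the requested number of digits, and if that fails only because rounding pushed two very close values across a digit boundary, fall back to the rounded raw difference. A standalone sketch of that idea (hypothetical helpers, not the framework's exact Round implementation):

    using System;

    static double RoundToSignificantDigits(double value, int digits)
    {
        if (value == 0)
            return 0;
        // Scale so the leading digit sits just right of the decimal point, round, then scale back.
        double scale = Math.Pow(10, Math.Floor(Math.Log10(Math.Abs(value))) + 1);
        return scale * Math.Round(value / scale, digits);
    }

    static bool NearlyEqual(double expected, double actual, int digitsOfPrecision)
    {
        double allowedVariance = Math.Pow(10, -digitsOfPrecision);

        // Stage 1: compare after rounding to the digits we care about.
        double delta = RoundToSignificantDigits(expected, digitsOfPrecision)
            - RoundToSignificantDigits(actual, digitsOfPrecision);
        if (delta >= -allowedVariance && delta <= allowedVariance)
            return true;

        // Stage 2: with 5 digits, 1.82844949 rounds to 1.8284 while 1.8284502 rounds to 1.8285,
        // so stage 1 fails even though the raw difference (-0.00000071) is well within tolerance.
        double delta2 = Math.Round(expected - actual, digitsOfPrecision);
        return delta2 >= -allowedVariance && delta2 <= allowedVariance;
    }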
diff --git a/test/Microsoft.ML.TestFramework/CopyAction.cs b/test/Microsoft.ML.TestFramework/CopyAction.cs index eb1dcf26ef..69d81d8c7f 100644 --- a/test/Microsoft.ML.TestFramework/CopyAction.cs +++ b/test/Microsoft.ML.TestFramework/CopyAction.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/test/Microsoft.ML.TestFramework/DataPipe/Parquet.cs b/test/Microsoft.ML.TestFramework/DataPipe/Parquet.cs index 53c44e1a67..452e4ffa4a 100644 --- a/test/Microsoft.ML.TestFramework/DataPipe/Parquet.cs +++ b/test/Microsoft.ML.TestFramework/DataPipe/Parquet.cs @@ -20,7 +20,7 @@ protected override void Initialize() public void TestParquetPrimitiveDataTypes() { string pathData = GetDataPath(@"Parquet", "alltypes.parquet"); - TestCore(pathData, false, new[] { "loader=Parquet{bigIntDates=+}" } ); + TestCore(pathData, false, new[] { "loader=Parquet{bigIntDates=+}" }); Done(); } diff --git a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs index ddaf81c4fa..252ffb61b3 100644 --- a/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs +++ b/test/Microsoft.ML.TestFramework/DataPipe/TestDataPipeBase.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. @@ -411,7 +411,7 @@ protected void VerifyArgParsing(IHostEnvironment env, string[] strs) view = new ChooseColumnsByIndexTransform(env, chooseargs, view); } - var args = new TextLoader.Options() { AllowSparse = true, AllowQuoting = true}; + var args = new TextLoader.Options() { AllowSparse = true, AllowQuoting = true }; if (!CmdParser.ParseArguments(Env, argsLoader, args)) { Fail("Couldn't parse the args '{0}' in '{1}'", argsLoader, pathData); @@ -431,7 +431,7 @@ protected void VerifyArgParsing(IHostEnvironment env, string[] strs) return true; } - protected private string SavePipe(ILegacyDataLoader pipe, string suffix = "", string dir = "Pipeline") + private protected string SavePipe(ILegacyDataLoader pipe, string suffix = "", string dir = "Pipeline") { string name = TestName + suffix + ".zip"; string pathModel = DeleteOutputPath("SavePipe", name); @@ -665,7 +665,7 @@ protected bool CheckSameValues(IDataView view1, IDataView view2, bool exactTypes Check(tmp, "All same failed"); all &= tmp; - var view2EvenCols = view2.Schema.Where(col => (col.Index & 1) == 0); + var view2EvenCols = view2.Schema.Where(col => (col.Index & 1) == 0); using (var curs1 = view1.GetRowCursorForAllColumns()) using (var curs2 = view2.GetRowCursor(view2EvenCols)) { diff --git a/test/Microsoft.ML.TestFramework/GlobalBase.cs b/test/Microsoft.ML.TestFramework/GlobalBase.cs index d09f447ce6..6aa8ddce03 100644 --- a/test/Microsoft.ML.TestFramework/GlobalBase.cs +++ b/test/Microsoft.ML.TestFramework/GlobalBase.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
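The CheckSameValues hunk above also shows the idiom for cursoring over a subset of columns: DataViewSchema enumerates DataViewSchema.Column, so a LINQ filter over column indices can be handed straight to GetRowCursor, exactly as view2's even columns are selected. A minimal sketch of that usage on made-up data (the Row class and column names are illustrative, not the test's types):

using System;
using System.Linq;
using Microsoft.ML;

class EvenColumnCursorSketch
{
    private class Row
    {
        public float A;
        public float B;
        public float C;
    }

    static void Main()
    {
        var mlContext = new MLContext(seed: 0);
        var view = mlContext.Data.LoadFromEnumerable(new[]
        {
            new Row { A = 1, B = 2, C = 3 },
            new Row { A = 4, B = 5, C = 6 },
        });

        // Only the even-indexed columns (here A and C) are activated in the cursor,
        // the same (col.Index & 1) == 0 filter used when comparing the two views.
        var evenColumns = view.Schema.Where(column => (column.Index & 1) == 0);
        using (var cursor = view.GetRowCursor(evenColumns))
        {
            var getA = cursor.GetGetter<float>(view.Schema["A"]);
            float a = 0;
            while (cursor.MoveNext())
            {
                getA(ref a);
                Console.WriteLine(a);
            }
        }
    }
}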
diff --git a/test/Microsoft.ML.TestFramework/Learners.cs b/test/Microsoft.ML.TestFramework/Learners.cs index ab19eb7ce8..ae4ece26c0 100644 --- a/test/Microsoft.ML.TestFramework/Learners.cs +++ b/test/Microsoft.ML.TestFramework/Learners.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. @@ -813,4 +813,4 @@ public static PredictorAndArgs DssmDefault(int qryFeaturesCount, int docFeatures BaselineProgress = true }; } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.TestFramework/Properties/AssemblyInfo.cs b/test/Microsoft.ML.TestFramework/Properties/AssemblyInfo.cs index bdeb8111e2..d74297b8f7 100644 --- a/test/Microsoft.ML.TestFramework/Properties/AssemblyInfo.cs +++ b/test/Microsoft.ML.TestFramework/Properties/AssemblyInfo.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Runtime.CompilerServices; diff --git a/test/Microsoft.ML.TestFramework/RemoteExecutor.cs b/test/Microsoft.ML.TestFramework/RemoteExecutor.cs index 39775b4be1..365ce0d7c4 100644 --- a/test/Microsoft.ML.TestFramework/RemoteExecutor.cs +++ b/test/Microsoft.ML.TestFramework/RemoteExecutor.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/test/Microsoft.ML.TestFramework/SubComponent.cs b/test/Microsoft.ML.TestFramework/SubComponent.cs index bfb6ece074..496bf58ec3 100644 --- a/test/Microsoft.ML.TestFramework/SubComponent.cs +++ b/test/Microsoft.ML.TestFramework/SubComponent.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/test/Microsoft.ML.TestFramework/TestCommandBase.cs b/test/Microsoft.ML.TestFramework/TestCommandBase.cs index 1269ad9bb0..7a07cf73c3 100644 --- a/test/Microsoft.ML.TestFramework/TestCommandBase.cs +++ b/test/Microsoft.ML.TestFramework/TestCommandBase.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/test/Microsoft.ML.TestFramework/TestInitialization.cs b/test/Microsoft.ML.TestFramework/TestInitialization.cs index 0684e34e20..403bfc0f93 100644 --- a/test/Microsoft.ML.TestFramework/TestInitialization.cs +++ b/test/Microsoft.ML.TestFramework/TestInitialization.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
diff --git a/test/Microsoft.ML.TestFramework/TestSparseDataView.cs b/test/Microsoft.ML.TestFramework/TestSparseDataView.cs index 73e77f3052..972cdec5f2 100644 --- a/test/Microsoft.ML.TestFramework/TestSparseDataView.cs +++ b/test/Microsoft.ML.TestFramework/TestSparseDataView.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/test/Microsoft.ML.TestFrameworkCommon/Attributes/AttributeHelpers.cs b/test/Microsoft.ML.TestFrameworkCommon/Attributes/AttributeHelpers.cs index abdc70cb87..fdae4ab0dc 100644 --- a/test/Microsoft.ML.TestFrameworkCommon/Attributes/AttributeHelpers.cs +++ b/test/Microsoft.ML.TestFrameworkCommon/Attributes/AttributeHelpers.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. @@ -40,4 +40,4 @@ public static bool CheckLibcVersionGreaterThanMinimum(Version minVersion) [DllImport("libc", ExactSpelling = true, CallingConvention = CallingConvention.Cdecl)] private static extern IntPtr gnu_get_libc_version(); } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.TestFrameworkCommon/Attributes/EnvironmentSpecificFactAttribute.cs b/test/Microsoft.ML.TestFrameworkCommon/Attributes/EnvironmentSpecificFactAttribute.cs index fe754b6eb2..dc2c74e5f4 100644 --- a/test/Microsoft.ML.TestFrameworkCommon/Attributes/EnvironmentSpecificFactAttribute.cs +++ b/test/Microsoft.ML.TestFrameworkCommon/Attributes/EnvironmentSpecificFactAttribute.cs @@ -31,4 +31,4 @@ protected EnvironmentSpecificFactAttribute(string skipMessage) /// protected abstract bool IsEnvironmentSupported(); } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.TestFrameworkCommon/Attributes/EnvironmentSpecificTheoryAttribute.cs b/test/Microsoft.ML.TestFrameworkCommon/Attributes/EnvironmentSpecificTheoryAttribute.cs index c8f4b44c56..26173f5267 100644 --- a/test/Microsoft.ML.TestFrameworkCommon/Attributes/EnvironmentSpecificTheoryAttribute.cs +++ b/test/Microsoft.ML.TestFrameworkCommon/Attributes/EnvironmentSpecificTheoryAttribute.cs @@ -31,4 +31,4 @@ protected EnvironmentSpecificTheoryAttribute(string skipMessage) /// protected abstract bool IsEnvironmentSupported(); } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.TestFrameworkCommon/Attributes/FieldAwareFactorizationMachineFactAttribute.cs b/test/Microsoft.ML.TestFrameworkCommon/Attributes/FieldAwareFactorizationMachineFactAttribute.cs index 798a20054b..f9636f112d 100644 --- a/test/Microsoft.ML.TestFrameworkCommon/Attributes/FieldAwareFactorizationMachineFactAttribute.cs +++ b/test/Microsoft.ML.TestFrameworkCommon/Attributes/FieldAwareFactorizationMachineFactAttribute.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
@@ -11,7 +11,7 @@ public class FieldAwareFactorizationMachineFactAttribute : EnvironmentSpecificFa { private const string SkipMessage = "FieldAwareFactorizationMachine doesn't currently support non x86/x64. https://github.com/dotnet/machinelearning/issues/5871"; - public FieldAwareFactorizationMachineFactAttribute () : base(SkipMessage) + public FieldAwareFactorizationMachineFactAttribute() : base(SkipMessage) { } diff --git a/test/Microsoft.ML.TestFrameworkCommon/Attributes/LoggingLevelAttribute.cs b/test/Microsoft.ML.TestFrameworkCommon/Attributes/LoggingLevelAttribute.cs index 54b434d5ad..f515f93054 100644 --- a/test/Microsoft.ML.TestFrameworkCommon/Attributes/LoggingLevelAttribute.cs +++ b/test/Microsoft.ML.TestFrameworkCommon/Attributes/LoggingLevelAttribute.cs @@ -1,4 +1,8 @@ -using System; +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using System; using System.Collections.Generic; using System.Linq; using System.Text; @@ -9,7 +13,7 @@ namespace Microsoft.ML.TestFrameworkCommon.Attributes { public sealed class LogMessageKind : Attribute { - public ChannelMessageKind MessageKind { get; } + public ChannelMessageKind MessageKind { get; } public LogMessageKind(ChannelMessageKind messageKind) { MessageKind = messageKind; diff --git a/test/Microsoft.ML.TestFrameworkCommon/Attributes/NativeDependencyFactAttribute.cs b/test/Microsoft.ML.TestFrameworkCommon/Attributes/NativeDependencyFactAttribute.cs index fbb3aba0fb..29bd0baa17 100644 --- a/test/Microsoft.ML.TestFrameworkCommon/Attributes/NativeDependencyFactAttribute.cs +++ b/test/Microsoft.ML.TestFrameworkCommon/Attributes/NativeDependencyFactAttribute.cs @@ -2,8 +2,8 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. -using Microsoft.ML.TestFrameworkCommon.Utility; using Microsoft.ML.TestFrameworkCommon.Attributes; +using Microsoft.ML.TestFrameworkCommon.Utility; namespace Microsoft.ML.TestFramework.Attributes { @@ -22,4 +22,4 @@ protected override bool IsEnvironmentSupported() return NativeLibrary.NativeLibraryExists(_library); } } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.TestFrameworkCommon/Attributes/NativeDependencyTheoryAttribute.cs b/test/Microsoft.ML.TestFrameworkCommon/Attributes/NativeDependencyTheoryAttribute.cs index 6e97944050..444e17b5e6 100644 --- a/test/Microsoft.ML.TestFrameworkCommon/Attributes/NativeDependencyTheoryAttribute.cs +++ b/test/Microsoft.ML.TestFrameworkCommon/Attributes/NativeDependencyTheoryAttribute.cs @@ -2,8 +2,8 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
-using Microsoft.ML.TestFrameworkCommon.Utility; using Microsoft.ML.TestFrameworkCommon.Attributes; +using Microsoft.ML.TestFrameworkCommon.Utility; namespace Microsoft.ML.TestFramework.Attributes { @@ -22,4 +22,4 @@ protected override bool IsEnvironmentSupported() return NativeLibrary.NativeLibraryExists(_library); } } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.TestFrameworkCommon/Attributes/OnnxFactAttribute.cs b/test/Microsoft.ML.TestFrameworkCommon/Attributes/OnnxFactAttribute.cs index 7639a2b425..cf2414427c 100644 --- a/test/Microsoft.ML.TestFrameworkCommon/Attributes/OnnxFactAttribute.cs +++ b/test/Microsoft.ML.TestFrameworkCommon/Attributes/OnnxFactAttribute.cs @@ -29,4 +29,4 @@ protected override bool IsEnvironmentSupported() || AttributeHelpers.CheckLibcVersionGreaterThanMinimum(new Version(2, 23))) && Utility.NativeLibrary.NativeLibraryExists("onnxruntime"); } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.TestFrameworkCommon/Datasets.cs b/test/Microsoft.ML.TestFrameworkCommon/Datasets.cs index dcdcbb8d53..0882fbd204 100644 --- a/test/Microsoft.ML.TestFrameworkCommon/Datasets.cs +++ b/test/Microsoft.ML.TestFrameworkCommon/Datasets.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/test/Microsoft.ML.TestFrameworkCommon/TestLogger.cs b/test/Microsoft.ML.TestFrameworkCommon/TestLogger.cs index ec615d85b3..67a3aaa5f3 100644 --- a/test/Microsoft.ML.TestFrameworkCommon/TestLogger.cs +++ b/test/Microsoft.ML.TestFrameworkCommon/TestLogger.cs @@ -1,4 +1,8 @@ -using System; +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using System; using System.IO; using System.Text; using Xunit.Abstractions; diff --git a/test/Microsoft.ML.Tests/AnomalyDetectionTests.cs b/test/Microsoft.ML.Tests/AnomalyDetectionTests.cs index d2b1104bdb..3d00592191 100644 --- a/test/Microsoft.ML.Tests/AnomalyDetectionTests.cs +++ b/test/Microsoft.ML.Tests/AnomalyDetectionTests.cs @@ -174,10 +174,10 @@ private static void ExecutePipelineWithGivenRandomizedPcaTrainer(MLContext mlCon } - /// - /// Help function used to execute trainers defined in . - /// - private static void ExecuteRandomizedPcaTrainerChangeThreshold(MLContext mlContext, Trainers.RandomizedPcaTrainer trainer) + /// + /// Help function used to execute trainers defined in . + /// + private static void ExecuteRandomizedPcaTrainerChangeThreshold(MLContext mlContext, Trainers.RandomizedPcaTrainer trainer) { var samples = new List() { @@ -282,7 +282,7 @@ public void PcaTrainerInvalidEigenvectorsException() // and produce eigenvectors with NaN. 
var model = trainer.Fit(data); } - catch(ArgumentOutOfRangeException ex) + catch (ArgumentOutOfRangeException ex) { exceptionThrown = true; Assert.Contains("The learnt eigenvectors contained NaN values", ex.Message); diff --git a/test/Microsoft.ML.Tests/CachingTests.cs b/test/Microsoft.ML.Tests/CachingTests.cs index 1393957e23..b54a02bb75 100644 --- a/test/Microsoft.ML.Tests/CachingTests.cs +++ b/test/Microsoft.ML.Tests/CachingTests.cs @@ -65,7 +65,7 @@ public void CacheCheckpointTest() public void CacheOnEmptyEstimatorChainTest() { var ex = Assert.Throws(() => CacheOnEmptyEstimatorChain()); - Assert.Contains("Current estimator chain has no estimator, can't append cache checkpoint.", ex.Message, + Assert.Contains("Current estimator chain has no estimator, can't append cache checkpoint.", ex.Message, StringComparison.InvariantCultureIgnoreCase); } diff --git a/test/Microsoft.ML.Tests/CalibratedModelParametersTests.cs b/test/Microsoft.ML.Tests/CalibratedModelParametersTests.cs index 2ff02b7781..0b0e4b241f 100644 --- a/test/Microsoft.ML.Tests/CalibratedModelParametersTests.cs +++ b/test/Microsoft.ML.Tests/CalibratedModelParametersTests.cs @@ -47,7 +47,7 @@ public void TestValueMapperCalibratedModelParametersLoading() var data = GetDenseDataset(); var model = ML.BinaryClassification.Trainers.Gam( - new GamBinaryTrainer.Options { NumberOfThreads = 1}).Fit(data); + new GamBinaryTrainer.Options { NumberOfThreads = 1 }).Fit(data); var modelAndSchemaPath = GetOutputPath("TestValueMapperCalibratedModelParametersLoading.zip"); ML.Model.Save(model, data.Schema, modelAndSchemaPath); @@ -70,7 +70,7 @@ public void TestFeatureWeightsCalibratedModelParametersLoading() var data = GetDenseDataset(); var model = ML.BinaryClassification.Trainers.FastTree( - new FastTreeBinaryTrainer.Options { NumberOfThreads = 1}).Fit(data); + new FastTreeBinaryTrainer.Options { NumberOfThreads = 1 }).Fit(data); var modelAndSchemaPath = GetOutputPath("TestFeatureWeightsCalibratedModelParametersLoading.zip"); ML.Model.Save(model, data.Schema, modelAndSchemaPath); @@ -119,7 +119,7 @@ private IDataView GetDenseDataset() yArray[i] = (float)(10 * x1 + 20 * x2Important + 5.5 * x3 + noise); } - GetBinaryClassificationLabels(yArray); + GetBinaryClassificationLabels(yArray); // Create data view. var bldr = new ArrayDataViewBuilder(Env); diff --git a/test/Microsoft.ML.Tests/CollectionsDataViewTest.cs b/test/Microsoft.ML.Tests/CollectionsDataViewTest.cs index fb1fe59dd3..ae5b323338 100644 --- a/test/Microsoft.ML.Tests/CollectionsDataViewTest.cs +++ b/test/Microsoft.ML.Tests/CollectionsDataViewTest.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
@@ -278,8 +278,8 @@ public abstract class BaseClassWithInheritedProperties public class ClassWithPrivateFieldsAndProperties { public ClassWithPrivateFieldsAndProperties() { seq++; _unusedStaticField++; _unusedPrivateField1 = 100; } - static public int seq; - static public int _unusedStaticField; + public static int seq; + public static int _unusedStaticField; private int _unusedPrivateField1; private string _fString; diff --git a/test/Microsoft.ML.Tests/EvaluateTests.cs b/test/Microsoft.ML.Tests/EvaluateTests.cs index ee5c58016f..28a6cfa3a5 100644 --- a/test/Microsoft.ML.Tests/EvaluateTests.cs +++ b/test/Microsoft.ML.Tests/EvaluateTests.cs @@ -52,16 +52,20 @@ public void MulticlassEvaluatorTopKArray() // After introducing a sample whose label was unseen (i.e. the Score array doesn't assign it a probability) // then the Top K array changes, as its values are divided by the total number of instances // that were evaluated. - var inputArray2 = inputArray.AppendElement(new MulticlassEvaluatorInput { - Label = 5, Score = new[] { 0.1f, 0.3f, 0.2f, 0.4f }, PredictedLabel = 3 }); - + var inputArray2 = inputArray.AppendElement(new MulticlassEvaluatorInput + { + Label = 5, + Score = new[] { 0.1f, 0.3f, 0.2f, 0.4f }, + PredictedLabel = 3 + }); + var expectedTopKArray2 = new[] { 0.2d, 0.4d, 0.6d, 0.8d }; var inputDV2 = mlContext.Data.LoadFromEnumerable(inputArray2); var metrics2 = mlContext.MulticlassClassification.Evaluate(inputDV2, topKPredictionCount: 4); - var outpu2 = metrics2.TopKAccuracyForAllK.ToArray(); + var output2 = metrics2.TopKAccuracyForAllK.ToArray(); for (int i = 0; i < expectedTopKArray2.Length; i++) - Assert.Equal(expectedTopKArray2[i], outpu2[i], precision: 7); + Assert.Equal(expectedTopKArray2[i], output2[i], precision: 7); } } } diff --git a/test/Microsoft.ML.Tests/ExpressionLanguageTests/ExpressionLanguageTests.cs b/test/Microsoft.ML.Tests/ExpressionLanguageTests/ExpressionLanguageTests.cs index 3180440f9a..17e85ce760 100644 --- a/test/Microsoft.ML.Tests/ExpressionLanguageTests/ExpressionLanguageTests.cs +++ b/test/Microsoft.ML.Tests/ExpressionLanguageTests/ExpressionLanguageTests.cs @@ -15,14 +15,14 @@ using System.Threading; using Microsoft.ML; using Microsoft.ML.Data; -using Microsoft.ML.Data.IO; using Microsoft.ML.Data.Conversion; +using Microsoft.ML.Data.IO; using Microsoft.ML.Internal.Utilities; -using Microsoft.ML.Runtime; -using Microsoft.ML.Transforms; using Microsoft.ML.RunTests; -using Microsoft.ML.Tests; +using Microsoft.ML.Runtime; using Microsoft.ML.TestFramework.Attributes; +using Microsoft.ML.Tests; +using Microsoft.ML.Transforms; using Xunit; using Xunit.Abstractions; @@ -210,7 +210,7 @@ private void Run(string name) TestFuncs1.Writer = null; } - LDone: +LDone: wrt.WriteLine("===== End {0} =====", scriptName); } } @@ -426,7 +426,7 @@ public sealed class TestFuncs1 : IFunctionProvider // Should probably figure out a proper way to do this. 
internal static TextWriter Writer; - private volatile static TestFuncs1 _instance; + private static volatile TestFuncs1 _instance; public static TestFuncs1 Instance { @@ -611,7 +611,7 @@ public static TX DumpChars(TX a) public sealed class TestFuncs2 : IFunctionProvider { - private volatile static TestFuncs2 _instance; + private static volatile TestFuncs2 _instance; public static TestFuncs2 Instance { get diff --git a/test/Microsoft.ML.Tests/FeatureContributionTests.cs b/test/Microsoft.ML.Tests/FeatureContributionTests.cs index d308755b87..5ff055eafd 100644 --- a/test/Microsoft.ML.Tests/FeatureContributionTests.cs +++ b/test/Microsoft.ML.Tests/FeatureContributionTests.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/test/Microsoft.ML.Tests/ImagesTests.cs b/test/Microsoft.ML.Tests/ImagesTests.cs index 729f81dccd..a8e3a2049c 100644 --- a/test/Microsoft.ML.Tests/ImagesTests.cs +++ b/test/Microsoft.ML.Tests/ImagesTests.cs @@ -220,7 +220,7 @@ public void TestGreyscaleTransformImages() public void TestGrayScaleInMemory() { // Create an image list. - var images = new List(){ new ImageDataPoint(10, 10, Color.Blue), new ImageDataPoint(10, 10, Color.Red) }; + var images = new List() { new ImageDataPoint(10, 10, Color.Blue), new ImageDataPoint(10, 10, Color.Red) }; // Convert the list of data points to an IDataView object, which is consumable by ML.NET API. var data = ML.Data.LoadFromEnumerable(images); @@ -318,9 +318,9 @@ public void TestBackAndForthConversionWithAlphaInterleave() var images = new ImageLoadingTransformer(env, imageFolder, ("ImageReal", "ImagePath")).Transform(data); var cropped = new ImageResizingTransformer(env, "ImageCropped", imageWidth, imageHeight, "ImageReal").Transform(images); - var pixels = new ImagePixelExtractingTransformer(env, "ImagePixels", "ImageCropped", ImagePixelExtractingEstimator.ColorBits.All, interleavePixelColors: true, scaleImage: 2f/19, offsetImage: 30).Transform(cropped); + var pixels = new ImagePixelExtractingTransformer(env, "ImagePixels", "ImageCropped", ImagePixelExtractingEstimator.ColorBits.All, interleavePixelColors: true, scaleImage: 2f / 19, offsetImage: 30).Transform(cropped); IDataView backToBitmaps = new VectorToImageConvertingTransformer(env, "ImageRestored", imageHeight, imageWidth, "ImagePixels", - ImagePixelExtractingEstimator.ColorBits.All, interleavedColors: true, scaleImage: 19/2f, offsetImage: -30).Transform(pixels); + ImagePixelExtractingEstimator.ColorBits.All, interleavedColors: true, scaleImage: 19 / 2f, offsetImage: -30).Transform(pixels); var fname = nameof(TestBackAndForthConversionWithAlphaInterleave) + "_model.zip"; @@ -432,9 +432,9 @@ public void TestBackAndForthConversionWithDifferentOrder() var images = new ImageLoadingTransformer(env, imageFolder, ("ImageReal", "ImagePath")).Transform(data); var cropped = new ImageResizingTransformer(env, "ImageCropped", imageWidth, imageHeight, "ImageReal").Transform(images); - var pixels = new ImagePixelExtractingTransformer(env, "ImagePixels", "ImageCropped", ImagePixelExtractingEstimator.ColorBits.All, orderOfExtraction:ImagePixelExtractingEstimator.ColorsOrder.ABRG).Transform(cropped); + var pixels = new ImagePixelExtractingTransformer(env, "ImagePixels", "ImageCropped", ImagePixelExtractingEstimator.ColorBits.All, orderOfExtraction: 
ImagePixelExtractingEstimator.ColorsOrder.ABRG).Transform(cropped); IDataView backToBitmaps = new VectorToImageConvertingTransformer(env, "ImageRestored", imageHeight, imageWidth, "ImagePixels", - ImagePixelExtractingEstimator.ColorBits.All,orderOfColors: ImagePixelExtractingEstimator.ColorsOrder.ABRG).Transform(pixels); + ImagePixelExtractingEstimator.ColorBits.All, orderOfColors: ImagePixelExtractingEstimator.ColorsOrder.ABRG).Transform(pixels); var fname = nameof(TestBackAndForthConversionWithDifferentOrder) + "_model.zip"; @@ -898,7 +898,7 @@ private static IEnumerable GenerateRandomDataPoints(int count, int se { yield return new DataPoint { - Features = Enumerable.Repeat(0, InputSize).Select(x => random.NextDouble()*100).ToArray() + Features = Enumerable.Repeat(0, InputSize).Select(x => random.NextDouble() * 100).ToArray() }; } } @@ -983,7 +983,7 @@ public void ResizeInMemoryImages() // Also check usage of prediction Engine // And that the references to the original image objects aren't lost var predEngine = mlContext.Model.CreatePredictionEngine(model); - for(int i = 0; i < dataObjects.Count(); i++) + for (int i = 0; i < dataObjects.Count(); i++) { var prediction = predEngine.Predict(dataObjects[i]); Assert.Equal(100, prediction.ResizedImage.Height); diff --git a/test/Microsoft.ML.Tests/OnnxConversionTest.cs b/test/Microsoft.ML.Tests/OnnxConversionTest.cs index e3023a10e9..6fde298c52 100644 --- a/test/Microsoft.ML.Tests/OnnxConversionTest.cs +++ b/test/Microsoft.ML.Tests/OnnxConversionTest.cs @@ -88,7 +88,7 @@ public void SimpleEndToEndOnnxConversionTest() var onnxTextName = "SimplePipeline.txt"; // Step 2: Convert ML.NET model to ONNX format and save it as a model file and a text file. - TestPipeline(dynamicPipeline, cachedTrainData, onnxFileName, new ColumnComparison[] { new ColumnComparison ("Score", 1) }, onnxTextName, subDir); + TestPipeline(dynamicPipeline, cachedTrainData, onnxFileName, new ColumnComparison[] { new ColumnComparison("Score", 1) }, onnxTextName, subDir); // Step 3: Check ONNX model's text format. This test will be not necessary if Step 2 can run on Linux and // Mac to support cross-platform tests. @@ -194,7 +194,7 @@ public void RegressionTrainersOnnxConversionTest() }; if (NativeLibrary.NativeLibraryExists("MklImports")) { - estimators.Add(mlContext.Regression.Trainers.Ols("Target","FeatureVector")); + estimators.Add(mlContext.Regression.Trainers.Ols("Target", "FeatureVector")); } if (Environment.Is64BitProcess && NativeLibrary.NativeLibraryExists("lib_lightgbm")) { @@ -1020,7 +1020,7 @@ private class HashData [Theory] [CombinatorialData] public void MurmurHashKeyTest( - [CombinatorialValues(DataKind.Byte, DataKind.UInt16, DataKind.UInt32, DataKind.UInt64)]DataKind keyType) + [CombinatorialValues(DataKind.Byte, DataKind.UInt16, DataKind.UInt32, DataKind.UInt64)] DataKind keyType) { var dataFile = DeleteOutputPath("KeysToOnnx.txt"); File.WriteAllLines(dataFile, @@ -1552,7 +1552,7 @@ public void CustomStopWordsRemovingEstimatorOnnxTest() var dataView = mlContext.Data.LoadFromEnumerable(samples); var onnxFileName = $"CustomStopWordsRemovingEstimator.onnx"; - TestPipeline(pipeline, dataView, onnxFileName, new ColumnComparison[] { new ColumnComparison("WordsWithoutStopWords")}); + TestPipeline(pipeline, dataView, onnxFileName, new ColumnComparison[] { new ColumnComparison("WordsWithoutStopWords") }); Done(); } @@ -2086,7 +2086,7 @@ public void OneHotHashEncodingOnnxConversionWithCustomOpSetVersionTest() bool fixZero) { // Shared variables. 
- var columnsToCompare = new ColumnComparison[] { new ColumnComparison ("Features") }; + var columnsToCompare = new ColumnComparison[] { new ColumnComparison("Features") }; IEstimator pipe; string onnxFileName; @@ -2256,7 +2256,7 @@ private void TestPipeline(EstimatorChain pip var onnxResult = onnxTransformer.Transform(dataView); // Compare all the columns between ML.Net and ONNX. - foreach(var column in columnsToCompare) + foreach (var column in columnsToCompare) { CompareResults(column.Name, column.Name, transformedData, onnxResult, column.Precision, true); } diff --git a/test/Microsoft.ML.Tests/OnnxSequenceTypeWithAttributesTest.cs b/test/Microsoft.ML.Tests/OnnxSequenceTypeWithAttributesTest.cs index 7ca7395050..858c81408f 100644 --- a/test/Microsoft.ML.Tests/OnnxSequenceTypeWithAttributesTest.cs +++ b/test/Microsoft.ML.Tests/OnnxSequenceTypeWithAttributesTest.cs @@ -4,16 +4,16 @@ using System.Collections.Generic; using System.Drawing; +using System.IO; +using System.Linq; using Microsoft.ML.Data; using Microsoft.ML.RunTests; +using Microsoft.ML.TestFramework.Attributes; +using Microsoft.ML.TestFrameworkCommon.Attributes; using Microsoft.ML.Transforms.Image; using Microsoft.ML.Transforms.Onnx; using Xunit; using Xunit.Abstractions; -using System.Linq; -using System.IO; -using Microsoft.ML.TestFramework.Attributes; -using Microsoft.ML.TestFrameworkCommon.Attributes; namespace Microsoft.ML.Tests { @@ -64,7 +64,7 @@ public void OnnxSequenceTypeWithColumnNameAttributeTest() var onnxOut = output.Output.FirstOrDefault(); Assert.True(onnxOut.Count == 3, "Output missing data."); var keys = new List(onnxOut.Keys); - for(var i =0; i < onnxOut.Count; ++i) + for (var i = 0; i < onnxOut.Count; ++i) { Assert.Equal(onnxOut[keys[i]], input.Input[i]); } @@ -112,4 +112,4 @@ public void OnnxSequenceTypeWithColumnNameAttributeTestWithWrongCustomType() } } } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs index c189489d56..d44f191fde 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs @@ -396,7 +396,7 @@ public void GetLinearModelWeights() var linearModel = model.LastTransformer.Model; - var weights = linearModel.Weights; + var weights = linearModel.Weights; } [Fact] @@ -527,12 +527,12 @@ private void TextFeaturizationOn(string dataPath) .Append(mlContext.Transforms.Text.ProduceWordBags("BagOfWords", "NormalizedMessage")) // NLP pipeline 2: bag of bigrams, using hashes instead of dictionary indices. - .Append(mlContext.Transforms.Text.ProduceHashedWordBags("BagOfBigrams","NormalizedMessage", + .Append(mlContext.Transforms.Text.ProduceHashedWordBags("BagOfBigrams", "NormalizedMessage", ngramLength: 2, useAllLengths: false)) // NLP pipeline 3: bag of tri-character sequences with TF-IDF weighting. .Append(mlContext.Transforms.Text.TokenizeIntoCharactersAsKeys("MessageChars", "Message")) - .Append(mlContext.Transforms.Text.ProduceNgrams("BagOfTrichar", "MessageChars", + .Append(mlContext.Transforms.Text.ProduceNgrams("BagOfTrichar", "MessageChars", ngramLength: 3, weighting: NgramExtractingEstimator.WeightingCriteria.TfIdf)) // NLP pipeline 4: word embeddings. 
diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/DecomposableTrainAndPredict.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/DecomposableTrainAndPredict.cs index 06fc5c1ac9..9132a25e76 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/DecomposableTrainAndPredict.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/DecomposableTrainAndPredict.cs @@ -30,7 +30,7 @@ public void DecomposableTrainAndPredict() var data = ml.Data.LoadFromTextFile(dataPath, separatorChar: ','); - var pipeline = new ColumnConcatenatingEstimator (ml, "Features", "SepalLength", "SepalWidth", "PetalLength", "PetalWidth") + var pipeline = new ColumnConcatenatingEstimator(ml, "Features", "SepalLength", "SepalWidth", "PetalLength", "PetalWidth") .Append(new ValueToKeyMappingEstimator(ml, "Label"), TransformerScope.TrainTest) .Append(ml.MulticlassClassification.Trainers.SdcaMaximumEntropy( new SdcaMaximumEntropyMulticlassTrainer.Options { MaximumNumberOfIterations = 100, Shuffle = true, NumberOfThreads = 1, })) diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Extensibility.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Extensibility.cs index fee208eef8..cd6558a279 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Extensibility.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Extensibility.cs @@ -38,7 +38,7 @@ public void Extensibility() j.SepalLength = i.SepalLength; j.SepalWidth = i.SepalWidth; }; - var pipeline = new ColumnConcatenatingEstimator (ml, "Features", "SepalLength", "SepalWidth", "PetalLength", "PetalWidth") + var pipeline = new ColumnConcatenatingEstimator(ml, "Features", "SepalLength", "SepalWidth", "PetalLength", "PetalWidth") .Append(new CustomMappingEstimator(ml, action, null), TransformerScope.TrainTest) .Append(new ValueToKeyMappingEstimator(ml, "Label"), TransformerScope.TrainTest) .Append(ml.MulticlassClassification.Trainers.SdcaMaximumEntropy( diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/PredictAndMetadata.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/PredictAndMetadata.cs index 70519ff0f9..ea1ac17a00 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/PredictAndMetadata.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/PredictAndMetadata.cs @@ -124,7 +124,7 @@ public void MulticlassConfusionMatrixSlotNames() // Check that the SlotNames column is not there. 
Assert.Null(scoredData2.Schema["Score"].Annotations.Schema.GetColumnOrNull(AnnotationUtils.Kinds.SlotNames)); - + //Assert that the confusion matrix has just ints, as class indicators, in the Annotations of the Count column Assert.Equal("0", metrics2.ConfusionMatrix.PredictedClassesIndicators[0].ToString()); Assert.Equal("1", metrics2.ConfusionMatrix.PredictedClassesIndicators[1].ToString()); diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/SimpleTrainAndPredict.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/SimpleTrainAndPredict.cs index 86d5708675..68eec58d50 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/SimpleTrainAndPredict.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/SimpleTrainAndPredict.cs @@ -4,8 +4,8 @@ using System.Linq; using Microsoft.ML.RunTests; -using Microsoft.ML.TestFrameworkCommon; using Microsoft.ML.TestFramework.Attributes; +using Microsoft.ML.TestFrameworkCommon; using Microsoft.ML.Trainers; using Xunit; diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithInitialPredictor.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithInitialPredictor.cs index 1c55100343..a5f11735df 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithInitialPredictor.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithInitialPredictor.cs @@ -38,7 +38,7 @@ public void TrainWithInitialPredictor() var firstModel = trainer.Fit(trainData); // Train the second predictor on the same data. - var secondTrainer = ml.BinaryClassification.Trainers.AveragedPerceptron("Label","Features"); + var secondTrainer = ml.BinaryClassification.Trainers.AveragedPerceptron("Label", "Features"); var trainRoles = new RoleMappedData(trainData, label: "Label", feature: "Features"); var finalModel = ((ITrainer)secondTrainer).Train(new TrainContext(trainRoles, initialPredictor: firstModel.Model)); diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/TestApi.cs b/test/Microsoft.ML.Tests/Scenarios/Api/TestApi.cs index a0d87e2d54..d701aa9876 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/TestApi.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/TestApi.cs @@ -330,10 +330,10 @@ public void TestSplitsSchema() }; // Splitting a dataset shouldn't affect its schema - foreach(var split in splits) + foreach (var split in splits) { Assert.Equal(fullInput.Schema.Count, split.Schema.Count); - foreach(var col in fullInput.Schema) + foreach (var col in fullInput.Schema) { Assert.Equal(col.Name, split.Schema[col.Index].Name); } @@ -495,7 +495,7 @@ public void TestSplitsWithSamplingKeyColumn() nameof(Input.TimeSpanStrat), "KeyStrat" }; - foreach(var colname in colnames) + foreach (var colname in colnames) { var cvSplits = mlContext.Data.CrossValidationSplit(inputWithKey, numberOfFolds: 2, samplingKeyColumnName: colname); var idsTest1 = cvSplits[0].TestSet.GetColumn(cvSplits[0].TestSet.Schema[nameof(Input.Id)]); diff --git a/test/Microsoft.ML.Tests/Scenarios/IrisPlantClassificationTests.cs b/test/Microsoft.ML.Tests/Scenarios/IrisPlantClassificationTests.cs index 4181ce3b19..fa6579a598 100644 --- a/test/Microsoft.ML.Tests/Scenarios/IrisPlantClassificationTests.cs +++ b/test/Microsoft.ML.Tests/Scenarios/IrisPlantClassificationTests.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
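TestSplitsSchema and TestSplitsWithSamplingKeyColumn above exercise samplingKeyColumnName, which keeps every row sharing the same key value on the same side of a train/test split, or in the same cross-validation fold, for each of the candidate key columns. A small sketch of that option on made-up data (Row and GroupId are illustrative names, not the test's types):

using System;
using System.Linq;
using Microsoft.ML;

class SamplingKeySplitSketch
{
    private class Row
    {
        public float Feature;
        public float GroupId;
    }

    static void Main()
    {
        var mlContext = new MLContext(seed: 0);

        // Ten rows per group; GroupId is the column the splitter keys on.
        var data = mlContext.Data.LoadFromEnumerable(
            Enumerable.Range(0, 100).Select(i => new Row { Feature = i, GroupId = i / 10 }));

        // Rows with the same GroupId value never get separated by the split.
        var split = mlContext.Data.TrainTestSplit(data, testFraction: 0.2, samplingKeyColumnName: "GroupId");

        // The same option controls how rows are assigned to cross-validation folds.
        var folds = mlContext.Data.CrossValidationSplit(data, numberOfFolds: 2, samplingKeyColumnName: "GroupId");

        Console.WriteLine($"Train: {split.TrainSet.GetRowCount()}, Test: {split.TestSet.GetRowCount()}, Folds: {folds.Count}");
    }
}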
diff --git a/test/Microsoft.ML.Tests/Scenarios/WordBagTest.cs b/test/Microsoft.ML.Tests/Scenarios/WordBagTest.cs index 5d2af36bd3..db7a958b22 100644 --- a/test/Microsoft.ML.Tests/Scenarios/WordBagTest.cs +++ b/test/Microsoft.ML.Tests/Scenarios/WordBagTest.cs @@ -2,9 +2,9 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. -using Xunit; using System.Collections.Generic; using Microsoft.ML.Transforms.Text; +using Xunit; namespace Microsoft.ML.Scenarios { @@ -19,12 +19,12 @@ public static void WordBags() new TextData(){ Text = "This is an example to compute bag-of-word features." }, new TextData(){ Text = "ML.NET's ProduceWordBags API produces bag-of-word features from input text." }, new TextData(){ Text = "It does so by first tokenizing text/string into words/tokens then " }, - new TextData(){ Text = "computing n-grams and their neumeric values." }, + new TextData(){ Text = "computing n-grams and their numeric values." }, new TextData(){ Text = "Each position in the output vector corresponds to a particular n-gram." }, new TextData(){ Text = "The value at each position corresponds to," }, new TextData(){ Text = "the number of times n-gram occurred in the data (Tf), or" }, new TextData(){ Text = "the inverse of the number of documents contain the n-gram (Idf)," }, - new TextData(){ Text = "or compute both and multipy together (Tf-Idf)." }, + new TextData(){ Text = "or compute both and multiply together (Tf-Idf)." }, }; var dataview = mlContext.Data.LoadFromEnumerable(samples); @@ -44,7 +44,7 @@ public static void WordBags() 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }); - Assert.Equal(prediction.Text2, new float[] {2, 2, 2, 2, 2, 2, 1, 1}); + Assert.Equal(prediction.Text2, new float[] { 2, 2, 2, 2, 2, 2, 1, 1 }); } [Fact] @@ -56,12 +56,12 @@ public static void WordBagsHash() new TextData(){ Text = "This is an example to compute bag-of-word features." }, new TextData(){ Text = "ML.NET's ProduceWordBags API produces bag-of-word features from input text." }, new TextData(){ Text = "It does so by first tokenizing text/string into words/tokens then " }, - new TextData(){ Text = "computing n-grams and their neumeric values." }, + new TextData(){ Text = "computing n-grams and their numeric values." }, new TextData(){ Text = "Each position in the output vector corresponds to a particular n-gram." }, new TextData(){ Text = "The value at each position corresponds to," }, new TextData(){ Text = "the number of times n-gram occurred in the data (Tf), or" }, new TextData(){ Text = "the inverse of the number of documents contain the n-gram (Idf)," }, - new TextData(){ Text = "or compute both and multipy together (Tf-Idf)." }, + new TextData(){ Text = "or compute both and multiply together (Tf-Idf)." 
}, }; var dataview = mlContext.Data.LoadFromEnumerable(samples); diff --git a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/TensorflowTests.cs b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/TensorflowTests.cs index 30861521f9..06cfe8ceba 100644 --- a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/TensorflowTests.cs +++ b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/TensorflowTests.cs @@ -9,20 +9,20 @@ using System.Linq; using System.Runtime.InteropServices; using Microsoft.ML.Data; -using Microsoft.ML.Vision; +using Microsoft.ML.Internal.Utilities; +using Microsoft.ML.Runtime; +using Microsoft.ML.TensorFlow; using Microsoft.ML.TestFramework; using Microsoft.ML.TestFramework.Attributes; using Microsoft.ML.TestFrameworkCommon; +using Microsoft.ML.Trainers; using Microsoft.ML.Transforms; using Microsoft.ML.Transforms.Image; -using Microsoft.ML.TensorFlow; -using InMemoryImage = Microsoft.ML.Tests.ImageTests.InMemoryImage; +using Microsoft.ML.Vision; using Xunit; using Xunit.Abstractions; using static Microsoft.ML.DataOperationsCatalog; -using Microsoft.ML.Trainers; -using Microsoft.ML.Internal.Utilities; -using Microsoft.ML.Runtime; +using InMemoryImage = Microsoft.ML.Tests.ImageTests.InMemoryImage; namespace Microsoft.ML.Scenarios { @@ -1219,7 +1219,8 @@ public void TensorFlowSaveAndLoadSavedModel() }; // Check the predictions consistency - for (var i = 0; i < predictions.Length; i++) { + for (var i = 0; i < predictions.Length; i++) + { for (var j = 0; j < predictions[i].PredictedScores.Length; j++) Assert.Equal(predictions[i].PredictedScores[j], testPredictions[i].PredictedScores[j], 2); } diff --git a/test/Microsoft.ML.Tests/TensorFlowEstimatorTests.cs b/test/Microsoft.ML.Tests/TensorFlowEstimatorTests.cs index ff6dbd456f..1eb9b394fe 100644 --- a/test/Microsoft.ML.Tests/TensorFlowEstimatorTests.cs +++ b/test/Microsoft.ML.Tests/TensorFlowEstimatorTests.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
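The wording fixes in WordBagTest above touch the comments describing the three n-gram weighting schemes (Tf, Idf, Tf-Idf). A compact sketch of how those options are selected, following the same estimators the cookbook sample uses; the column names and sample text here are illustrative only:

using System;
using Microsoft.ML;
using Microsoft.ML.Transforms.Text;

class WordBagWeightingSketch
{
    private class TextData
    {
        public string Text { get; set; }
    }

    static void Main()
    {
        var mlContext = new MLContext();
        var samples = new[]
        {
            new TextData { Text = "This is an example to compute bag-of-word features." },
            new TextData { Text = "ML.NET's ProduceWordBags API produces bag-of-word features from input text." },
        };
        var data = mlContext.Data.LoadFromEnumerable(samples);

        // Bag of words with raw counts (Tf is the default weighting), plus a bag of
        // character trigrams where each slot is weighted by Tf-Idf: the n-gram count
        // scaled by how rare the n-gram is across documents, as the comments describe.
        var pipeline = mlContext.Transforms.Text.ProduceWordBags("BagOfWords", "Text")
            .Append(mlContext.Transforms.Text.TokenizeIntoCharactersAsKeys("Chars", "Text"))
            .Append(mlContext.Transforms.Text.ProduceNgrams("BagOfTrichar", "Chars",
                ngramLength: 3, weighting: NgramExtractingEstimator.WeightingCriteria.TfIdf));

        var transformed = pipeline.Fit(data).Transform(data);
        Console.WriteLine($"Output columns: {transformed.Schema.Count}");
    }
}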
@@ -8,10 +8,10 @@ using Microsoft.ML.Data; using Microsoft.ML.Model; using Microsoft.ML.RunTests; +using Microsoft.ML.TensorFlow; using Microsoft.ML.TestFramework.Attributes; using Microsoft.ML.Tools; using Microsoft.ML.Transforms; -using Microsoft.ML.TensorFlow; using Xunit; using Xunit.Abstractions; @@ -44,7 +44,7 @@ private class TestDataXY [VectorType(4)] public float[] B; } - private class TestDataDifferntType + private class TestDataDifferentType { [VectorType(4)] public string[] a; @@ -76,14 +76,14 @@ public void TestSimpleCase() })); var xyData = new List { new TestDataXY() { A = new float[4], B = new float[4] } }; - var stringData = new List { new TestDataDifferntType() { a = new string[4], b = new string[4] } }; + var stringData = new List { new TestDataDifferentType() { a = new string[4], b = new string[4] } }; var sizeData = new List { new TestDataSize() { a = new float[2], b = new float[2] } }; using var model = ML.Model.LoadTensorFlowModel(modelFile); var pipe = model.ScoreTensorFlowModel(new[] { "c" }, new[] { "a", "b" }); var invalidDataWrongNames = ML.Data.LoadFromEnumerable(xyData); - var invalidDataWrongTypes = ML.Data.LoadFromEnumerable( stringData); - var invalidDataWrongVectorSize = ML.Data.LoadFromEnumerable( sizeData); + var invalidDataWrongTypes = ML.Data.LoadFromEnumerable(stringData); + var invalidDataWrongVectorSize = ML.Data.LoadFromEnumerable(sizeData); TestEstimatorCore(pipe, dataView, invalidInput: invalidDataWrongNames); TestEstimatorCore(pipe, dataView, invalidInput: invalidDataWrongTypes); diff --git a/test/Microsoft.ML.Tests/TermEstimatorTests.cs b/test/Microsoft.ML.Tests/TermEstimatorTests.cs index 26be4c8abb..a19f95408c 100644 --- a/test/Microsoft.ML.Tests/TermEstimatorTests.cs +++ b/test/Microsoft.ML.Tests/TermEstimatorTests.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
diff --git a/test/Microsoft.ML.Tests/TextLoaderTests.cs b/test/Microsoft.ML.Tests/TextLoaderTests.cs index 4f07cebaa7..b3f1445d42 100644 --- a/test/Microsoft.ML.Tests/TextLoaderTests.cs +++ b/test/Microsoft.ML.Tests/TextLoaderTests.cs @@ -220,28 +220,28 @@ public void CanSuccessfullyRetrieveQuotedData() { string dataPath = GetDataPath("QuotingData.csv"); string inputGraph = @" - { - 'Nodes':[ - { + { + 'Nodes':[ + { 'Name':'Data.TextLoader', - 'Inputs':{ + 'Inputs':{ 'InputFile':'$inputFile', - 'Arguments':{ + 'Arguments':{ 'UseThreads':true, 'HeaderFile':null, 'MaxRows':null, 'AllowQuoting':true, 'AllowSparse':false, 'InputSize':null, - 'Separator':[ + 'Separator':[ ',' ], - 'Column':[ - { + 'Column':[ + { 'Name':'ID', 'Type':'R4', - 'Source':[ - { + 'Source':[ + { 'Min':0, 'Max':0, 'AutoEnd':false, @@ -252,11 +252,11 @@ public void CanSuccessfullyRetrieveQuotedData() ], 'KeyCount':null }, - { + { 'Name':'Text', 'Type':'TX', - 'Source':[ - { + 'Source':[ + { 'Min':1, 'Max':1, 'AutoEnd':false, @@ -272,7 +272,7 @@ public void CanSuccessfullyRetrieveQuotedData() 'HasHeader':true } }, - 'Outputs':{ + 'Outputs':{ 'Data':'$data' } } @@ -737,7 +737,7 @@ public void LoaderColumnsFromIrisData(bool useOptionsObject) irisFirstRow["SepalWidth"] = 3.5f; irisFirstRow["PetalLength"] = 1.4f; irisFirstRow["PetalWidth"] = 0.2f; - + var irisFirstRowValues = irisFirstRow.Values.GetEnumerator(); // Simple load @@ -878,7 +878,7 @@ public void TestCommaAsDecimalMarker(bool useCsvVersion) TestCommaAsDecimalMarkerHelper(useCsvVersion); TestCommaAsDecimalMarkerHelper(useCsvVersion); } - + private void TestCommaAsDecimalMarkerHelper(bool useCsvVersion) { // Datasets iris.txt and iris-decimal-marker-as-comma.txt are the exact same, except for their @@ -1012,12 +1012,12 @@ public void TestWrongDecimalMarkerInputs(bool useCommaAsDecimalMarker) using DataViewRowCursor cursor = textData.GetRowCursor(columns); VBuffer featuresPeriod = default; ValueGetter> featuresDelegatePeriod = cursor.GetGetter>(columns[1]); - + // Iterate over each row and check that feature values are NaN. 
while (cursor.MoveNext()) { featuresDelegatePeriod.Invoke(ref featuresPeriod); - foreach(float feature in featuresPeriod.GetValues().ToArray()) + foreach (float feature in featuresPeriod.GetValues().ToArray()) Assert.Equal(feature, Single.NaN); } } @@ -1094,7 +1094,7 @@ public void TestDifferentDecimalMarkersAtTheSameTime(bool useCorrectPeriod, bool { if (useCorrectPeriod && useCorrectComma) { - // Check that none of the two files loadad NaNs + // Check that none of the two files loaded NaNs // As both of them should have been loaded correctly Assert.Equal(featuresPeriodArray[i], featuresCommaArray[i]); Assert.NotEqual(Single.NaN, featuresPeriodArray[i]); @@ -1135,7 +1135,7 @@ private class IrisPrivateFields [LoadColumn(1)] private float SepalWidth { get; } - public float GetSepalLenght() + public float GetSepalLength() => _sepalLength; public void SetSepalLength(float sepalLength) @@ -1315,7 +1315,7 @@ public void TestLoadTextWithEscapedNewLinesAndEscapeChar(bool useSaved, bool use var ids = new List(); var descriptions = new List(); var animals = new List(); - using(var curs = data.GetRowCursorForAllColumns()) + using (var curs = data.GetRowCursorForAllColumns()) { var idGetter = curs.GetGetter(data.Schema["id"]); var descriptionGetter = curs.GetGetter>(data.Schema["description"]); @@ -1325,7 +1325,7 @@ public void TestLoadTextWithEscapedNewLinesAndEscapeChar(bool useSaved, bool use ReadOnlyMemory description = default; ReadOnlyMemory animal = default; - while(curs.MoveNext()) + while (curs.MoveNext()) { idGetter(ref id); descriptionGetter(ref description); @@ -1346,7 +1346,7 @@ public void TestLoadTextWithEscapedNewLinesAndEscapeChar(bool useSaved, bool use string line; using (var file = new StreamReader(baselinePath)) { - for(int i = 0; i < numRows; i++) + for (int i = 0; i < numRows; i++) { line = file.ReadLine(); Assert.Equal(ids[i], line); @@ -1404,11 +1404,11 @@ public void TestInvalidMultilineCSVQuote() data.Preview(); } - catch(EndOfStreamException) + catch (EndOfStreamException) { threwException = true; } - catch(FormatException) + catch (FormatException) { threwException = true; } @@ -1452,8 +1452,8 @@ public void TestLoadTextWithEmptyFloat(bool useImputeEmptyFloats) }; IDataView baselineDV; - IDataView testDV ; - if(useImputeEmptyFloats) + IDataView testDV; + if (useImputeEmptyFloats) { baselineDV = mlContext.Data.LoadFromTextFile(baselineWithImpute, options); options.MissingRealsAsNaNs = true; diff --git a/test/Microsoft.ML.Tests/TrainerEstimators/CalibratorEstimators.cs b/test/Microsoft.ML.Tests/TrainerEstimators/CalibratorEstimators.cs index a205474878..a5f30a7f37 100644 --- a/test/Microsoft.ML.Tests/TrainerEstimators/CalibratorEstimators.cs +++ b/test/Microsoft.ML.Tests/TrainerEstimators/CalibratorEstimators.cs @@ -125,7 +125,8 @@ private sealed class CalibratorTestData } - private void CheckValidCalibratedData(IDataView scoredData, ITransformer transformer){ + private void CheckValidCalibratedData(IDataView scoredData, ITransformer transformer) + { var calibratedData = transformer.Transform(scoredData).Preview(); @@ -184,7 +185,7 @@ public void TestNonStandardCalibratorEstimatorClasses() // Test FixedPlattCalibratorEstimator var calibratorFixedPlattEstimator = new FixedPlattCalibratorEstimator(Env, - scoreColumn: i > 0 ? "ScoreX" : DefaultColumnNames.Score); + scoreColumn: i > 0 ? 
"ScoreX" : DefaultColumnNames.Score); var calibratorFixedPlattTransformer = calibratorFixedPlattEstimator.Fit(dataArray[i]); calibratorFixedPlattTransformer.Transform(dataArray[i]); @@ -251,12 +252,12 @@ public void TestCalibratorEstimatorBackwardsCompatibility() * var plattCalibratorTransformer = plattCalibratorEstimator.Fit(calibratorTestData.ScoredData); * mlContext.Model.Save(plattCalibratorTransformer, calibratorTestData.ScoredData.Schema, "calibrator-model_VerWritten_0x00010001xyz.zip"); */ - + var modelPath = GetDataPath("backcompat", "Calibrator_Model_VerWritten_0x00010001.zip"); ITransformer oldPlattCalibratorTransformer; using (var fs = File.OpenRead(modelPath)) oldPlattCalibratorTransformer = ML.Model.Load(fs, out var schema); - + var calibratorTestData = GetCalibratorTestData(); var newPlattCalibratorEstimator = new PlattCalibratorEstimator(Env); var newPlattCalibratorTransformer = newPlattCalibratorEstimator.Fit(calibratorTestData.ScoredData); diff --git a/test/Microsoft.ML.Tests/TrainerEstimators/FAFMEstimator.cs b/test/Microsoft.ML.Tests/TrainerEstimators/FAFMEstimator.cs index 888882ae5f..1f4b2d50d6 100644 --- a/test/Microsoft.ML.Tests/TrainerEstimators/FAFMEstimator.cs +++ b/test/Microsoft.ML.Tests/TrainerEstimators/FAFMEstimator.cs @@ -6,8 +6,8 @@ using System.Collections.Generic; using Microsoft.ML.Data; using Microsoft.ML.RunTests; -using Microsoft.ML.TestFrameworkCommon; using Microsoft.ML.TestFramework.Attributes; +using Microsoft.ML.TestFrameworkCommon; using Microsoft.ML.Trainers; using Xunit; diff --git a/test/Microsoft.ML.Tests/TrainerEstimators/LbfgsTests.cs b/test/Microsoft.ML.Tests/TrainerEstimators/LbfgsTests.cs index cc2a229e1c..578e4d5f9e 100644 --- a/test/Microsoft.ML.Tests/TrainerEstimators/LbfgsTests.cs +++ b/test/Microsoft.ML.Tests/TrainerEstimators/LbfgsTests.cs @@ -117,7 +117,7 @@ public void TestLRWithStats() validateStats(linearModel); var modelAndSchemaPath = GetOutputPath("TestLRWithStats.zip"); - + // Save model. ML.Model.Save(transformer, dataView.Schema, modelAndSchemaPath); diff --git a/test/Microsoft.ML.Tests/TrainerEstimators/MatrixFactorizationTests.cs b/test/Microsoft.ML.Tests/TrainerEstimators/MatrixFactorizationTests.cs index e68fc06c2c..be95be83d1 100644 --- a/test/Microsoft.ML.Tests/TrainerEstimators/MatrixFactorizationTests.cs +++ b/test/Microsoft.ML.Tests/TrainerEstimators/MatrixFactorizationTests.cs @@ -95,7 +95,7 @@ public void MatrixFactorizationSimpleTrainAndPredict() // MF produce different matrices on different platforms, so check their content on Windows. if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) { - if(RuntimeInformation.ProcessArchitecture == Architecture.Arm64) + if (RuntimeInformation.ProcessArchitecture == Architecture.Arm64) Assert.Equal(0.3041052520275116, leftMatrix[0], 4); else Assert.Equal(0.309137582778931, leftMatrix[0], 4); @@ -128,7 +128,7 @@ public void MatrixFactorizationSimpleTrainAndPredict() // Windows and Mac tolerances are set at 1e-7, and Linux tolerance is set at 1e-5. // Here, each build OS has a different MSE baseline metric. While these metrics differ between builds, each build is expected to // produce the same metric. This is because of minor build differences and varying implementations of sub-functions, such as random - // variables that are first obtained with the default random numger generator in libMF C++ libraries. + // variables that are first obtained with the default random number generator in libMF C++ libraries. 
double windowsAndMacTolerance = Math.Pow(10, -7); double linuxTolerance = Math.Pow(10, -5); if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) @@ -845,4 +845,4 @@ public void InspectMatrixFactorizationModel() Assert.Equal(predictions[i].Score, valuesAtSecondColumn[i], 3); } } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.Tests/TrainerEstimators/OlsLinearRegressionTests.cs b/test/Microsoft.ML.Tests/TrainerEstimators/OlsLinearRegressionTests.cs index f59402ff2a..d7d73e6456 100644 --- a/test/Microsoft.ML.Tests/TrainerEstimators/OlsLinearRegressionTests.cs +++ b/test/Microsoft.ML.Tests/TrainerEstimators/OlsLinearRegressionTests.cs @@ -3,8 +3,8 @@ // See the LICENSE file in the project root for more information. using System.Collections.Generic; -using Microsoft.ML.Trainers; using Microsoft.ML.TestFramework.Attributes; +using Microsoft.ML.Trainers; using Xunit; namespace Microsoft.ML.Tests.TrainerEstimators diff --git a/test/Microsoft.ML.Tests/TrainerEstimators/OnlineLinearTests.cs b/test/Microsoft.ML.Tests/TrainerEstimators/OnlineLinearTests.cs index dc476591bc..f092910aae 100644 --- a/test/Microsoft.ML.Tests/TrainerEstimators/OnlineLinearTests.cs +++ b/test/Microsoft.ML.Tests/TrainerEstimators/OnlineLinearTests.cs @@ -3,10 +3,10 @@ // See the LICENSE file in the project root for more information. using Microsoft.ML; -using Microsoft.ML.Trainers; -using Xunit; using Microsoft.ML.Data; using Microsoft.ML.TestFrameworkCommon; +using Microsoft.ML.Trainers; +using Xunit; namespace Microsoft.ML.Tests.TrainerEstimators { @@ -40,7 +40,7 @@ public void OnlineLinearWorkout() var binaryTrainData = binaryPipe.Fit(binaryData).Transform(binaryData); var apTrainer = ML.BinaryClassification.Trainers.AveragedPerceptron( - new AveragedPerceptronTrainer.Options{ LearningRate = 0.5f }); + new AveragedPerceptronTrainer.Options { LearningRate = 0.5f }); TestEstimatorCore(apTrainer, binaryTrainData); var apModel = apTrainer.Fit(binaryTrainData); diff --git a/test/Microsoft.ML.Tests/TrainerEstimators/SdcaTests.cs b/test/Microsoft.ML.Tests/TrainerEstimators/SdcaTests.cs index 0b674e1c20..5b10ca358c 100644 --- a/test/Microsoft.ML.Tests/TrainerEstimators/SdcaTests.cs +++ b/test/Microsoft.ML.Tests/TrainerEstimators/SdcaTests.cs @@ -11,7 +11,7 @@ namespace Microsoft.ML.Tests.TrainerEstimators { - public partial class TrainerEstimators + public partial class TrainerEstimators { [Fact] public void SdcaWorkout() @@ -142,7 +142,7 @@ public void SdcaLogisticRegressionWithWeight() bool sameScores = true; for (int i = 0; i < scores1.Length; i++) { - if(!CompareNumbersWithTolerance(scores1[i], scores2[i], logFailure: false)) + if (!CompareNumbersWithTolerance(scores1[i], scores2[i], logFailure: false)) { sameScores = false; break; diff --git a/test/Microsoft.ML.Tests/TrainerEstimators/SymSgdClassificationTests.cs b/test/Microsoft.ML.Tests/TrainerEstimators/SymSgdClassificationTests.cs index 02f1316a18..9a2d0aaf13 100644 --- a/test/Microsoft.ML.Tests/TrainerEstimators/SymSgdClassificationTests.cs +++ b/test/Microsoft.ML.Tests/TrainerEstimators/SymSgdClassificationTests.cs @@ -5,8 +5,8 @@ using System.Linq; using Microsoft.ML.Data; using Microsoft.ML.Runtime; -using Microsoft.ML.Trainers; using Microsoft.ML.TestFramework.Attributes; +using Microsoft.ML.Trainers; using Xunit; namespace Microsoft.ML.Tests.TrainerEstimators @@ -62,4 +62,4 @@ public void TestEstimatorSymSgdInitPredictor() Done(); } } -} \ No newline at end of file +} diff --git 
a/test/Microsoft.ML.Tests/TrainerEstimators/TreeEnsembleFeaturizerTest.cs b/test/Microsoft.ML.Tests/TrainerEstimators/TreeEnsembleFeaturizerTest.cs index 68dee8a5b9..e3e3456108 100644 --- a/test/Microsoft.ML.Tests/TrainerEstimators/TreeEnsembleFeaturizerTest.cs +++ b/test/Microsoft.ML.Tests/TrainerEstimators/TreeEnsembleFeaturizerTest.cs @@ -37,8 +37,12 @@ public void TreeEnsembleFeaturizerOutputSchemaTest() const string treesColumnName = "MyTrees"; const string leavesColumnName = "MyLeaves"; const string pathsColumnName = "MyPaths"; - var args = new TreeEnsembleFeaturizerBindableMapper.Arguments() { - TreesColumnName = treesColumnName, LeavesColumnName = leavesColumnName, PathsColumnName = pathsColumnName }; + var args = new TreeEnsembleFeaturizerBindableMapper.Arguments() + { + TreesColumnName = treesColumnName, + LeavesColumnName = leavesColumnName, + PathsColumnName = pathsColumnName + }; var treeFeaturizer = new TreeEnsembleFeaturizerBindableMapper(Env, args, model.Model); // To get output schema, we need to create RoleMappedSchema for calling Bind(...). diff --git a/test/Microsoft.ML.Tests/TrainerEstimators/TreeEstimators.cs b/test/Microsoft.ML.Tests/TrainerEstimators/TreeEstimators.cs index e503133e91..6188b7e34b 100644 --- a/test/Microsoft.ML.Tests/TrainerEstimators/TreeEstimators.cs +++ b/test/Microsoft.ML.Tests/TrainerEstimators/TreeEstimators.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. @@ -593,7 +593,7 @@ public void LightGbmMulticlassEstimatorCompareOvaUsingSigmoids() [LightGBMFact] public void LightGbmMulticlassEstimatorCompareOvaUsingDifferentSigmoids() { - // Run native implemenation twice, see that results are different with different sigmoid values. + // Run native implementation twice, see that results are different with different sigmoid values. var firstSigmoidScale = .790; var secondSigmoidScale = .2; @@ -660,7 +660,7 @@ public void LightGbmMulticlassEstimatorCompareSoftMax() public void LightGbmMulticlassEstimatorCompareUnbalanced() { // Train ML.NET LightGBM and native LightGBM and apply the trained models to the training set. - LightGbmHelper(useSoftmax: true, sigmoid: .5, out string modelString, out List mlnetPredictions, out double[] nativeResult1, out double[] nativeResult0, unbalancedSets:true); + LightGbmHelper(useSoftmax: true, sigmoid: .5, out string modelString, out List mlnetPredictions, out double[] nativeResult1, out double[] nativeResult0, unbalancedSets: true); // The i-th predictor returned by LightGBM produces the raw score, denoted by z_i, of the i-th class. // Assume that we have n classes in total. 
The i-th class probability can be computed via @@ -723,7 +723,7 @@ public void LightGbmFitMoreThanOnce() .LightGbm()); var numClasses = 3; - var dataPoints = GenerateRandomDataPoints(100, numClasses:numClasses); + var dataPoints = GenerateRandomDataPoints(100, numClasses: numClasses); var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints); var model = pipeline.Fit(trainingData); var numOfSubParameters = (model.LastTransformer.Model as OneVersusAllModelParameters).SubModelParameters.Length; @@ -801,7 +801,7 @@ private static void CheckSummaryRowTreeNode(SummaryDataRow row, int treeIndex, d Assert.Equal(row.CategoricalSplitFlags, tree.CategoricalSplitFlags[nodeId]); Assert.Equal(0, row.LeafValues); Assert.Equal(row.SplitGains, tree.SplitGains[nodeId]); - if(tree.GetCategoricalSplitFeaturesAt(nodeId).Count() > 0) + if (tree.GetCategoricalSplitFeaturesAt(nodeId).Count() > 0) Assert.Equal(row.CategoricalSplitFeatures, tree.GetCategoricalSplitFeaturesAt(nodeId).ToArray()); else Assert.Null(row.CategoricalSplitFeatures); @@ -882,7 +882,7 @@ public void FastTreeRegressorTestSummary() { var dataView = GetRegressionPipeline(); var trainer = ML.Regression.Trainers.FastTree( - new FastTreeRegressionTrainer.Options { NumberOfTrees = 10, NumberOfThreads = 1, NumberOfLeaves = 5}); + new FastTreeRegressionTrainer.Options { NumberOfTrees = 10, NumberOfThreads = 1, NumberOfLeaves = 5 }); var transformer = trainer.Fit(dataView); @@ -900,7 +900,7 @@ public void FastForestRegressorTestSummary() { var dataView = GetRegressionPipeline(); var trainer = ML.Regression.Trainers.FastForest( - new FastForestRegressionTrainer.Options { NumberOfTrees = 10, NumberOfThreads = 1, NumberOfLeaves = 5}); + new FastForestRegressionTrainer.Options { NumberOfTrees = 10, NumberOfThreads = 1, NumberOfLeaves = 5 }); var transformer = trainer.Fit(dataView); @@ -918,7 +918,7 @@ public void FastTreeTweedieRegressorTestSummary() { var dataView = GetRegressionPipeline(); var trainer = ML.Regression.Trainers.FastTreeTweedie( - new FastTreeTweedieTrainer.Options { NumberOfTrees = 10, NumberOfThreads = 1, NumberOfLeaves = 5}); + new FastTreeTweedieTrainer.Options { NumberOfTrees = 10, NumberOfThreads = 1, NumberOfLeaves = 5 }); var transformer = trainer.Fit(dataView); @@ -939,10 +939,10 @@ public void LightGbmRegressorTestSummary() // Attention: Do not set NumberOfThreads here, left this to use default value to avoid test crash. // Details can be found here: https://github.com/dotnet/machinelearning/pull/4918 var trainer = ML.Regression.Trainers.LightGbm( - new LightGbmRegressionTrainer.Options - { - NumberOfIterations = 10, - NumberOfLeaves = 5 + new LightGbmRegressionTrainer.Options + { + NumberOfIterations = 10, + NumberOfLeaves = 5 }); var transformer = trainer.Fit(dataView); @@ -961,7 +961,7 @@ public void FastTreeBinaryClassificationTestSummary() { var (pipeline, dataView) = GetBinaryClassificationPipeline(); var estimator = pipeline.Append(ML.BinaryClassification.Trainers.FastTree( - new FastTreeBinaryTrainer.Options { NumberOfTrees = 2, NumberOfThreads = 1, NumberOfLeaves = 5})); + new FastTreeBinaryTrainer.Options { NumberOfTrees = 2, NumberOfThreads = 1, NumberOfLeaves = 5 })); var transformer = estimator.Fit(dataView); @@ -1000,11 +1000,11 @@ public void LightGbmBinaryClassificationTestSummary() // Attention: Do not set NumberOfThreads here, left this to use default value to avoid test crash. 
// Details can be found here: https://github.com/dotnet/machinelearning/pull/4918 var trainer = pipeline.Append(ML.BinaryClassification.Trainers.LightGbm( - new LightGbmBinaryTrainer.Options - { - NumberOfIterations = 10, - NumberOfLeaves = 5, - UseCategoricalSplit = true + new LightGbmBinaryTrainer.Options + { + NumberOfIterations = 10, + NumberOfLeaves = 5, + UseCategoricalSplit = true })); var transformer = trainer.Fit(dataView); diff --git a/test/Microsoft.ML.Tests/Transformers/CharTokenizeTests.cs b/test/Microsoft.ML.Tests/Transformers/CharTokenizeTests.cs index 112f0a6233..1f1f792da5 100644 --- a/test/Microsoft.ML.Tests/Transformers/CharTokenizeTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/CharTokenizeTests.cs @@ -39,11 +39,11 @@ public void CharTokenizeWorkout() { var data = new[] { new TestClass() { A = "This is a good sentence.", B = new string[2] { "Much words", "Wow So Cool" } } }; var dataView = ML.Data.LoadFromEnumerable(data); - var invalidData = new[] { new TestWrong() { A = 1, B = new float[2] { 2,3} } }; + var invalidData = new[] { new TestWrong() { A = 1, B = new float[2] { 2, 3 } } }; var invalidDataView = ML.Data.LoadFromEnumerable(invalidData); var pipe = new TokenizingByCharactersEstimator(Env, columns: new[] { ("TokenizeA", "A"), ("TokenizeB", "B") }); - TestEstimatorCore(pipe, dataView, invalidInput:invalidDataView); + TestEstimatorCore(pipe, dataView, invalidInput: invalidDataView); Done(); } diff --git a/test/Microsoft.ML.Tests/Transformers/ConcatTests.cs b/test/Microsoft.ML.Tests/Transformers/ConcatTests.cs index 2412d6e58b..59ac4db40e 100644 --- a/test/Microsoft.ML.Tests/Transformers/ConcatTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/ConcatTests.cs @@ -27,7 +27,7 @@ public void TestConcatNoInputColumns() { var pipe = ML.Transforms.Concatenate("Features"); } - catch(Exception ex) + catch (Exception ex) { Assert.Contains("Input columns not specified", ex.Message); thrown = true; @@ -130,8 +130,8 @@ static DataViewType GetType(DataViewSchema schema, string name) var columns = concater.Columns; var colEnumerator = columns.GetEnumerator(); colEnumerator.MoveNext(); - Assert.True(colEnumerator.Current.outputColumnName == "f2" && - colEnumerator.Current.inputColumnNames[0] == "float1" && + Assert.True(colEnumerator.Current.outputColumnName == "f2" && + colEnumerator.Current.inputColumnNames[0] == "float1" && colEnumerator.Current.inputColumnNames[1] == "float1"); colEnumerator.MoveNext(); Assert.True(colEnumerator.Current.outputColumnName == "f3" && @@ -144,7 +144,7 @@ static DataViewType GetType(DataViewSchema schema, string name) t = GetType(data.Schema, "f3"); Assert.True(t is VectorDataViewType vt3 && vt3.ItemType == NumberDataViewType.Single && vt3.Size == 5); - data = ML.Transforms.SelectColumns("f2", "f3" ).Fit(data).Transform(data); + data = ML.Transforms.SelectColumns("f2", "f3").Fit(data).Transform(data); var subdir = Path.Combine("Transform", "Concat"); var outputPath = GetOutputPath(subdir, "Concat2.tsv"); diff --git a/test/Microsoft.ML.Tests/Transformers/DateTimeTransformerTests.cs b/test/Microsoft.ML.Tests/Transformers/DateTimeTransformerTests.cs index 10053c367c..743bb21a61 100644 --- a/test/Microsoft.ML.Tests/Transformers/DateTimeTransformerTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/DateTimeTransformerTests.cs @@ -2,13 +2,13 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
+using System; using Microsoft.ML.Data; -using Microsoft.ML.RunTests; using Microsoft.ML.Featurizers; -using System; +using Microsoft.ML.RunTests; +using Microsoft.ML.TestFramework.Attributes; using Xunit; using Xunit.Abstractions; -using Microsoft.ML.TestFramework.Attributes; namespace Microsoft.ML.Tests.Transformers { @@ -138,7 +138,7 @@ public void CanUseDateFromColumnDateTimeType() { // Date - 2025 June 30 MLContext mlContext = new MLContext(1); - var dataList = new[] { new { date = new DateTime(2025,6,30)} }; + var dataList = new[] { new { date = new DateTime(2025, 6, 30) } }; var data = mlContext.Data.LoadFromEnumerable(dataList); // Build the pipeline, fit, and transform it. diff --git a/test/Microsoft.ML.Tests/Transformers/GroupUngroup.cs b/test/Microsoft.ML.Tests/Transformers/GroupUngroup.cs index 50a0414cca..0c8f75f7f4 100644 --- a/test/Microsoft.ML.Tests/Transformers/GroupUngroup.cs +++ b/test/Microsoft.ML.Tests/Transformers/GroupUngroup.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/test/Microsoft.ML.Tests/Transformers/HashTests.cs b/test/Microsoft.ML.Tests/Transformers/HashTests.cs index 8864daa1d8..8444a05d00 100644 --- a/test/Microsoft.ML.Tests/Transformers/HashTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/HashTests.cs @@ -235,7 +235,7 @@ private void HashTestPositiveIntegerCore32Bits(ulong value, uint expected, uint if (value <= ushort.MaxValue) { HashTestCore((ushort)value, NumberDataViewType.UInt16, expected, expectedOrdered, expectedOrdered3, expectedCombined, expectedCombinedSparse); - HashTestCore((ushort)value, new KeyDataViewType(typeof(ushort),ushort.MaxValue - 1), eKey, eoKey, e3Key, ecKey, 0); + HashTestCore((ushort)value, new KeyDataViewType(typeof(ushort), ushort.MaxValue - 1), eKey, eoKey, e3Key, ecKey, 0); } if (value <= uint.MaxValue) { @@ -263,7 +263,7 @@ private void HashTestPositiveIntegerCore64Bits(ulong value, uint expected, uint // Next let's check signed numbers. 
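// The identical expected hashes passed below rest on the ulong-to-long cast being
// value-preserving whenever value <= long.MaxValue, so the signed input is expected to
// hash the same as its unsigned counterpart. A tiny illustration of that assumption
// (variable names here are placeholders, not part of the test):
ulong unsignedValue = 42UL;
long signedValue = (long)unsignedValue;                        // same numeric value, different static type
System.Diagnostics.Debug.Assert((ulong)signedValue == unsignedValue);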
if (value <= long.MaxValue) - HashTestCore((long)value, NumberDataViewType.Int64, expected, expectedOrdered, expectedOrdered3, expectedCombined, expectedCombinedSparse); + HashTestCore((long)value, NumberDataViewType.Int64, expected, expectedOrdered, expectedOrdered3, expectedCombined, expectedCombinedSparse); // ulong keys HashTestCore(value, new KeyDataViewType(typeof(ulong), int.MaxValue - 1), eKey, eoKey, e3Key, ecKey, 0); @@ -372,8 +372,8 @@ public void TestBackCompatNoCombineOption() [Fact] public void TestCombineLengthOneVector() { - var data = new[] - { + var data = new[] + { new TestClass() { A = 1, B = 2, C = 3 }, new TestClass() { A = 4, B = 5, C = 6 }, new TestClass() { A = float.NaN, B = 3, C = 12 } diff --git a/test/Microsoft.ML.Tests/Transformers/KeyToVectorEstimatorTests.cs b/test/Microsoft.ML.Tests/Transformers/KeyToVectorEstimatorTests.cs index 5b2d0a4097..7f8e6ea4bb 100644 --- a/test/Microsoft.ML.Tests/Transformers/KeyToVectorEstimatorTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/KeyToVectorEstimatorTests.cs @@ -212,7 +212,7 @@ public void TestOldSavingAndLoading() var transformer = est.Fit(dataView); dataView = transformer.Transform(dataView); var pipe = ML.Transforms.Conversion.MapKeyToVector( - new KeyToVectorMappingEstimator.ColumnOptions("CatA", "TermA",false), + new KeyToVectorMappingEstimator.ColumnOptions("CatA", "TermA", false), new KeyToVectorMappingEstimator.ColumnOptions("CatB", "TermB", true) ); var result = pipe.Fit(dataView).Transform(dataView); diff --git a/test/Microsoft.ML.Tests/Transformers/LineParserTests.cs b/test/Microsoft.ML.Tests/Transformers/LineParserTests.cs index e911dc9347..b83c26228b 100644 --- a/test/Microsoft.ML.Tests/Transformers/LineParserTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/LineParserTests.cs @@ -62,7 +62,7 @@ public void LineParserAndCulture() { for (int i = 0; i < arraySize; i++) data[i] = rand.NextSingle() * 50 - 25; - var result = LineParser.ParseKeyThenNumbers("word" + separator[sep % 2] + string.Join(separator[sep / 2], data.Select(x=>x.ToString("G9"))), false); + var result = LineParser.ParseKeyThenNumbers("word" + separator[sep % 2] + string.Join(separator[sep / 2], data.Select(x => x.ToString("G9"))), false); Assert.True(result.isSuccess); Assert.Equal("word", result.key); for (int i = 0; i < arraySize; i++) diff --git a/test/Microsoft.ML.Tests/Transformers/NormalizerTests.cs b/test/Microsoft.ML.Tests/Transformers/NormalizerTests.cs index 0abdb933a6..a3a29bc054 100644 --- a/test/Microsoft.ML.Tests/Transformers/NormalizerTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/NormalizerTests.cs @@ -6,6 +6,7 @@ using System.Collections.Generic; using System.Collections.Immutable; using System.IO; +using System.Linq; using Microsoft.ML.Data; using Microsoft.ML.Data.IO; using Microsoft.ML.Experimental; @@ -13,13 +14,12 @@ using Microsoft.ML.RunTests; using Microsoft.ML.TestFramework.Attributes; using Microsoft.ML.TestFrameworkCommon; +using Microsoft.ML.TestFrameworkCommon.Attributes; using Microsoft.ML.Tools; using Microsoft.ML.Transforms; -using System.Linq; using Xunit; using Xunit.Abstractions; using static Microsoft.ML.Transforms.NormalizingTransformer; -using Microsoft.ML.TestFrameworkCommon.Attributes; namespace Microsoft.ML.Tests.Transformers { @@ -430,7 +430,7 @@ public void NormalizerParametersMultiColumnApi() robustScalerEstimator = context.Transforms.NormalizeRobustScaling( new[] {new InputOutputColumnPair("float1rbs", "float1"), new InputOutputColumnPair("float4rbs", "float4"), new 
InputOutputColumnPair("double1rbs", "double1"), new InputOutputColumnPair("double4rbs", "double4")} - ,centerData: false); + , centerData: false); robustScalerTransformer = robustScalerEstimator.Fit(data); diff --git a/test/Microsoft.ML.Tests/Transformers/RffTests.cs b/test/Microsoft.ML.Tests/Transformers/RffTests.cs index bfd4555b19..522a47e4b3 100644 --- a/test/Microsoft.ML.Tests/Transformers/RffTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/RffTests.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. diff --git a/test/Microsoft.ML.Tests/Transformers/SelectColumnsTests.cs b/test/Microsoft.ML.Tests/Transformers/SelectColumnsTests.cs index b1177615c3..f5defa2995 100644 --- a/test/Microsoft.ML.Tests/Transformers/SelectColumnsTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/SelectColumnsTests.cs @@ -98,21 +98,21 @@ public void TestSelectDrop() Assert.Equal(0, bIdx); Assert.False(foundColumnC); } - + [Fact] public void TestSelectWorkout() { var data = new[] { new TestClass() { A = 1, B = 2, C = 3, }, new TestClass() { A = 4, B = 5, C = 6 } }; - var invalidData = new [] { new TestClass2 { D = 3, E = 5} }; + var invalidData = new[] { new TestClass2 { D = 3, E = 5 } }; var dataView = ML.Data.LoadFromEnumerable(data); var invalidDataView = ML.Data.LoadFromEnumerable(invalidData); // Workout on keep columns - var est = ML.Transforms.SelectColumns(new[] {"A", "B"}); + var est = ML.Transforms.SelectColumns(new[] { "A", "B" }); TestEstimatorCore(est, validFitInput: dataView, invalidInput: invalidDataView); // Workout on select columns with hidden: true - est = ML.Transforms.SelectColumns(new[] {"A", "B"}, true); + est = ML.Transforms.SelectColumns(new[] { "A", "B" }, true); TestEstimatorCore(est, validFitInput: dataView, invalidInput: invalidDataView); } @@ -130,7 +130,7 @@ public void TestSelectColumnsWithSameName() { var data = new[] { new TestClass() { A = 1, B = 2, C = 3, }, new TestClass() { A = 4, B = 5, C = 6 } }; var dataView = ML.Data.LoadFromEnumerable(data); - var est = new ColumnCopyingEstimator(Env, new[] {("A", "A"), ("B", "B")}); + var est = new ColumnCopyingEstimator(Env, new[] { ("A", "A"), ("B", "B") }); var chain = est.Append(ColumnSelectingEstimator.KeepColumns(Env, "C", "A")); var transformer = chain.Fit(dataView); var result = transformer.Transform(dataView); @@ -153,8 +153,8 @@ public void TestSelectColumnsWithKeepHidden() { var data = new[] { new TestClass() { A = 1, B = 2, C = 3, }, new TestClass() { A = 4, B = 5, C = 6 } }; var dataView = ML.Data.LoadFromEnumerable(data); - var est = new ColumnCopyingEstimator(Env, new[] {("A", "A"), ("B", "B")}); - var chain = est.Append(ML.Transforms.SelectColumns(new[] {"B", "A" }, true)); + var est = new ColumnCopyingEstimator(Env, new[] { ("A", "A"), ("B", "B") }); + var chain = est.Append(ML.Transforms.SelectColumns(new[] { "B", "A" }, true)); var transformer = chain.Fit(dataView); var result = transformer.Transform(dataView); @@ -195,7 +195,7 @@ public void TestSelectSavingAndLoadingWithNoKeepHidden() { var data = new[] { new TestClass() { A = 1, B = 2, C = 3, }, new TestClass() { A = 4, B = 5, C = 6 } }; var dataView = ML.Data.LoadFromEnumerable(data); - var est = new ColumnCopyingEstimator(Env, new[] {("A", "A"), ("B", "B")}).Append( + var est = new ColumnCopyingEstimator(Env, new[] { ("A", 
"A"), ("B", "B") }).Append( ML.Transforms.SelectColumns(new[] { "A", "B" }, false)); var transformer = est.Fit(dataView); using (var ms = new MemoryStream()) @@ -215,7 +215,7 @@ public void TestSelectBackCompatDropColumns() { // Model generated with: xf=drop{col=A} // Expected output: Features Label B C - var data = new[] { new TestClass3() { Label="foo", Features="bar", A = 1, B = 2, C = 3, } }; + var data = new[] { new TestClass3() { Label = "foo", Features = "bar", A = 1, B = 2, C = 3, } }; var dataView = ML.Data.LoadFromEnumerable(data); string dropModelPath = GetDataPath("backcompat/drop-model.zip"); using (FileStream fs = File.OpenRead(dropModelPath)) @@ -243,7 +243,7 @@ public void TestSelectBackCompatKeepColumns() { // Model generated with: xf=keep{col=Label col=Features col=A col=B} // Expected output: Label Features A B - var data = new[] { new TestClass3() { Label="foo", Features="bar", A = 1, B = 2, C = 3, } }; + var data = new[] { new TestClass3() { Label = "foo", Features = "bar", A = 1, B = 2, C = 3, } }; var dataView = ML.Data.LoadFromEnumerable(data); string dropModelPath = GetDataPath("backcompat/keep-model.zip"); using (FileStream fs = File.OpenRead(dropModelPath)) @@ -265,13 +265,13 @@ public void TestSelectBackCompatKeepColumns() Assert.False(foundColumnC); } } - + [Fact] public void TestSelectBackCompatChooseColumns() { // Model generated with: xf=choose{col=Label col=Features col=A col=B} // Output expected is Label Features A B - var data = new[] { new TestClass3() { Label="foo", Features="bar", A = 1, B = 2, C = 3, } }; + var data = new[] { new TestClass3() { Label = "foo", Features = "bar", A = 1, B = 2, C = 3, } }; var dataView = ML.Data.LoadFromEnumerable(data); string dropModelPath = GetDataPath("backcompat/choose-model.zip"); using (FileStream fs = File.OpenRead(dropModelPath)) @@ -299,7 +299,7 @@ public void TestSelectBackCompatChooseColumnsWithKeep() { // Model generated with: xf=copy{col=A:A col=B:B} xf=choose{col=Label col=Features col=A col=B hidden=keep} // Output expected is Label Features A A B B - var data = new[] { new TestClass3() { Label="foo", Features="bar", A = 1, B = 2, C = 3, } }; + var data = new[] { new TestClass3() { Label = "foo", Features = "bar", A = 1, B = 2, C = 3, } }; var dataView = ML.Data.LoadFromEnumerable(data); string chooseModelPath = GetDataPath("backcompat/choose-keep-model.zip"); using (FileStream fs = File.OpenRead(chooseModelPath)) diff --git a/test/Microsoft.ML.Tests/Transformers/TextFeaturizerTests.cs b/test/Microsoft.ML.Tests/Transformers/TextFeaturizerTests.cs index 97ccf2bc14..a32c003c94 100644 --- a/test/Microsoft.ML.Tests/Transformers/TextFeaturizerTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/TextFeaturizerTests.cs @@ -210,7 +210,7 @@ public void TextFeaturizerWithL2NormTest() var options = new TextFeaturizingEstimator.Options() { - CharFeatureExtractor = new WordBagEstimator.Options() { NgramLength = 1}, + CharFeatureExtractor = new WordBagEstimator.Options() { NgramLength = 1 }, Norm = TextFeaturizingEstimator.NormFunction.L2, OutputTokensColumnName = "OutputTokens" }; @@ -684,7 +684,7 @@ public void LdaWorkout() Append(new LatentDirichletAllocationEstimator(env, "topics", "bag_of_words", 10, maximumNumberOfIterations: 10, resetRandomGenerator: true)); - // Diabling this check due to the following issue with consitency of output. + // Disabling this check due to the following issue with consitency of output. // `seed` specified in ConsoleEnvironment has no effect. 
// https://github.com/dotnet/machinelearning/issues/1004 // On single box, setting `s.ResetRandomGenerator = true` works but fails on build server @@ -705,7 +705,7 @@ public void LdaWorkout() Assert.Equal(10, (savedData.Schema[0].Type as VectorDataViewType)?.Size); } - // Diabling this check due to the following issue with consitency of output. + // Disabling this check due to the following issue with consitency of output. // `seed` specified in ConsoleEnvironment has no effect. // https://github.com/dotnet/machinelearning/issues/1004 // On single box, setting `s.ResetRandomGenerator = true` works but fails on build server diff --git a/test/Microsoft.ML.Tests/Transformers/TextNormalizer.cs b/test/Microsoft.ML.Tests/Transformers/TextNormalizer.cs index 93b7f0612e..bb4c0bdb4c 100644 --- a/test/Microsoft.ML.Tests/Transformers/TextNormalizer.cs +++ b/test/Microsoft.ML.Tests/Transformers/TextNormalizer.cs @@ -60,9 +60,9 @@ public void TextNormalizerWorkout() new TextNormalizingEstimator(ML, keepPunctuations: false, columns: new[] { ("NoPuncText", "text") })); var outputPath = GetOutputPath("Text", "Normalized.tsv"); - var savedData = ML.Data.TakeRows(pipeVariations.Fit(dataView).Transform(dataView), 5); - using (var fs = File.Create(outputPath)) - ML.Data.SaveAsText(savedData, fs, headerRow: true, keepHidden: true); + var savedData = ML.Data.TakeRows(pipeVariations.Fit(dataView).Transform(dataView), 5); + using (var fs = File.Create(outputPath)) + ML.Data.SaveAsText(savedData, fs, headerRow: true, keepHidden: true); CheckEquality("Text", "Normalized.tsv"); Done(); diff --git a/test/Microsoft.ML.Tests/Transformers/TimeSeriesImputerTests.cs b/test/Microsoft.ML.Tests/Transformers/TimeSeriesImputerTests.cs index ccce0678c3..7c447ed16e 100644 --- a/test/Microsoft.ML.Tests/Transformers/TimeSeriesImputerTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/TimeSeriesImputerTests.cs @@ -2,15 +2,15 @@ // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. -using Microsoft.ML.Data; -using Microsoft.ML.RunTests; -using Microsoft.ML.Featurizers; using System; -using Xunit; -using Xunit.Abstractions; using System.Drawing.Printing; using System.Linq; +using Microsoft.ML.Data; +using Microsoft.ML.Featurizers; +using Microsoft.ML.RunTests; using Microsoft.ML.TestFramework.Attributes; +using Xunit; +using Xunit.Abstractions; namespace Microsoft.ML.Tests.Transformers { @@ -43,7 +43,7 @@ private class TimeSeriesOneGrainFloatInput public string grainA; public float dataA; } - + private class TimeSeriesOneGrainStringInput { public long date; @@ -55,15 +55,15 @@ private class TimeSeriesOneGrainStringInput public void NotImputeOneColumn() { MLContext mlContext = new MLContext(1); - var dataList = new[] { - new TimeSeriesOneGrainInput() { date = 25, grainA = "A", dataA = 1, dataB = 2.0f, dataC = 5 }, - new TimeSeriesOneGrainInput() { date = 26, grainA = "A", dataA = 1, dataB = 2.0f, dataC = 5 }, - new TimeSeriesOneGrainInput() { date = 28, grainA = "A", dataA = 1, dataB = 2.0f, dataC = 5 } + var dataList = new[] { + new TimeSeriesOneGrainInput() { date = 25, grainA = "A", dataA = 1, dataB = 2.0f, dataC = 5 }, + new TimeSeriesOneGrainInput() { date = 26, grainA = "A", dataA = 1, dataB = 2.0f, dataC = 5 }, + new TimeSeriesOneGrainInput() { date = 28, grainA = "A", dataA = 1, dataB = 2.0f, dataC = 5 } }; var data = mlContext.Data.LoadFromEnumerable(dataList); // Build the pipeline, fit, and transform it. 
- var pipeline = mlContext.Transforms.ReplaceMissingTimeSeriesValues("date", new string[] { "grainA" }, new string[] { "dataB"}); + var pipeline = mlContext.Transforms.ReplaceMissingTimeSeriesValues("date", new string[] { "grainA" }, new string[] { "dataB" }); var model = pipeline.Fit(data); var output = model.Transform(data); var schema = output.Schema; @@ -90,20 +90,20 @@ public void NotImputeOneColumn() TestEstimatorCore(pipeline, data); Done(); } - + [FeaturizersFact] public void ImputeOnlyOneColumn() { MLContext mlContext = new MLContext(1); - var dataList = new[] { - new TimeSeriesOneGrainInput() { date = 25, grainA = "A", dataA = 1, dataB = 2.0f, dataC = 5 }, - new TimeSeriesOneGrainInput() { date = 26, grainA = "A", dataA = 1, dataB = 2.0f, dataC = 5 }, - new TimeSeriesOneGrainInput() { date = 28, grainA = "A", dataA = 1, dataB = 2.0f, dataC = 5 } + var dataList = new[] { + new TimeSeriesOneGrainInput() { date = 25, grainA = "A", dataA = 1, dataB = 2.0f, dataC = 5 }, + new TimeSeriesOneGrainInput() { date = 26, grainA = "A", dataA = 1, dataB = 2.0f, dataC = 5 }, + new TimeSeriesOneGrainInput() { date = 28, grainA = "A", dataA = 1, dataB = 2.0f, dataC = 5 } }; var data = mlContext.Data.LoadFromEnumerable(dataList); // Build the pipeline, fit, and transform it. - var pipeline = mlContext.Transforms.ReplaceMissingTimeSeriesValues("date", new string[] { "grainA" }, new string[] { "dataB"}, TimeSeriesImputerEstimator.FilterMode.Include); + var pipeline = mlContext.Transforms.ReplaceMissingTimeSeriesValues("date", new string[] { "grainA" }, new string[] { "dataB" }, TimeSeriesImputerEstimator.FilterMode.Include); var model = pipeline.Fit(data); var output = model.Transform(data); var schema = output.Schema; @@ -189,7 +189,7 @@ public void Forwardfill() TestEstimatorCore(pipeline, data); Done(); } - + [FeaturizersFact] public void DateTimeSupportForwardfill() { @@ -244,7 +244,7 @@ public void DateTimeSupportForwardfill() TestEstimatorCore(pipeline, data); Done(); } - + [FeaturizersFact] public void EntryPoint() { @@ -254,9 +254,10 @@ public void EntryPoint() new { ts = 3L, grain = 1970, c3 = 15, c4 = 16}, new { ts = 5L, grain = 1970, c3 = 20, c4 = 19} }; - + var data = mlContext.Data.LoadFromEnumerable(dataList); - TimeSeriesImputerEstimator.Options options = new TimeSeriesImputerEstimator.Options() { + TimeSeriesImputerEstimator.Options options = new TimeSeriesImputerEstimator.Options() + { TimeSeriesColumn = "ts", GrainColumns = new[] { "grain" }, FilterColumns = new[] { "c3", "c4" }, diff --git a/test/Microsoft.ML.Tests/Transformers/ValueMappingTests.cs b/test/Microsoft.ML.Tests/Transformers/ValueMappingTests.cs index 0ecd198aac..93ad2d6492 100644 --- a/test/Microsoft.ML.Tests/Transformers/ValueMappingTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/ValueMappingTests.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
@@ -370,7 +370,7 @@ public void ValueMappingOutputSchema() var est = ML.Transforms.Conversion.MapValue(keyValuePairs, new[] { new InputOutputColumnPair("D", "A"), new InputOutputColumnPair("E", "B"), new InputOutputColumnPair("F", "C") }); - var outputSchema = est.GetOutputSchema(SchemaShape.Create(dataView.Schema)); + var outputSchema = est.GetOutputSchema(SchemaShape.Create(dataView.Schema)); Assert.Equal(6, outputSchema.Count()); Assert.True(outputSchema.TryFindColumn("D", out SchemaShape.Column dColumn)); @@ -402,7 +402,7 @@ public void ValueMappingWithValuesAsKeyTypesOutputSchema() var estimator = ML.Transforms.Conversion.MapValue(keyValuePairs, true, new[] { new InputOutputColumnPair("D", "A"), new InputOutputColumnPair("E", "B"), new InputOutputColumnPair("F", "C") }); - var outputSchema = estimator.GetOutputSchema(SchemaShape.Create(dataView.Schema)); + var outputSchema = estimator.GetOutputSchema(SchemaShape.Create(dataView.Schema)); Assert.Equal(6, outputSchema.Count()); Assert.True(outputSchema.TryFindColumn("D", out SchemaShape.Column dColumn)); Assert.True(outputSchema.TryFindColumn("E", out SchemaShape.Column eColumn)); @@ -610,7 +610,7 @@ public void ValueMappingValueTypeIsVectorWorkout() var badData = new[] { new TestWrong() { A = "bar", B = 1.2f } }; var badDataView = ML.Data.LoadFromEnumerable(badData); - var keyValuePairs = new List>() { + var keyValuePairs = new List>() { new KeyValuePair("foo", new int[] {2, 3, 4 }), new KeyValuePair("bar", new int[] {100, 200 }), new KeyValuePair("test", new int[] {400, 500, 600, 700 }), @@ -630,11 +630,11 @@ public void ValueMappingInputIsVectorWorkout() var badData = new[] { new TestWrong() { B = 1.2f } }; var badDataView = ML.Data.LoadFromEnumerable(badData); - var keyValuePairs = new List,int>>() { + var keyValuePairs = new List, int>>() { new KeyValuePair,int>("foo".AsMemory(), 1), new KeyValuePair,int>("bar".AsMemory(), 2), new KeyValuePair,int>("test".AsMemory(), 3), - new KeyValuePair,int>("wahoo".AsMemory(), 4) + new KeyValuePair,int>("wahoo".AsMemory(), 4) }; var est = ML.Transforms.Text.TokenizeIntoWords("TokenizeB", "B") @@ -785,7 +785,7 @@ public void TestValueMapWithNonDefaultColumnOrder() var expectedCategories = new string[] { "Low", "High", "Low", "Low", "Medium" }; - for(int i = 0; i < features.Count; ++i) + for (int i = 0; i < features.Count; ++i) { var feature = features[i]; Assert.Equal(rawData[i].Price, feature.Price); diff --git a/test/Microsoft.ML.Tests/Transformers/WordTokenizeTests.cs b/test/Microsoft.ML.Tests/Transformers/WordTokenizeTests.cs index 36f1ea47b9..9f759f207e 100644 --- a/test/Microsoft.ML.Tests/Transformers/WordTokenizeTests.cs +++ b/test/Microsoft.ML.Tests/Transformers/WordTokenizeTests.cs @@ -27,8 +27,8 @@ private class TestClass public string[] B; } -// Visual Studio complains because the following class members are not never assigned. That is wrong because that class -// will be implicitly created in runtime and therefore we disable warning 169. + // Visual Studio complains because the following class members are not never assigned. That is wrong because that class + // will be implicitly created in runtime and therefore we disable warning 169. #pragma warning disable 169 // This is a C# native data structure used to capture the output of ML.NET tokenizer in the test below. 
public class NativeResult @@ -53,7 +53,7 @@ public void WordTokenizeWorkout() { var data = new[] { new TestClass() { A = "This is a good sentence.", B = new string[2] { "Much words", "Wow So Cool" } } }; var dataView = ML.Data.LoadFromEnumerable(data); - var invalidData = new[] { new TestWrong() { A =1, B = new float[2] { 2,3 } } }; + var invalidData = new[] { new TestWrong() { A = 1, B = new float[2] { 2, 3 } } }; var invalidDataView = ML.Data.LoadFromEnumerable(invalidData); var pipe = new WordTokenizingEstimator(Env, new[]{ new WordTokenizingEstimator.ColumnOptions("TokenizeA", "A"), diff --git a/test/Microsoft.ML.TimeSeries.Tests/TimeSeries.cs b/test/Microsoft.ML.TimeSeries.Tests/TimeSeries.cs index 3ab1cbcf30..750b76520d 100644 --- a/test/Microsoft.ML.TimeSeries.Tests/TimeSeries.cs +++ b/test/Microsoft.ML.TimeSeries.Tests/TimeSeries.cs @@ -181,4 +181,4 @@ public void SavePipeMovingAverageNonUniform() } } -} \ No newline at end of file +} diff --git a/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesDirectApi.cs b/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesDirectApi.cs index b694e63185..98c89d2544 100644 --- a/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesDirectApi.cs +++ b/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesDirectApi.cs @@ -245,7 +245,7 @@ public void ChangePointDetectionWithSeasonalityPredictionEngineNoColumn() var modelPath = "temp.zip"; engine.CheckPoint(ml, modelPath); - //Load time series model and we will use this to pass two inputs and compare the raw score + //Load time series model and we will use this to pass two inputs and compare the raw score //with "engine". ITransformer model2 = null; using (var file = File.OpenRead(modelPath)) @@ -260,7 +260,7 @@ public void ChangePointDetectionWithSeasonalityPredictionEngineNoColumn() //Raw score after second input. Assert.Equal(0.12216401100158691, prediction2.Change[1], precision: 5); // Raw score - //Even though time series column is not requested it will + //Even though time series column is not requested it will // pass the observation through time series transform and update the state with the first input. 
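// A condensed sketch of the stateful round trip exercised in this test; MyInput and MyOutput
// are placeholder classes (schema attributes omitted), not the test's Data and prediction types,
// and "temp.zip" stands for the checkpoint written earlier via engine.CheckPoint(ml, modelPath).
using System.IO;
using Microsoft.ML;
using Microsoft.ML.Transforms.TimeSeries;

class MyInput { public float Value; public MyInput(float value) { Value = value; } }
class MyOutput { public double[] Change; }

static class StatefulEngineSketch
{
    static void Run()
    {
        var ml = new MLContext(1);
        ITransformer model;
        using (var file = File.OpenRead("temp.zip"))
            model = ml.Model.Load(file, out _);

        // Recreating the engine from the checkpoint restores the transform's internal state.
        var engine = ml.Model.CreateTimeSeriesEngine<MyInput, MyOutput>(model);
        var first = engine.Predict(new MyInput(1));   // advances the internal state
        var second = engine.Predict(new MyInput(2));  // scored against the state updated by the first call
    }
}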
var prediction = engine.Predict(new Data(1)); Assert.Equal(-1, prediction.Random); @@ -388,7 +388,7 @@ public void SsaForecast() var enumerator = env.Data.CreateEnumerable(output, true).GetEnumerator(); ForecastPrediction row = null; - // [TEST_STABILITY]: MKL generates different percision float number on Dotnet Core 3.1 + // [TEST_STABILITY]: MKL generates different precision float number on Dotnet Core 3.1 // and cause the forecast result differs #if NETCOREAPP3_1 List expectedForecast = new List() { 0.191492021f, 2.53994060f, 5.26454258f, 7.37313938f }; @@ -628,7 +628,7 @@ public void AnomalyDetectionWithSrCnn(bool loadDataFromFile) outputDataView, reuseRowObject: false); int k = 0; - + foreach (var prediction in predictionColumn) { switch (mode) @@ -908,7 +908,7 @@ public void AnomalyDetectionWithSrCnn(bool loadDataFromFile) { for (int i = 0; i < data.Count; ++i) { - data[i].Value = - data[i].Value; + data[i].Value = -data[i].Value; } } @@ -954,7 +954,7 @@ public void AnomalyDetectionWithSrCnn(bool loadDataFromFile) [Fact] public void RootCauseLocalization() { - // Create an root cause localizatiom input + // Create an root cause localization input var rootCauseLocalizationInput = new RootCauseLocalizationInput(GetRootCauseTimestamp(), GetRootCauseAnomalyDimension("UK", _rootCauseAggSymbol), new List() { new MetricSlice(GetRootCauseTimestamp(), GetRootCauseLocalizationPoints(_rootCauseAggSymbol)) }, AggregateType.Sum, _rootCauseAggSymbol); var ml = new MLContext(1); @@ -981,7 +981,7 @@ public void RootCauseLocalization() [Fact] public void MultiDimensionalRootCauseLocalization() { - // Create an root cause localizatiom input + // Create an root cause localization input var rootCauseLocalizationInput = new RootCauseLocalizationInput(GetRootCauseTimestamp(), GetRootCauseAnomalyDimension("UK", _rootCauseAggSymbol), new List() { new MetricSlice(GetRootCauseTimestamp(), GetRootCauseLocalizationPoints(_rootCauseAggSymbol)) }, AggregateType.Sum, _rootCauseAggSymbol); var ml = new MLContext(1); @@ -1026,11 +1026,11 @@ public void MultiDimensionalRootCauseLocalization() [Fact] public void RootCauseLocalizationForNullDimValue() { - // Create an root cause localizatiom input + // Create an root cause localization input object rootCauseAggSymbolForNullDimValue = null; - List slice = new List - { - new MetricSlice(GetRootCauseTimestamp(), GetRootCauseLocalizationPoints(rootCauseAggSymbolForNullDimValue)) + List slice = new List + { + new MetricSlice(GetRootCauseTimestamp(), GetRootCauseLocalizationPoints(rootCauseAggSymbolForNullDimValue)) }; var rootCauseLocalizationInput = new RootCauseLocalizationInput(GetRootCauseTimestamp(), GetRootCauseAnomalyDimension("UK", rootCauseAggSymbolForNullDimValue), slice, AggregateType.Sum, rootCauseAggSymbolForNullDimValue); @@ -1044,7 +1044,7 @@ public void RootCauseLocalizationForNullDimValue() Assert.Single(rootCause.Items[0].Path); Assert.Equal("DataCenter", rootCause.Items[0].Path[0]); - Dictionary expectedDim = new Dictionary + Dictionary expectedDim = new Dictionary { {"Country", "UK" }, {"DeviceType", rootCauseAggSymbolForNullDimValue }, @@ -1177,7 +1177,7 @@ private static DateTime GetRootCauseTimestamp() [Fact] public void RootCauseLocalizationForIntDimValue() { - // Create an root cause localizatiom input + // Create an root cause localization input List slice = new List { new MetricSlice(GetRootCauseTimestamp(), GetRootCauseLocalizationPointsForIntDimValue()) @@ -1210,11 +1210,11 @@ public void RootCauseLocalizationForIntDimValue() [Fact] public void 
RootCauseLocalizationForDiffDimValueType() { - // Create an root cause localizatiom input + // Create an root cause localization input Dictionary expectedDim = GetRootCauseAnomalyDimension(10, _rootCauseAggSymbolForIntDimValue); - List slice = new List - { - new MetricSlice(GetRootCauseTimestamp(), GetRootCauseLocalizationPointsForIntDimValue()) + List slice = new List + { + new MetricSlice(GetRootCauseTimestamp(), GetRootCauseLocalizationPointsForIntDimValue()) }; var rootCauseLocalizationInput = new RootCauseLocalizationInput(GetRootCauseTimestamp(), expectedDim, slice, AggregateType.Sum, _rootCauseAggSymbolForDiffDimValueType); diff --git a/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesEstimatorTests.cs b/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesEstimatorTests.cs index f6b7943924..716b481957 100644 --- a/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesEstimatorTests.cs +++ b/test/Microsoft.ML.TimeSeries.Tests/TimeSeriesEstimatorTests.cs @@ -5,8 +5,8 @@ using System.Collections.Generic; using Microsoft.ML.Data; using Microsoft.ML.RunTests; -using Microsoft.ML.Transforms.TimeSeries; using Microsoft.ML.TestFramework.Attributes; +using Microsoft.ML.Transforms.TimeSeries; using Xunit; using Xunit.Abstractions; @@ -31,7 +31,7 @@ private class TestDataXY [VectorType(InputSize)] public float[] A; } - private class TestDataDifferntType + private class TestDataDifferentType { [VectorType(InputSize)] public string[] data_0; @@ -60,11 +60,11 @@ public void TestSsaChangePointEstimator() for (int i = 0; i < changeHistorySize; i++) data.Add(new Data(i * 100)); - var pipe = new SsaChangePointEstimator(Env, "Change", + var pipe = new SsaChangePointEstimator(Env, "Change", confidence, changeHistorySize, maxTrainingSize, seasonalitySize, "Value"); var xyData = new List { new TestDataXY() { A = new float[InputSize] } }; - var stringData = new List { new TestDataDifferntType() { data_0 = new string[InputSize] } }; + var stringData = new List { new TestDataDifferentType() { data_0 = new string[InputSize] } }; var invalidDataWrongNames = ML.Data.LoadFromEnumerable(xyData); var invalidDataWrongTypes = ML.Data.LoadFromEnumerable(stringData); @@ -100,7 +100,7 @@ public void TestSsaForecastingEstimator() confidenceUpperBoundColumn: "ConfidenceUpperBound"); var xyData = new List { new TestDataXY() { A = new float[InputSize] } }; - var stringData = new List { new TestDataDifferntType() { data_0 = new string[InputSize] } }; + var stringData = new List { new TestDataDifferentType() { data_0 = new string[InputSize] } }; var invalidDataWrongNames = ML.Data.LoadFromEnumerable(xyData); var invalidDataWrongTypes = ML.Data.LoadFromEnumerable(stringData); @@ -134,7 +134,7 @@ public void TestSsaSpikeEstimator() confidence, pValueHistorySize, maxTrainingSize, seasonalitySize, "Value"); var xyData = new List { new TestDataXY() { A = new float[InputSize] } }; - var stringData = new List { new TestDataDifferntType() { data_0 = new string[InputSize] } }; + var stringData = new List { new TestDataDifferentType() { data_0 = new string[InputSize] } }; var invalidDataWrongNames = ML.Data.LoadFromEnumerable(xyData); var invalidDataWrongTypes = ML.Data.LoadFromEnumerable(stringData); @@ -161,7 +161,7 @@ public void TestIidChangePointEstimator() "Change", confidence, changeHistorySize, "Value"); var xyData = new List { new TestDataXY() { A = new float[InputSize] } }; - var stringData = new List { new TestDataDifferntType() { data_0 = new string[InputSize] } }; + var stringData = new List { new TestDataDifferentType() { data_0 = new 
string[InputSize] } }; var invalidDataWrongNames = ML.Data.LoadFromEnumerable(xyData); var invalidDataWrongTypes = ML.Data.LoadFromEnumerable(stringData); @@ -188,7 +188,7 @@ public void TestIidSpikeEstimator() "Change", confidence, pValueHistorySize, "Value"); var xyData = new List { new TestDataXY() { A = new float[InputSize] } }; - var stringData = new List { new TestDataDifferntType() { data_0 = new string[InputSize] } }; + var stringData = new List { new TestDataDifferentType() { data_0 = new string[InputSize] } }; var invalidDataWrongNames = ML.Data.LoadFromEnumerable(xyData); var invalidDataWrongTypes = ML.Data.LoadFromEnumerable(stringData); diff --git a/test/RemoteExecutorConsoleApp/RemoteExecutorConsoleApp.cs b/test/RemoteExecutorConsoleApp/RemoteExecutorConsoleApp.cs index 13adabd9f6..113ee6a4c1 100644 --- a/test/RemoteExecutorConsoleApp/RemoteExecutorConsoleApp.cs +++ b/test/RemoteExecutorConsoleApp/RemoteExecutorConsoleApp.cs @@ -47,7 +47,7 @@ static int Main(string[] args) { a = Assembly.Load(assemblyName); } - catch(FileNotFoundException) + catch (FileNotFoundException) { a = Assembly.LoadFrom(assemblyName.Split(',')[0] + ".dll"); } @@ -110,7 +110,7 @@ static int Main(string[] args) catch (PlatformNotSupportedException) { } - + return exitCode; } diff --git a/tools-local/Microsoft.ML.InternalCodeAnalyzer/BaseTestClassAnalyzer.cs b/tools-local/Microsoft.ML.InternalCodeAnalyzer/BaseTestClassAnalyzer.cs index 1a595eb959..1347cf4565 100644 --- a/tools-local/Microsoft.ML.InternalCodeAnalyzer/BaseTestClassAnalyzer.cs +++ b/tools-local/Microsoft.ML.InternalCodeAnalyzer/BaseTestClassAnalyzer.cs @@ -21,7 +21,7 @@ public sealed class BaseTestClassAnalyzer : DiagnosticAnalyzer private const string Description = "Test classes should be derived from BaseTestClass or FunctionalTestBaseClass."; - private static DiagnosticDescriptor Rule = + private static readonly DiagnosticDescriptor Rule = new DiagnosticDescriptor(DiagnosticId, Title, Format, Category, DiagnosticSeverity.Warning, isEnabledByDefault: true, description: Description); @@ -89,7 +89,7 @@ public void AnalyzeNamedType(SymbolAnalysisContext context) private bool ExtendsBaseTestClass(INamedTypeSymbol namedType) { - if (_baseTestClass is null && + if (_baseTestClass is null && _ITbaseTestClass is null) return false; diff --git a/tools-local/Microsoft.ML.InternalCodeAnalyzer/BestFriendAnalyzer.cs b/tools-local/Microsoft.ML.InternalCodeAnalyzer/BestFriendAnalyzer.cs index 309c00a132..605f71184c 100644 --- a/tools-local/Microsoft.ML.InternalCodeAnalyzer/BestFriendAnalyzer.cs +++ b/tools-local/Microsoft.ML.InternalCodeAnalyzer/BestFriendAnalyzer.cs @@ -26,7 +26,7 @@ public sealed class BestFriendAnalyzer : DiagnosticAnalyzer AssemblyAttributeName + " assembly-level attribute set. 
Even with friend access to that " + "assembly, such a usage requires that the item have the " + AttributeName + " on it."; - private static DiagnosticDescriptor Rule = + private static readonly DiagnosticDescriptor Rule = new DiagnosticDescriptor(DiagnosticId, Title, Format, Category, DiagnosticSeverity.Warning, isEnabledByDefault: true, description: Description); diff --git a/tools-local/Microsoft.ML.InternalCodeAnalyzer/BestFriendOnPublicDeclarationsAnalyzer.cs b/tools-local/Microsoft.ML.InternalCodeAnalyzer/BestFriendOnPublicDeclarationsAnalyzer.cs index 75b6545f3d..8b1821013a 100644 --- a/tools-local/Microsoft.ML.InternalCodeAnalyzer/BestFriendOnPublicDeclarationsAnalyzer.cs +++ b/tools-local/Microsoft.ML.InternalCodeAnalyzer/BestFriendOnPublicDeclarationsAnalyzer.cs @@ -22,7 +22,7 @@ public sealed class BestFriendOnPublicDeclarationsAnalyzer : DiagnosticAnalyzer private const string Description = "The " + AttributeName + " attribute is not valid on public identifiers."; - private static DiagnosticDescriptor Rule = + private static readonly DiagnosticDescriptor Rule = new DiagnosticDescriptor(DiagnosticId, Title, Format, Category, DiagnosticSeverity.Warning, isEnabledByDefault: true, description: Description); @@ -49,7 +49,7 @@ private void CompilationStart(CompilationStartAnalysisContext context) if (attribute == null) continue; - + context.RegisterSymbolAction(c => AnalyzeCore(c, attribute), SymbolKind.NamedType, SymbolKind.Method, SymbolKind.Field, SymbolKind.Property); } } @@ -67,4 +67,4 @@ private void AnalyzeCore(SymbolAnalysisContext context, INamedTypeSymbol attribu context.ReportDiagnostic(diagnostic); } } -} \ No newline at end of file +} diff --git a/tools-local/Microsoft.ML.InternalCodeAnalyzer/ContractsCheckAnalyzer.cs b/tools-local/Microsoft.ML.InternalCodeAnalyzer/ContractsCheckAnalyzer.cs index 32993bafd0..ef2bcdce78 100644 --- a/tools-local/Microsoft.ML.InternalCodeAnalyzer/ContractsCheckAnalyzer.cs +++ b/tools-local/Microsoft.ML.InternalCodeAnalyzer/ContractsCheckAnalyzer.cs @@ -77,7 +77,7 @@ internal static class DecodeMessageWithLoadContextDiagnostic NameofDiagnostic.Rule, ExceptionDiagnostic.Rule, SimpleMessageDiagnostic.Rule, DecodeMessageWithLoadContextDiagnostic.Rule); - private static HashSet _targetSet = new HashSet(new[] + private static readonly HashSet _targetSet = new HashSet(new[] { "Check", "CheckUserArg", "CheckParam", "CheckParamValue", "CheckRef", "CheckValue", "CheckNonEmpty", "CheckNonWhiteSpace", "CheckDecode", "CheckIO", "CheckAlive", "CheckValueOrNull", diff --git a/tools-local/Microsoft.ML.InternalCodeAnalyzer/InstanceInitializerAnalyzer.cs b/tools-local/Microsoft.ML.InternalCodeAnalyzer/InstanceInitializerAnalyzer.cs index 5afc55aab7..d678267558 100644 --- a/tools-local/Microsoft.ML.InternalCodeAnalyzer/InstanceInitializerAnalyzer.cs +++ b/tools-local/Microsoft.ML.InternalCodeAnalyzer/InstanceInitializerAnalyzer.cs @@ -20,7 +20,7 @@ public sealed class InstanceInitializerAnalyzer : DiagnosticAnalyzer private const string Title = "No initializers on instance fields or properties"; private const string Format = "Member {0} has a {1} initializer outside the constructor"; - private static DiagnosticDescriptor Rule = + private static readonly DiagnosticDescriptor Rule = new DiagnosticDescriptor(DiagnosticId, Title, Format, Category, DiagnosticSeverity.Warning, isEnabledByDefault: true, description: Descriptions.InstanceInitializerInConstructor); diff --git a/tools-local/Microsoft.ML.InternalCodeAnalyzer/NameFixProvider.cs 
b/tools-local/Microsoft.ML.InternalCodeAnalyzer/NameFixProvider.cs index 4589441e6b..e6ae760e76 100644 --- a/tools-local/Microsoft.ML.InternalCodeAnalyzer/NameFixProvider.cs +++ b/tools-local/Microsoft.ML.InternalCodeAnalyzer/NameFixProvider.cs @@ -34,9 +34,9 @@ public sealed class NameFixProvider : CodeFixProvider private static ImmutableArray _fixable = ImmutableArray.Create( NameAnalyzer.PrivateFieldName.Id, NameAnalyzer.GeneralName.Id, ParameterVariableNameAnalyzer.Id, TypeParamNameAnalyzer.Id); - private static ImmutableHashSet _fixableSet = ImmutableHashSet.Empty.Union(_fixable); + private static readonly ImmutableHashSet _fixableSet = ImmutableHashSet.Empty.Union(_fixable); - private static Regex _sections = new Regex( + private static readonly Regex _sections = new Regex( @"(?:\p{Nd}\p{Ll}*)|" + // Numbers we consider a separate token. @"(?:\p{Lu}+(?!\p{Ll}))|" + // Completely upper case sections. @"(?:\p{Lu}\p{Ll}+)|" + // Title cased word. diff --git a/tools-local/Microsoft.ML.InternalCodeAnalyzer/ParameterVariableNameAnalyzer.cs b/tools-local/Microsoft.ML.InternalCodeAnalyzer/ParameterVariableNameAnalyzer.cs index db60887534..e626a14cbc 100644 --- a/tools-local/Microsoft.ML.InternalCodeAnalyzer/ParameterVariableNameAnalyzer.cs +++ b/tools-local/Microsoft.ML.InternalCodeAnalyzer/ParameterVariableNameAnalyzer.cs @@ -22,7 +22,7 @@ public sealed class ParameterVariableNameAnalyzer : DiagnosticAnalyzer private const string Description = "Parameter and local variable names should be lowerCamelCased."; - private static DiagnosticDescriptor Rule = + private static readonly DiagnosticDescriptor Rule = new DiagnosticDescriptor(Id, Title, Format, Category, DiagnosticSeverity.Warning, isEnabledByDefault: true, description: Description); @@ -59,4 +59,4 @@ private static void AnalyzeCore(SyntaxNodeAnalysisContext context, SyntaxToken i context.ReportDiagnostic(NameAnalyzer.CreateDiagnostic(Rule, identifier, NameType.CamelCased, type)); } } -} \ No newline at end of file +} diff --git a/tools-local/Microsoft.ML.InternalCodeAnalyzer/Properties/AssemblyInfo.cs b/tools-local/Microsoft.ML.InternalCodeAnalyzer/Properties/AssemblyInfo.cs index ff55309f5e..006193c19a 100644 --- a/tools-local/Microsoft.ML.InternalCodeAnalyzer/Properties/AssemblyInfo.cs +++ b/tools-local/Microsoft.ML.InternalCodeAnalyzer/Properties/AssemblyInfo.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. 
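// The alternatives in the _sections Regex in NameFixProvider.cs above split an identifier into
// number, acronym, and title-cased tokens; the full pattern continues past the lines shown.
// A small self-contained illustration using only the quoted alternatives:
using System;
using System.Linq;
using System.Text.RegularExpressions;

static class SectionsRegexSketch
{
    static void Main()
    {
        var sections = new Regex(
            @"(?:\p{Nd}\p{Ll}*)|" +       // numbers start a new token
            @"(?:\p{Lu}+(?!\p{Ll}))|" +   // completely upper-case run, e.g. an acronym
            @"(?:\p{Lu}\p{Ll}+)");        // title-cased word
        var tokens = sections.Matches("HTMLParser2x").Cast<Match>().Select(m => m.Value);
        Console.WriteLine(string.Join(" | ", tokens));  // HTML | Parser | 2x
    }
}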
diff --git a/tools-local/Microsoft.ML.InternalCodeAnalyzer/SingleVariableDeclarationAnalyzer.cs b/tools-local/Microsoft.ML.InternalCodeAnalyzer/SingleVariableDeclarationAnalyzer.cs index dc39e2b7b7..d88d9dae98 100644 --- a/tools-local/Microsoft.ML.InternalCodeAnalyzer/SingleVariableDeclarationAnalyzer.cs +++ b/tools-local/Microsoft.ML.InternalCodeAnalyzer/SingleVariableDeclarationAnalyzer.cs @@ -22,7 +22,7 @@ public sealed class SingleVariableDeclarationAnalyzer : DiagnosticAnalyzer private const string Description = "We prefer to have one variable per declaration."; - private static DiagnosticDescriptor Rule = + private static readonly DiagnosticDescriptor Rule = new DiagnosticDescriptor(DiagnosticId, Title, Format, Category, DiagnosticSeverity.Warning, isEnabledByDefault: true, description: Description); @@ -48,4 +48,4 @@ private static void Analyze(SyntaxNodeAnalysisContext context) context.ReportDiagnostic(diagnostic); } } -} \ No newline at end of file +} diff --git a/tools-local/Microsoft.ML.InternalCodeAnalyzer/TypeParamNameAnalyzer.cs b/tools-local/Microsoft.ML.InternalCodeAnalyzer/TypeParamNameAnalyzer.cs index 8ee7e70a36..6aa9895f32 100644 --- a/tools-local/Microsoft.ML.InternalCodeAnalyzer/TypeParamNameAnalyzer.cs +++ b/tools-local/Microsoft.ML.InternalCodeAnalyzer/TypeParamNameAnalyzer.cs @@ -21,7 +21,7 @@ public sealed class TypeParamNameAnalyzer : DiagnosticAnalyzer private const string Description = "Type parameter names should start with 'T' and the remainder PascalCased."; - private static DiagnosticDescriptor Rule = + private static readonly DiagnosticDescriptor Rule = new DiagnosticDescriptor(Id, Title, Format, Category, DiagnosticSeverity.Warning, isEnabledByDefault: true, description: Description); @@ -46,4 +46,4 @@ private static void Analyze(SyntaxNodeAnalysisContext context) context.ReportDiagnostic(NameAnalyzer.CreateDiagnostic(Rule, identifier, NameType.TPascalCased)); } } -} \ No newline at end of file +}