diff --git a/src/Microsoft.ML.AutoML/API/ExperimentSettings.cs b/src/Microsoft.ML.AutoML/API/ExperimentSettings.cs
index 21d08eca30..a32b0358f9 100644
--- a/src/Microsoft.ML.AutoML/API/ExperimentSettings.cs
+++ b/src/Microsoft.ML.AutoML/API/ExperimentSettings.cs
@@ -42,13 +42,13 @@ public abstract class ExperimentSettings
public CancellationToken CancellationToken { get; set; }
///
- /// This is a pointer to a directory where all models trained during the AutoML experiment will be saved.
+ /// This is the name of the directory where all models trained during the AutoML experiment will be saved.
/// If , models will be kept in memory instead of written to disk.
/// (Please note: for an experiment with high runtime operating on a large dataset, opting to keep models in
/// memory could cause a system to run out of memory.)
///
- /// The default value is the directory named "Microsoft.ML.AutoML" in the current user's temporary folder.
- public DirectoryInfo CacheDirectory { get; set; }
+ /// The default value is the directory named "Microsoft.ML.AutoML" in the location specified by the .
+ public string CacheDirectoryName { get; set; }
///
/// Whether AutoML should cache before ML.NET trainers.
@@ -66,10 +66,11 @@ public ExperimentSettings()
{
MaxExperimentTimeInSeconds = 24 * 60 * 60;
CancellationToken = default;
- CacheDirectory = new DirectoryInfo(Path.Combine(Path.GetTempPath(), "Microsoft.ML.AutoML"));
+ CacheDirectoryName = "Microsoft.ML.AutoML";
CacheBeforeTrainer = CacheBeforeTrainer.Auto;
MaxModels = int.MaxValue;
}
+
}
///
diff --git a/src/Microsoft.ML.AutoML/API/RankingExperiment.cs b/src/Microsoft.ML.AutoML/API/RankingExperiment.cs
index f6e039c3be..8709c122b0 100644
--- a/src/Microsoft.ML.AutoML/API/RankingExperiment.cs
+++ b/src/Microsoft.ML.AutoML/API/RankingExperiment.cs
@@ -35,6 +35,9 @@ public sealed class RankingExperimentSettings : ExperimentSettings
///
public uint OptimizationMetricTruncationLevel { get; set; }
+ ///
+ /// Initializes a new instance of .
+ ///
public RankingExperimentSettings()
{
OptimizingMetric = RankingMetric.Ndcg;
diff --git a/src/Microsoft.ML.AutoML/API/RegressionExperiment.cs b/src/Microsoft.ML.AutoML/API/RegressionExperiment.cs
index 438260a1eb..e86199be62 100644
--- a/src/Microsoft.ML.AutoML/API/RegressionExperiment.cs
+++ b/src/Microsoft.ML.AutoML/API/RegressionExperiment.cs
@@ -31,6 +31,9 @@ public sealed class RegressionExperimentSettings : ExperimentSettings
///
public ICollection Trainers { get; }
+ ///
+ /// Initializes a new instance of .
+ ///
public RegressionExperimentSettings()
{
OptimizingMetric = RegressionMetric.RSquared;
diff --git a/src/Microsoft.ML.AutoML/Experiment/Experiment.cs b/src/Microsoft.ML.AutoML/Experiment/Experiment.cs
index 8b8bbe09f3..da9793b9f2 100644
--- a/src/Microsoft.ML.AutoML/Experiment/Experiment.cs
+++ b/src/Microsoft.ML.AutoML/Experiment/Experiment.cs
@@ -56,7 +56,7 @@ public Experiment(MLContext context,
_experimentSettings = experimentSettings;
_metricsAgent = metricsAgent;
_trainerAllowList = trainerAllowList;
- _modelDirectory = GetModelDirectory(_experimentSettings.CacheDirectory);
+ _modelDirectory = GetModelDirectory(_context.TempFilePath, _experimentSettings.CacheDirectoryName);
_datasetColumnInfo = datasetColumnInfo;
_runner = runner;
_logger = logger;
@@ -140,7 +140,7 @@ public IList Execute()
// Pseudo random number generator to result in deterministic runs with the provided main MLContext's seed and to
// maintain variability between training iterations.
- int? mainContextSeed = ((ISeededEnvironment)_context.Model.GetEnvironment()).Seed;
+ int? mainContextSeed = ((IHostEnvironmentInternal)_context.Model.GetEnvironment()).Seed;
_newContextSeedGenerator = (mainContextSeed.HasValue) ? RandomUtils.Create(mainContextSeed.Value) : null;
do
@@ -220,14 +220,14 @@ public IList Execute()
return iterationResults;
}
- private static DirectoryInfo GetModelDirectory(DirectoryInfo rootDir)
+ private static DirectoryInfo GetModelDirectory(string tempDirectory, string cacheDirectoryName)
{
- if (rootDir == null)
+ if (cacheDirectoryName == null)
{
return null;
}
- var experimentDirFullPath = Path.Combine(rootDir.FullName, $"experiment_{Path.GetRandomFileName()}");
+ var experimentDirFullPath = Path.Combine(tempDirectory, cacheDirectoryName, $"experiment_{Path.GetRandomFileName()}");
var experimentDirInfo = new DirectoryInfo(experimentDirFullPath);
if (!experimentDirInfo.Exists)
{
diff --git a/src/Microsoft.ML.Core/Data/IHostEnvironment.cs b/src/Microsoft.ML.Core/Data/IHostEnvironment.cs
index e010cf335e..1bfd5c3230 100644
--- a/src/Microsoft.ML.Core/Data/IHostEnvironment.cs
+++ b/src/Microsoft.ML.Core/Data/IHostEnvironment.cs
@@ -83,12 +83,17 @@ internal interface ICancelable
}
[BestFriend]
- internal interface ISeededEnvironment : IHostEnvironment
+ internal interface IHostEnvironmentInternal : IHostEnvironment
{
///
/// The seed property that, if assigned, makes components requiring randomness behave deterministically.
///
int? Seed { get; }
+
+ ///
+ /// The location for the temp files created by ML.NET.
+ ///
+ string TempFilePath { get; set; }
}
///
diff --git a/src/Microsoft.ML.Core/Environment/HostEnvironmentBase.cs b/src/Microsoft.ML.Core/Environment/HostEnvironmentBase.cs
index 4a3efeec3d..37eebc4322 100644
--- a/src/Microsoft.ML.Core/Environment/HostEnvironmentBase.cs
+++ b/src/Microsoft.ML.Core/Environment/HostEnvironmentBase.cs
@@ -93,7 +93,7 @@ internal interface IMessageSource
/// query progress.
///
[BestFriend]
- internal abstract class HostEnvironmentBase : ChannelProviderBase, ISeededEnvironment, IChannelProvider, ICancelable
+ internal abstract class HostEnvironmentBase : ChannelProviderBase, IHostEnvironmentInternal, IChannelProvider, ICancelable
where TEnv : HostEnvironmentBase
{
void ICancelable.CancelExecution()
@@ -326,6 +326,10 @@ public void RemoveListener(Action listenerFunc)
}
}
+#pragma warning disable MSML_NoInstanceInitializers // Need this to have a default value in case the user doesn't set it.
+ public string TempFilePath { get; set; } = System.IO.Path.GetTempPath();
+#pragma warning restore MSML_NoInstanceInitializers
+
protected readonly TEnv Root;
// This is non-null iff this environment was a fork of another. Disposing a fork
// doesn't free temp files. That is handled when the master is disposed.
diff --git a/src/Microsoft.ML.Data/Commands/TrainTestCommand.cs b/src/Microsoft.ML.Data/Commands/TrainTestCommand.cs
index 58907622cb..3291a5f4d7 100644
--- a/src/Microsoft.ML.Data/Commands/TrainTestCommand.cs
+++ b/src/Microsoft.ML.Data/Commands/TrainTestCommand.cs
@@ -188,7 +188,7 @@ private void RunCore(IChannel ch, string cmd)
ILegacyDataLoader testPipe;
bool hasOutfile = !string.IsNullOrEmpty(ImplOptions.OutputModelFile);
- var tempFilePath = hasOutfile ? null : Path.GetTempFileName();
+ var tempFilePath = hasOutfile ? null : Path.Combine(((IHostEnvironmentInternal)Host).TempFilePath, Path.GetRandomFileName());
using (var file = new SimpleFileHandle(ch, hasOutfile ? ImplOptions.OutputModelFile : tempFilePath, true, !hasOutfile))
{
diff --git a/src/Microsoft.ML.Data/DataLoadSave/DataOperationsCatalog.cs b/src/Microsoft.ML.Data/DataLoadSave/DataOperationsCatalog.cs
index 268766ed88..e7dbc556d4 100644
--- a/src/Microsoft.ML.Data/DataLoadSave/DataOperationsCatalog.cs
+++ b/src/Microsoft.ML.Data/DataLoadSave/DataOperationsCatalog.cs
@@ -534,7 +534,7 @@ internal static string CreateSplitColumn(IHostEnvironment env, ref IDataView dat
}
else if(fallbackInEnvSeed)
{
- ISeededEnvironment seededEnv = (ISeededEnvironment)env;
+ IHostEnvironmentInternal seededEnv = (IHostEnvironmentInternal)env;
seedToUse = seededEnv.Seed;
}
else
diff --git a/src/Microsoft.ML.Data/MLContext.cs b/src/Microsoft.ML.Data/MLContext.cs
index ccb708addc..b653f61b2e 100644
--- a/src/Microsoft.ML.Data/MLContext.cs
+++ b/src/Microsoft.ML.Data/MLContext.cs
@@ -14,7 +14,7 @@ namespace Microsoft.ML
/// create components for data preparation, feature enginering, training, prediction, model evaluation.
/// It also allows logging, execution control, and the ability set repeatable random numbers.
///
- public sealed class MLContext : ISeededEnvironment
+ public sealed class MLContext : IHostEnvironmentInternal
{
// REVIEW: consider making LocalEnvironment and MLContext the same class instead of encapsulation.
private readonly LocalEnvironment _env;
@@ -79,6 +79,15 @@ public sealed class MLContext : ISeededEnvironment
///
public ComponentCatalog ComponentCatalog => _env.ComponentCatalog;
+ ///
+ /// Gets or sets the location for the temp files created by ML.NET.
+ ///
+ public string TempFilePath
+ {
+ get { return _env.TempFilePath; }
+ set { _env.TempFilePath = value; }
+ }
+
///
/// Create the ML context.
///
@@ -140,7 +149,7 @@ private void ProcessMessage(IMessageSource source, ChannelMessage message)
IChannel IChannelProvider.Start(string name) => _env.Start(name);
IPipe IChannelProvider.StartPipe(string name) => _env.StartPipe(name);
IProgressChannel IProgressChannelProvider.StartProgressChannel(string name) => _env.StartProgressChannel(name);
- int? ISeededEnvironment.Seed => _env.Seed;
+ int? IHostEnvironmentInternal.Seed => _env.Seed;
[BestFriend]
internal void CancelExecution() => ((ICancelable)_env).CancelExecution();
diff --git a/src/Microsoft.ML.OnnxTransformer/OnnxTransform.cs b/src/Microsoft.ML.OnnxTransformer/OnnxTransform.cs
index df94857931..6a4d98750c 100644
--- a/src/Microsoft.ML.OnnxTransformer/OnnxTransform.cs
+++ b/src/Microsoft.ML.OnnxTransformer/OnnxTransform.cs
@@ -227,7 +227,7 @@ private OnnxTransformer(IHostEnvironment env, Options options, byte[] modelBytes
{
// Entering this region means that the byte[] is passed as the model. To feed that byte[] to ONNXRuntime, we need
// to create a temporal file to store it and then call ONNXRuntime's API to load that file.
- Model = OnnxModel.CreateFromBytes(modelBytes, options.GpuDeviceId, options.FallbackToCpu, shapeDictionary: shapeDictionary);
+ Model = OnnxModel.CreateFromBytes(modelBytes, env, options.GpuDeviceId, options.FallbackToCpu, shapeDictionary: shapeDictionary);
}
}
catch (OnnxRuntimeException e)
@@ -304,7 +304,7 @@ private protected override void SaveModel(ModelSaveContext ctx)
ctx.CheckAtModel();
ctx.SetVersionInfo(GetVersionInfo());
- ctx.SaveBinaryStream("OnnxModel", w => { w.WriteByteArray(File.ReadAllBytes(Model.ModelFile)); });
+ ctx.SaveBinaryStream("OnnxModel", w => { w.WriteByteArray(File.ReadAllBytes(Model.ModelStream.Name)); });
Host.CheckNonEmpty(Inputs, nameof(Inputs));
ctx.Writer.Write(Inputs.Length);
diff --git a/src/Microsoft.ML.OnnxTransformer/OnnxUtils.cs b/src/Microsoft.ML.OnnxTransformer/OnnxUtils.cs
index cdba65a4b1..c44a0cda7e 100644
--- a/src/Microsoft.ML.OnnxTransformer/OnnxUtils.cs
+++ b/src/Microsoft.ML.OnnxTransformer/OnnxUtils.cs
@@ -143,14 +143,9 @@ public OnnxVariableInfo(string name, OnnxShape shape, Type typeInOnnxRuntime, Da
///
private readonly InferenceSession _session;
///
- /// Indicates if is a temporal file created by
- /// or . If , should delete .
+ /// The FileStream holding onto the loaded ONNX model.
///
- private bool _ownModelFile;
- ///
- /// The location where the used ONNX model loaded from.
- ///
- internal string ModelFile { get; }
+ internal FileStream ModelStream { get; }
///
/// The ONNX model's information from ONNXRuntime's perspective. ML.NET can change the input and output of that model in some ways.
/// For example, ML.NET can shuffle the inputs so that the i-th ONNX input becomes the j-th input column of .
@@ -172,9 +167,7 @@ public OnnxVariableInfo(string name, OnnxShape shape, Type typeInOnnxRuntime, Da
public OnnxModel(string modelFile, int? gpuDeviceId = null, bool fallbackToCpu = false,
bool ownModelFile=false, IDictionary shapeDictionary = null)
{
- ModelFile = modelFile;
// If we don't own the model file, _disposed should be false to prevent deleting user's file.
- _ownModelFile = ownModelFile;
_disposed = false;
if (gpuDeviceId != null)
@@ -202,9 +195,15 @@ public OnnxModel(string modelFile, int? gpuDeviceId = null, bool fallbackToCpu =
{
// Load ONNX model file and parse its input and output schema. The reason of doing so is that ONNXRuntime
// doesn't expose full type information via its C# APIs.
- ModelFile = modelFile;
var model = new OnnxCSharpToProtoWrapper.ModelProto();
- using (var modelStream = File.OpenRead(modelFile))
+ // If we own the model file, set the DeleteOnClose flag so it is always deleted.
+ if (ownModelFile)
+ ModelStream = new FileStream(modelFile, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, FileOptions.DeleteOnClose);
+ else
+ ModelStream = new FileStream(modelFile, FileMode.Open, FileAccess.Read);
+
+ // The CodedInputStream auto-closes the stream it reads from, and our main stream must stay open, so we open a separate one here.
+ using (var modelStream = new FileStream(modelFile, FileMode.Open, FileAccess.Read, FileShare.Delete | FileShare.Read))
using (var codedStream = Google.Protobuf.CodedInputStream.CreateWithLimits(modelStream, Int32.MaxValue, 10))
model = OnnxCSharpToProtoWrapper.ModelProto.Parser.ParseFrom(codedStream);
@@ -322,33 +321,35 @@ private static bool CheckOnnxShapeCompatibility(IEnumerable left, IEnumerab
///
/// Create an OnnxModel from a byte[]. Usually, a ONNX model is consumed by as a file.
- /// With and ,
+ /// With and ,
/// it's possible to use in-memory model (type: byte[]) to create .
///
/// Bytes of the serialized model
- public static OnnxModel CreateFromBytes(byte[] modelBytes)
+ /// IHostEnvironment
+ public static OnnxModel CreateFromBytes(byte[] modelBytes, IHostEnvironment env)
{
- return CreateFromBytes(modelBytes, null, false);
+ return CreateFromBytes(modelBytes, env, null, false);
}
///
/// Create an OnnxModel from a byte[]. Set execution to GPU if required.
/// Usually, a ONNX model is consumed by as a file.
- /// With and
- /// ,
+ /// With and
+ /// ,
/// it's possible to use in-memory model (type: byte[]) to create .
///
/// Bytes of the serialized model.
+ /// IHostEnvironment
/// GPU device ID to execute on. Null for CPU.
/// If true, resumes CPU execution quietly upon GPU error.
/// User-provided shapes. If the key "myTensorName" is associated
/// with the value [1, 3, 5], the shape of "myTensorName" will be set to [1, 3, 5].
/// The shape loaded from would be overwritten.
/// An
- public static OnnxModel CreateFromBytes(byte[] modelBytes, int? gpuDeviceId = null, bool fallbackToCpu = false,
+ public static OnnxModel CreateFromBytes(byte[] modelBytes, IHostEnvironment env, int? gpuDeviceId = null, bool fallbackToCpu = false,
IDictionary shapeDictionary = null)
{
- var tempModelFile = Path.GetTempFileName();
+ var tempModelFile = Path.Combine(((IHostEnvironmentInternal)env).TempFilePath, Path.GetRandomFileName());
File.WriteAllBytes(tempModelFile, modelBytes);
return new OnnxModel(tempModelFile, gpuDeviceId, fallbackToCpu,
ownModelFile: true, shapeDictionary: shapeDictionary);
@@ -366,7 +367,7 @@ public IDisposableReadOnlyCollection Run(List
- /// Flag used to indicate if the unmanaged resources (aka the model file
+ /// Flag used to indicate if the unmanaged resources (aka the model file handle
/// and ) have been deleted.
///
private bool _disposed;
@@ -378,8 +379,7 @@ public void Dispose()
}
///
- /// There are two unmanaged resources we can dispose, and
- /// if is .
+ /// There are two unmanaged resources we can dispose, and
///
///
private void Dispose(bool disposing)
@@ -391,9 +391,8 @@ private void Dispose(bool disposing)
{
// First, we release the resource token by ONNXRuntime.
_session.Dispose();
- // Second, we delete the model file if that file is not created by the user.
- if (_ownModelFile && File.Exists(ModelFile))
- File.Delete(ModelFile);
+ // Second, dispose of the model file stream.
+ ModelStream.Dispose();
}
_disposed = true;
}
diff --git a/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs b/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs
index 1537ab10cf..6f88331829 100644
--- a/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs
+++ b/src/Microsoft.ML.TensorFlow/TensorflowTransform.cs
@@ -143,7 +143,7 @@ private static TensorFlowTransformer Create(IHostEnvironment env, ModelLoadConte
return new TensorFlowTransformer(env, LoadTFSession(env, modelBytes), outputs, inputs, null, false, addBatchDimensionInput, treatOutputAsBatched: treatOutputAsBatched);
}
- var tempDirPath = Path.GetFullPath(Path.Combine(Path.GetTempPath(), nameof(TensorFlowTransformer) + "_" + Guid.NewGuid()));
+ var tempDirPath = Path.GetFullPath(Path.Combine(((IHostEnvironmentInternal)env).TempFilePath, nameof(TensorFlowTransformer) + "_" + Guid.NewGuid()));
CreateFolderWithAclIfNotExists(env, tempDirPath);
try
{
diff --git a/src/Microsoft.ML.TensorFlow/TensorflowUtils.cs b/src/Microsoft.ML.TensorFlow/TensorflowUtils.cs
index 8fbbd772a0..022c09965f 100644
--- a/src/Microsoft.ML.TensorFlow/TensorflowUtils.cs
+++ b/src/Microsoft.ML.TensorFlow/TensorflowUtils.cs
@@ -630,9 +630,9 @@ public void Dispose()
}
}
- internal static string GetTemporaryDirectory()
+ internal static string GetTemporaryDirectory(IHostEnvironment env)
{
- string tempDirectory = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName());
+ string tempDirectory = Path.Combine(((IHostEnvironmentInternal)env).TempFilePath, Path.GetRandomFileName());
Directory.CreateDirectory(tempDirectory);
return tempDirectory;
}
diff --git a/src/Microsoft.ML.Vision/DnnRetrainTransform.cs b/src/Microsoft.ML.Vision/DnnRetrainTransform.cs
index 240b2773f7..53a9d0c352 100644
--- a/src/Microsoft.ML.Vision/DnnRetrainTransform.cs
+++ b/src/Microsoft.ML.Vision/DnnRetrainTransform.cs
@@ -116,7 +116,7 @@ private static DnnRetrainTransformer Create(IHostEnvironment env, ModelLoadConte
null, false, addBatchDimensionInput, 1);
}
- var tempDirPath = Path.GetFullPath(Path.Combine(Path.GetTempPath(), nameof(DnnRetrainTransformer) + "_" + Guid.NewGuid()));
+ var tempDirPath = Path.GetFullPath(Path.Combine(((IHostEnvironmentInternal)env).TempFilePath, nameof(DnnRetrainTransformer) + "_" + Guid.NewGuid()));
CreateFolderWithAclIfNotExists(env, tempDirPath);
try
{
diff --git a/src/Microsoft.ML.Vision/ImageClassificationTrainer.cs b/src/Microsoft.ML.Vision/ImageClassificationTrainer.cs
index e5fe486ae5..7824838dea 100644
--- a/src/Microsoft.ML.Vision/ImageClassificationTrainer.cs
+++ b/src/Microsoft.ML.Vision/ImageClassificationTrainer.cs
@@ -487,7 +487,7 @@ public sealed class Options : TrainerInputBaseWithLabel
private readonly bool _cleanupWorkspace;
private int _classCount;
private Graph Graph => _session.graph;
- private static readonly string _resourcePath = Path.Combine(Path.GetTempPath(), "MLNET");
+ private readonly string _resourcePath;
private readonly string _sizeFile;
///
@@ -531,10 +531,11 @@ internal ImageClassificationTrainer(IHostEnvironment env, Options options)
Host.CheckNonEmpty(options.ScoreColumnName, nameof(options.ScoreColumnName));
Host.CheckNonEmpty(options.PredictedLabelColumnName, nameof(options.PredictedLabelColumnName));
tf.compat.v1.disable_eager_execution();
+ _resourcePath = Path.Combine(((IHostEnvironmentInternal)env).TempFilePath, "MLNET");
if (string.IsNullOrEmpty(options.WorkspacePath))
{
- options.WorkspacePath = GetTemporaryDirectory();
+ options.WorkspacePath = GetTemporaryDirectory(env);
_cleanupWorkspace = true;
}
@@ -1319,7 +1320,7 @@ private void AddTransferLearningLayer(string labelColumn,
}
- private static TensorFlowSessionWrapper LoadTensorFlowSessionFromMetaGraph(IHostEnvironment env, Architecture arch)
+ private TensorFlowSessionWrapper LoadTensorFlowSessionFromMetaGraph(IHostEnvironment env, Architecture arch)
{
var modelFileName = ModelFileName[arch];
var modelFilePath = Path.Combine(_resourcePath, modelFileName);
diff --git a/test/Microsoft.ML.OnnxTransformerTest/OnnxTransformTests.cs b/test/Microsoft.ML.OnnxTransformerTest/OnnxTransformTests.cs
index 2e3f95693b..202b3171c1 100644
--- a/test/Microsoft.ML.OnnxTransformerTest/OnnxTransformTests.cs
+++ b/test/Microsoft.ML.OnnxTransformerTest/OnnxTransformTests.cs
@@ -681,16 +681,15 @@ public void TestOnnxModelDisposal()
var modelInBytes = File.ReadAllBytes(modelFile);
// Create ONNX model from the byte[].
- var onnxModel = OnnxModel.CreateFromBytes(modelInBytes);
+ var onnxModel = OnnxModel.CreateFromBytes(modelInBytes, ML);
// Check if a temporal file is crated for storing the byte[].
- Assert.True(File.Exists(onnxModel.ModelFile));
+ Assert.True(File.Exists(onnxModel.ModelStream.Name));
// Delete the temporal file.
onnxModel.Dispose();
-
// Make sure the temporal file is deleted.
- Assert.False(File.Exists(onnxModel.ModelFile));
+ Assert.False(File.Exists(onnxModel.ModelStream.Name));
}
[OnnxFact]
@@ -703,13 +702,13 @@ public void TestOnnxModelNotDisposal()
var onnxModel = new OnnxModel(modelFile);
// Check if a temporal file is crated for storing the byte[].
- Assert.True(File.Exists(onnxModel.ModelFile));
+ Assert.True(File.Exists(onnxModel.ModelStream.Name));
// Don't delete the temporal file!
onnxModel.Dispose();
// Make sure the temporal file still exists.
- Assert.True(File.Exists(onnxModel.ModelFile));
+ Assert.True(File.Exists(onnxModel.ModelStream.Name));
}
private class OnnxMapInput
@@ -859,7 +858,7 @@ private void TryModelWithCustomShapesHelper(IDictionary shapeDict
}
});
- // Define a ONNX transform, trains it, and apply it to the input data.
+ // Define an ONNX transform, train it, and apply it to the input data.
var pipeline = ML.Transforms.ApplyOnnxModel(new[] { "outa", "outb" }, new[] { "ina", "inb" },
modelFile, shapeDictionary, gpuDeviceId: _gpuDeviceId, fallbackToCpu: _fallbackToCpu);
}
diff --git a/test/Microsoft.ML.PerformanceTests/FeaturizeTextBench.cs b/test/Microsoft.ML.PerformanceTests/FeaturizeTextBench.cs
index fdb7475a8c..5916de4082 100644
--- a/test/Microsoft.ML.PerformanceTests/FeaturizeTextBench.cs
+++ b/test/Microsoft.ML.PerformanceTests/FeaturizeTextBench.cs
@@ -25,7 +25,6 @@ public class FeaturizeTextBench : BenchmarkBase
[GlobalSetup]
public void SetupData()
{
- Path.GetTempFileName();
_mlContext = new MLContext(seed: 1);
var path = Path.GetTempFileName();
Console.WriteLine($"Created dataset in temporary file:\n{path}\n");
diff --git a/test/Microsoft.ML.PerformanceTests/TextLoaderBench.cs b/test/Microsoft.ML.PerformanceTests/TextLoaderBench.cs
index 73abceed60..2aabb2f91b 100644
--- a/test/Microsoft.ML.PerformanceTests/TextLoaderBench.cs
+++ b/test/Microsoft.ML.PerformanceTests/TextLoaderBench.cs
@@ -28,7 +28,6 @@ public class TextLoaderBench : BenchmarkBase
[GlobalSetup]
public void SetupData()
{
- Path.GetTempFileName();
_mlContext = new MLContext(seed: 1);
var path = Path.GetTempFileName();
Console.WriteLine($"Created dataset in temporary file:\n{path}\n");