MLModel.consumption.cs
// This file was auto-generated by ML.NET Model Builder.

using Microsoft.ML;
using Microsoft.ML.Data;
using System;
using System.Linq;
using System.IO;
using System.Collections.Generic;

namespace LanguageSentimentAnalysis
{
    public partial class MLModel
    {
        /// <summary>
        /// Model input class for MLModel.
        /// </summary>
        #region model input class
        public class ModelInput
        {
            [ColumnName(@"ReviewText")]
            public string ReviewText { get; set; }

            [ColumnName(@"Sentiment")]
            public string Sentiment { get; set; }
        }
        #endregion

        /// <summary>
        /// Model output class for MLModel.
        /// </summary>
        #region model output class
        public class ModelOutput
        {
            [ColumnName(@"ReviewText")]
            public float[] ReviewText { get; set; }

            [ColumnName(@"Sentiment")]
            public uint Sentiment { get; set; }

            [ColumnName(@"Features")]
            public float[] Features { get; set; }

            [ColumnName(@"PredictedLabel")]
            public string PredictedLabel { get; set; }

            [ColumnName(@"Score")]
            public float[] Score { get; set; }
        }
        #endregion

        // Path to the trained model file, resolved relative to the working directory.
        private static string MLNetModelPath = Path.GetFullPath("MLModel.zip");

        // The prediction engine is created lazily (with thread-safe initialization) on first use and reused afterwards.
        public static readonly Lazy<PredictionEngine<ModelInput, ModelOutput>> PredictEngine =
            new Lazy<PredictionEngine<ModelInput, ModelOutput>>(() => CreatePredictEngine(), true);

        /// <summary>
        /// Use this method to predict on <see cref="ModelInput"/>.
        /// </summary>
        /// <param name="input">Model input.</param>
        /// <returns><seealso cref="ModelOutput"/></returns>
        public static ModelOutput Predict(ModelInput input)
        {
            var predEngine = PredictEngine.Value;
            return predEngine.Predict(input);
        }

        // Loads MLModel.zip and builds a prediction engine for single-row scoring.
        private static PredictionEngine<ModelInput, ModelOutput> CreatePredictEngine()
        {
            var mlContext = new MLContext();
            ITransformer mlModel = mlContext.Model.Load(MLNetModelPath, out var _);
            return mlContext.Model.CreatePredictionEngine<ModelInput, ModelOutput>(mlModel);
        }
    }
}
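
Below is a minimal consumption sketch (not part of the generated file) showing how a console app might call the generated Predict method. It assumes the trained MLModel.zip has been copied next to the executable; the Program wrapper and the sample review text are illustrative assumptions.

using System;
using LanguageSentimentAnalysis;

internal class Program
{
    private static void Main()
    {
        // Only ReviewText needs a value at prediction time; Sentiment is the
        // label column and can be left unset. The sample text is illustrative.
        var input = new MLModel.ModelInput
        {
            ReviewText = "The documentation was clear and easy to follow."
        };

        // Predict reuses the lazily created PredictionEngine under the hood.
        MLModel.ModelOutput result = MLModel.Predict(input);

        Console.WriteLine($"Predicted label: {result.PredictedLabel}");
        Console.WriteLine($"Scores: {string.Join(", ", result.Score)}");
    }
}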