DEEP-13, DEEP-39: AI model completely reimplemented. Dashboard UI implemented. Namespace style changed to file-scoped.

This commit is contained in:
Andrey Shabarshov 2023-07-30 16:20:41 +01:00
parent af6c75a5ac
commit bfb3de9331
36 changed files with 1180 additions and 1088 deletions

View File

@@ -2,30 +2,29 @@
using Microsoft.AspNetCore.Mvc;
using System.Text;

namespace DeepTrace.Controllers;

[ApiController]
[Route("api/[controller]")]
public class DownloadController : Controller
{
    private readonly IModelStorageService _modelService;

    public DownloadController(IModelStorageService modelService)
    {
        _modelService = modelService;
    }

    [HttpGet("mldata/{modelName}")]
    public async Task<FileContentResult> GetMLDataCsv([FromRoute] string modelName)
    {
        var models = await _modelService.Load();
        var model = models.FirstOrDefault(x => x.Name == modelName) ?? throw new ApplicationException($"Model {modelName} not found");
        var csv = model.ToCsv();
        return new(Encoding.UTF8.GetBytes(csv), "text/csv")
        {
            FileDownloadName = modelName + ".csv"
        };
    }
}
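
With the attribute routes above, the export resolves to GET /api/download/mldata/{modelName} (the [controller] token expands to "Download"). A minimal client-side sketch; host, port, and model name are hypothetical:

    // Inside an async method: download the training CSV for a model named "cpu-anomaly".
    using var client = new HttpClient { BaseAddress = new Uri("https://localhost:5001") };
    var csvBytes = await client.GetByteArrayAsync("api/download/mldata/cpu-anomaly");
    await File.WriteAllBytesAsync("cpu-anomaly.csv", csvBytes);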

View File

@@ -4,31 +4,31 @@
    </DialogContent>
    <DialogActions>
        @if( IsYesNoCancel )
        {
            <MudButton Color="MudBlazor.Color.Primary" OnClick="Yes">Yes</MudButton>
            <MudButton OnClick="No">No</MudButton>
            <MudButton OnClick="Cancel">Cancel</MudButton>
        }
        else
        {
            if (AllowCancel)
            {
                <MudButton OnClick="Cancel">Cancel</MudButton>
            }
            <MudButton Color="MudBlazor.Color.Primary" OnClick="Submit">Ok</MudButton>
        }
    </DialogActions>
</MudDialog>

@code {
    [CascadingParameter] MudDialogInstance? MudDialog { get; set; }
    [Parameter] public bool AllowCancel { get; set; }
    [Parameter] public string Text { get; set; } = "";
    [Parameter] public bool IsYesNoCancel { get; set; } = false;

    void Submit() => MudDialog?.Close(DialogResult.Ok(true));
    void Cancel() => MudDialog?.Cancel();
    void Yes() => MudDialog?.Close(DialogResult.Ok(true));
    void No() => MudDialog?.Close(DialogResult.Ok(false));
}

View File

@@ -7,10 +7,16 @@
@inject IDialogService DialogService
@inject IModelStorageService ModelService
@inject ITrainedModelStorageService TrainedModelService
@inject ILogger<ModelCard> Logger
@inject IMLProcessorFactory MlProcessorFactory

<style>
    .card {
        max-width: 250pt;
    }
</style>

<MudCard Class="card mb-3">
    <MudCardHeader>
        <CardHeaderContent>
            <MudText Typo="Typo.h6">@Model?.Name</MudText>

@@ -21,6 +27,7 @@
    </MudCardHeader>
    <MudCardContent>
        <MudText>Current state: @_prediction.PredictedLabel</MudText>
        <MudText>@_updated.ToString("HH:mm:ss")</MudText>
    </MudCardContent>
</MudCard>

@@ -29,7 +36,9 @@
    public TrainedModelDefinition? Model { get; set; }

    private ModelDefinition _modelDefinition = new();
    private Prediction _prediction = new();
    private IMLProcessor? _mlProcessor;
    private DateTime _updated = DateTime.MinValue;

    protected override async Task OnAfterRenderAsync(bool firstRender)
    {

@@ -38,6 +47,7 @@
            return;
        }
        _modelDefinition = (await ModelService.Load(Model.Id)) ?? _modelDefinition;
        _mlProcessor = MlProcessorFactory.Create();

#pragma warning disable CS4014
        Task.Run(PredictionLoop);

@@ -87,15 +97,20 @@
                await PredictAnomaly(startDate, endDate);
                startDate = endDate;
            }
            catch (Exception e)
            {
                Logger.LogError(e, e.Message);
            }
        }
    }

    private async Task PredictAnomaly(DateTime startDate, DateTime endDate)
    {
        if (Model == null || !Model.IsEnabled)
        {
            _prediction = new Prediction { PredictedLabel = "Idle" };
            return;
        }

        // use automatic step value to always request 500 elements
        var seconds = (endDate - startDate).TotalSeconds / 500.0;

@@ -150,8 +165,9 @@
            );
        }

        _prediction = await _mlProcessor!.Predict(Model, _modelDefinition, data);
        _updated = DateTime.Now;
        await InvokeAsync(StateHasChanged);
    }

    private async Task ShowError(string text)

View File

@@ -9,8 +9,8 @@
    OnZoomed="OnZoomed"
>
    @foreach (var ts in _currentData.Series)
    {
        <ApexPointSeries TItem="TimeSeries"
                         Name="@ts.Name"
                         Items="@ts.Data"
                         SeriesType="SeriesType.Line"

@@ -18,164 +18,164 @@
                         YAggregate="@(e => (decimal)e.Sum(e => e.Value))"
                         ShowDataLabels="false"
        />
    }
</ApexChart>

@code {
    [CascadingParameter]
    protected bool IsDarkMode { get; set; }

    [Parameter] public TimeSeriesData? Data { get; set; }
    [Parameter] public DateTime? MinDate { get; set; }
    [Parameter] public DateTime? MaxDate { get; set; }
    [Parameter] public EventCallback<DateTime?> MinDateChanged { get; set; }
    [Parameter] public EventCallback<DateTime?> MaxDateChanged { get; set; }

    private ApexChart<TimeSeries>? _chart;
    private ApexChartOptions<TimeSeries>? _options;
    private TimeSeriesData _currentData = new() { Series = { new() } };

    protected override void OnInitialized()
    {
        _options = CreateOptions();
        base.OnInitialized();
    }

    protected override async Task OnParametersSetAsync()
    {
        Console.WriteLine("OnParametersSet");
        await UpdateChart();
        await base.OnParametersSetAsync();
    }

    private async Task UpdateChart()
    {
        if (Data == _currentData)
            return;

        _currentData = Data ?? new() { Series = { new() } };
        _options = CreateOptions();

        if (_chart == null)
            return;

        //await InvokeAsync(StateHasChanged);
        await _chart!.UpdateSeriesAsync();
        await _chart!.UpdateOptionsAsync(true, true, true);
        await InvokeAsync(StateHasChanged);
    }

    private ApexChartOptions<TimeSeries> CreateOptions()
    {
        var backgroundColor = IsDarkMode ? "var(--mud-palette-surface)" : "#f3f3f3";
        var gridColor = IsDarkMode ? "var(--mud-palette-drawer-background)" : "#f3f3f3";
        var borderColor = IsDarkMode ? "var(--mud-palette-text-primary)" : "#e7e7e7";
        var lineColors = _currentData.Series.Select(x => x.Color).ToList();
        var mode = IsDarkMode
            ? Mode.Dark
            : Mode.Light
            ;
        var options = new ApexChartOptions<TimeSeries>
        {
            Chart = new()
            {
                Background = backgroundColor,
                Toolbar = new()
                {
                    Show = true
                },
                DropShadow = new()
                {
                    Enabled = false,
                    Color = "",
                    Top = 18,
                    Left = 7,
                    Blur = 10,
                    Opacity = 0.2d
                }
            },
            DataLabels = new()
            {
                Enabled = false
            },
            Tooltip = new ApexCharts.Tooltip
            {
                Y = new()
                {
                    Formatter = @"function(value, opts) {
                        if (value === undefined) {return '';}
                        return Number(value).toLocaleString();}",
                },
                X = new()
                {
                    Formatter = @"function(value, opts) {
                        if (value === undefined) {return '';}
                        return (new Date(value)).toISOString();}",
                }
            },
            Xaxis = new()
            {
                Type = XAxisType.Datetime
            },
            Grid = new()
            {
                BorderColor = borderColor,
                Row = new()
                {
                    Colors = new List<string> { gridColor, "transparent" },
                    Opacity = 0.5d
                }
            },
            Colors = lineColors,
            //Markers = new() { Shape = ShapeEnum.Circle, Size = 2, FillOpacity = new Opacity(0.8d) },
            Stroke = new() { Curve = Curve.Straight, Width = 2 },
            Legend = new()
            {
                Position = LegendPosition.Top,
                HorizontalAlign = ApexCharts.Align.Right,
                Floating = true,
                OffsetX = -5,
                OffsetY = -25
            },
            Theme = new()
            {
                Mode = mode,
                Palette = PaletteType.Palette8,
            }
        };
        return options;
    }

    private void OnZoomed(ZoomedData<TimeSeries> zoomedData)
    {
        if (zoomedData.XAxis?.Min == null && zoomedData.XAxis?.Max == null)
            return;

        var xMin = zoomedData.XAxis?.Min == null
            ? _currentData!.Series.First().Data.Min(e => e.TimeStamp.Date)
            : DateTimeOffset.FromUnixTimeMilliseconds((long)zoomedData.XAxis.Min)
            ;
        var xMax = zoomedData.XAxis?.Max == null
            ? _currentData!.Series.First().Data.Max(e => e.TimeStamp.Date)
            : DateTimeOffset.FromUnixTimeMilliseconds((long)zoomedData.XAxis.Max)
            ;

        MinDate = xMin.UtcDateTime;
        MinDateChanged.InvokeAsync(MinDate);
        MaxDate = xMax.UtcDateTime;
        MaxDateChanged.InvokeAsync(MaxDate);
    }
}

View File

@@ -14,13 +14,13 @@
        <h4>@Text</h4>
        <MudTextField T="string" ReadOnly="true" Text="@_progressText"></MudTextField>
        @if (_isTraining == false && _evaluationMetrics != null)
        {
            <MudText>MicroAccuracy: @_evaluationMetrics.MicroAccuracy.ToString("N6")</MudText>
            <MudText>MacroAccuracy: @_evaluationMetrics.MacroAccuracy.ToString("N6")</MudText>
            <MudText>LogLoss: @_evaluationMetrics.LogLoss.ToString("N6")</MudText>
            <MudText>LogLossReduction: @_evaluationMetrics.LogLossReduction.ToString("N6")</MudText>
        }
    </DialogContent>

@@ -29,32 +29,43 @@
    </DialogActions>
</MudDialog>

@code {
    [CascadingParameter] MudDialogInstance? MudDialog { get; set; }
    [Parameter] public MLProcessor? Processor { get; set; }
    [Parameter] public ModelDefinition? Model { get; set; }
    [Parameter] public string Text { get; set; } = "";

    private string _progressText = "";
    private bool _isTraining = true;
    private MLEvaluationMetrics? _evaluationMetrics;

    void Submit() => MudDialog?.Close(DialogResult.Ok(true));

    protected override async Task OnAfterRenderAsync(bool firstRender)
    {
        if (!firstRender || Processor == null || Model == null)
        {
            return;
        }
        try
        {
            _evaluationMetrics = await Processor.Train(Model, UpdateProgress);
        }
        catch (Exception e)
        {
            _progressText = "ERROR: " + e.Message;
        }
        finally
        {
            _isTraining = false;
            await InvokeAsync(StateHasChanged);
        }
    }

    private async void UpdateProgress(string message)
    {
        _progressText = message;
        await InvokeAsync(StateHasChanged);
    }
}

View File

@@ -29,4 +29,55 @@ public class DataSourceDefinition
    public string Description { get; set; } = string.Empty;

    public override string ToString() => Name;

    public List<string> GetColumnNames()
    {
        var measureNames = new[] { "min", "max", "avg", "mean" };
        var columnNames = new List<string>();
        foreach (var item in Queries)
        {
            columnNames.AddRange(measureNames.Select(x => $"{item.Query}_{x}"));
        }
        return columnNames;
    }

    public static string ConvertToCsv(List<TimeSeriesDataSet> source)
    {
        var data = "";
        for (var i = 0; i < source.Count; i++)
        {
            var queryData = source[i];
            var min  = queryData.Data.Min(x => x.Value);
            var max  = queryData.Data.Max(x => x.Value);
            var avg  = queryData.Data.Average(x => x.Value);
            var mean = queryData.Data.Sum(x => x.Value) / queryData.Data.Count;
            data += min + "," + max + "," + avg + "," + mean + ",";
        }
        return data.TrimEnd(',');
    }

    public static float[] ToFeatures(List<TimeSeriesDataSet> source)
    {
        var data = new float[source.Count * 4];
        for (var i = 0; i < source.Count; i++)
        {
            var queryData = source[i];
            var min  = queryData.Data.Min(x => x.Value);
            var max  = queryData.Data.Max(x => x.Value);
            var avg  = queryData.Data.Average(x => x.Value);
            var mean = queryData.Data.Sum(x => x.Value) / queryData.Data.Count;
            data[i*4 + 0] = min;
            data[i*4 + 1] = max;
            data[i*4 + 2] = avg;
            data[i*4 + 3] = mean;
        }
        return data;
    }
}
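
The flattened layout matters for everything downstream: each TimeSeriesDataSet contributes four consecutive slots, so N queries produce a 4N-element vector whose order must match GetColumnNames(). Note also that avg (Average) and mean (Sum/Count) compute the same arithmetic mean two ways, so the two slots currently duplicate each other. A small sketch of the layout, with hypothetical data:

    // For two queries the feature vector is:
    // [ q0_min, q0_max, q0_avg, q0_mean, q1_min, q1_max, q1_avg, q1_mean ]
    var features = DataSourceDefinition.ToFeatures(twoQuerySets); // twoQuerySets: a List<TimeSeriesDataSet> of length 2
    System.Diagnostics.Debug.Assert(features.Length == 2 * 4);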

View File

@@ -1,22 +1,21 @@
namespace DeepTrace.Data;

public class IntervalDefinition
{
    public IntervalDefinition() { }

    public IntervalDefinition(DateTime from, DateTime to, string name)
    {
        From = from;
        To = to;
        Name = name;
    }

    public DateTime From { get; set; } = DateTime.MinValue;
    public DateTime To { get; set; } = DateTime.MaxValue;
    public string Name { get; set; } = string.Empty;
    public List<TimeSeriesDataSet> Data { get; set; } = new();
}

View File

@@ -5,185 +5,188 @@ using System;
using System.Linq;
using System.IO;
using System.Collections.Generic;

namespace DeepTrace;

#pragma warning disable CS8618 // Non-nullable field must contain a non-null value when exiting constructor. Consider declaring as nullable.
public partial class MLModel1
{
    /// <summary>
    /// model input class for MLModel1.
    /// </summary>
    #region model input class
    public class ModelInput
    {
        [ColumnName(@"Q1min")]
        public string Q1min { get; set; }
        [ColumnName(@"Q1max")]
        public string Q1max { get; set; }
        [ColumnName(@"Q1avg")]
        public string Q1avg { get; set; }
        [ColumnName(@"Q1mean")]
        public string Q1mean { get; set; }
        [ColumnName(@"Q2min")]
        public string Q2min { get; set; }
        [ColumnName(@"Q2max")]
        public string Q2max { get; set; }
        [ColumnName(@"Q2avg")]
        public string Q2avg { get; set; }
        [ColumnName(@"Q2mean")]
        public string Q2mean { get; set; }
        [ColumnName(@"Q3min")]
        public string Q3min { get; set; }
        [ColumnName(@"Q3max")]
        public string Q3max { get; set; }
        [ColumnName(@"Q3avg")]
        public string Q3avg { get; set; }
        [ColumnName(@"Q3mean")]
        public string Q3mean { get; set; }
        [ColumnName(@"Q4min")]
        public string Q4min { get; set; }
        [ColumnName(@"Q4max")]
        public string Q4max { get; set; }
        [ColumnName(@"Q4avg")]
        public string Q4avg { get; set; }
        [ColumnName(@"Q4mean")]
        public string Q4mean { get; set; }
        [ColumnName(@"Q5min")]
        public string Q5min { get; set; }
        [ColumnName(@"Q5max")]
        public string Q5max { get; set; }
        [ColumnName(@"Q5avg")]
        public string Q5avg { get; set; }
        [ColumnName(@"Q5mean")]
        public string Q5mean { get; set; }
        [ColumnName(@"Name")]
        public string Name { get; set; }
    }
    #endregion

    /// <summary>
    /// model output class for MLModel1.
    /// </summary>
    #region model output class
    public class ModelOutput
    {
        [ColumnName(@"Q1min")]
        public string Q1min { get; set; }
        [ColumnName(@"Q1max")]
        public float[] Q1max { get; set; }
        [ColumnName(@"Q1avg")]
        public float[] Q1avg { get; set; }
        [ColumnName(@"Q1mean")]
        public float[] Q1mean { get; set; }
        [ColumnName(@"Q2min")]
        public float[] Q2min { get; set; }
        [ColumnName(@"Q2max")]
        public float[] Q2max { get; set; }
        [ColumnName(@"Q2avg")]
        public float[] Q2avg { get; set; }
        [ColumnName(@"Q2mean")]
        public float[] Q2mean { get; set; }
        [ColumnName(@"Q3min")]
        public float[] Q3min { get; set; }
        [ColumnName(@"Q3max")]
        public float[] Q3max { get; set; }
        [ColumnName(@"Q3avg")]
        public float[] Q3avg { get; set; }
        [ColumnName(@"Q3mean")]
        public float[] Q3mean { get; set; }
        [ColumnName(@"Q4min")]
        public string Q4min { get; set; }
        [ColumnName(@"Q4max")]
        public float[] Q4max { get; set; }
        [ColumnName(@"Q4avg")]
        public float[] Q4avg { get; set; }
        [ColumnName(@"Q4mean")]
        public float[] Q4mean { get; set; }
        [ColumnName(@"Q5min")]
        public float[] Q5min { get; set; }
        [ColumnName(@"Q5max")]
        public float[] Q5max { get; set; }
        [ColumnName(@"Q5avg")]
        public float[] Q5avg { get; set; }
        [ColumnName(@"Q5mean")]
        public float[] Q5mean { get; set; }
        [ColumnName(@"Name")]
        public uint Name { get; set; }
        [ColumnName(@"Features")]
        public float[] Features { get; set; }
        [ColumnName(@"PredictedLabel")]
        public string PredictedLabel { get; set; }
        [ColumnName(@"Score")]
        public float[] Score { get; set; }
    }
    #endregion
#pragma warning restore CS8618 // Non-nullable field must contain a non-null value when exiting constructor. Consider declaring as nullable.

    private static string MLNetModelPath = Path.GetFullPath("MLModel1.zip");

    public static readonly Lazy<PredictionEngine<ModelInput, ModelOutput>> PredictEngine = new Lazy<PredictionEngine<ModelInput, ModelOutput>>(() => CreatePredictEngine(), true);

    /// <summary>
    /// Use this method to predict on <see cref="ModelInput"/>.
    /// </summary>
    /// <param name="input">model input.</param>
    /// <returns><seealso cref="ModelOutput"/></returns>
    public static ModelOutput Predict(ModelInput input)
    {
        var predEngine = PredictEngine.Value;
        return predEngine.Predict(input);
    }

    private static PredictionEngine<ModelInput, ModelOutput> CreatePredictEngine()
    {
        var mlContext = new MLContext();
        ITransformer mlModel = mlContext.Model.Load(MLNetModelPath, out var _);
        return mlContext.Model.CreatePredictionEngine<ModelInput, ModelOutput>(mlModel);
    }
}
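
Since the generated engine is lazy, the first Predict call pays the cost of loading MLModel1.zip from the working directory. A hedged usage sketch, assuming the zip is deployed next to the binary (values hypothetical; every input is a string because the generated pipeline featurizes text columns):

    var input = new MLModel1.ModelInput
    {
        Q1min = "0", Q1max = "0", Q1avg = "0", Q1mean = "0",
        Q2min = "0", Q2max = "0", Q2avg = "0", Q2mean = "0",
        Q3min = "0", Q3max = "0", Q3avg = "0", Q3mean = "0",
        Q4min = "0", Q4max = "0", Q4avg = "0", Q4mean = "0",
        Q5min = "0", Q5max = "0", Q5avg = "0", Q5mean = "0",
        Name = ""
    };
    var output = MLModel1.Predict(input); // first call loads MLModel1.zip via the lazy engine
    Console.WriteLine($"{output.PredictedLabel} ({string.Join(", ", output.Score)})");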

View File

@@ -9,56 +9,55 @@ using Microsoft.ML.Trainers.FastTree;
using Microsoft.ML.Trainers;
using Microsoft.ML;

namespace DeepTrace;

public partial class MLModel1
{
    /// <summary>
    /// Retrains model using the pipeline generated as part of the training process. For more information on how to load data, see aka.ms/loaddata.
    /// </summary>
    /// <param name="mlContext"></param>
    /// <param name="trainData"></param>
    /// <returns></returns>
    public static ITransformer RetrainPipeline(MLContext mlContext, IDataView trainData)
    {
        var pipeline = BuildPipeline(mlContext);
        var model = pipeline.Fit(trainData);

        return model;
    }

    /// <summary>
    /// build the pipeline that is used from model builder. Use this function to retrain model.
    /// </summary>
    /// <param name="mlContext"></param>
    /// <returns></returns>
    public static IEstimator<ITransformer> BuildPipeline(MLContext mlContext)
    {
        // Data process configuration with pipeline data transformations
        var pipeline = mlContext.Transforms.Text.FeaturizeText(inputColumnName: @"Q1max", outputColumnName: @"Q1max")
            .Append(mlContext.Transforms.Text.FeaturizeText(inputColumnName: @"Q1avg", outputColumnName: @"Q1avg"))
            .Append(mlContext.Transforms.Text.FeaturizeText(inputColumnName: @"Q1mean", outputColumnName: @"Q1mean"))
            .Append(mlContext.Transforms.Text.FeaturizeText(inputColumnName: @"Q2min", outputColumnName: @"Q2min"))
            .Append(mlContext.Transforms.Text.FeaturizeText(inputColumnName: @"Q2max", outputColumnName: @"Q2max"))
            .Append(mlContext.Transforms.Text.FeaturizeText(inputColumnName: @"Q2avg", outputColumnName: @"Q2avg"))
            .Append(mlContext.Transforms.Text.FeaturizeText(inputColumnName: @"Q2mean", outputColumnName: @"Q2mean"))
            .Append(mlContext.Transforms.Text.FeaturizeText(inputColumnName: @"Q3min", outputColumnName: @"Q3min"))
            .Append(mlContext.Transforms.Text.FeaturizeText(inputColumnName: @"Q3max", outputColumnName: @"Q3max"))
            .Append(mlContext.Transforms.Text.FeaturizeText(inputColumnName: @"Q3avg", outputColumnName: @"Q3avg"))
            .Append(mlContext.Transforms.Text.FeaturizeText(inputColumnName: @"Q3mean", outputColumnName: @"Q3mean"))
            .Append(mlContext.Transforms.Text.FeaturizeText(inputColumnName: @"Q4max", outputColumnName: @"Q4max"))
            .Append(mlContext.Transforms.Text.FeaturizeText(inputColumnName: @"Q4avg", outputColumnName: @"Q4avg"))
            .Append(mlContext.Transforms.Text.FeaturizeText(inputColumnName: @"Q4mean", outputColumnName: @"Q4mean"))
            .Append(mlContext.Transforms.Text.FeaturizeText(inputColumnName: @"Q5min", outputColumnName: @"Q5min"))
            .Append(mlContext.Transforms.Text.FeaturizeText(inputColumnName: @"Q5max", outputColumnName: @"Q5max"))
            .Append(mlContext.Transforms.Text.FeaturizeText(inputColumnName: @"Q5avg", outputColumnName: @"Q5avg"))
            .Append(mlContext.Transforms.Text.FeaturizeText(inputColumnName: @"Q5mean", outputColumnName: @"Q5mean"))
            .Append(mlContext.Transforms.Concatenate(@"Features", new[] { @"Q1max", @"Q1avg", @"Q1mean", @"Q2min", @"Q2max", @"Q2avg", @"Q2mean", @"Q3min", @"Q3max", @"Q3avg", @"Q3mean", @"Q4max", @"Q4avg", @"Q4mean", @"Q5min", @"Q5max", @"Q5avg", @"Q5mean" }))
            .Append(mlContext.Transforms.Conversion.MapValueToKey(outputColumnName: @"Name", inputColumnName: @"Name"))
            .Append(mlContext.MulticlassClassification.Trainers.OneVersusAll(binaryEstimator: mlContext.BinaryClassification.Trainers.FastTree(new FastTreeBinaryTrainer.Options() { NumberOfLeaves = 33, MinimumExampleCountPerLeaf = 14, NumberOfTrees = 4, MaximumBinCountPerFeature = 1022, FeatureFraction = 0.99999999, LearningRate = 0.757926844134433, LabelColumnName = @"Name", FeatureColumnName = @"Features" }), labelColumnName: @"Name"))
            .Append(mlContext.Transforms.Conversion.MapKeyToValue(outputColumnName: @"PredictedLabel", inputColumnName: @"PredictedLabel"));

        return pipeline;
    }
}

View File

@@ -2,6 +2,7 @@
using MongoDB.Bson.Serialization.Attributes;
using MongoDB.Bson;
using System.Text;
using DeepTrace.ML;

namespace DeepTrace.Data;

@@ -15,28 +16,21 @@ public class ModelDefinition
    }

    [BsonId]
    public ObjectId? Id { get; set; }

    public string Name { get; set; }
    public DataSourceStorage DataSource { get; set; } = new();
    public string AIparameters { get; set; } = string.Empty;
    public List<IntervalDefinition> IntervalDefinitionList { get; set; } = new();

    public List<string> GetColumnNames() => DataSource.GetColumnNames()
        .Concat(new[] { "Name" })
        .ToList()
        ;

    public string ToCsv()
    {
        var current = IntervalDefinitionList.First();
        var headers = string.Join(",", GetColumnNames().Select(x => $"\"{x}\""));
        var writer = new StringBuilder();

@@ -45,30 +39,24 @@ public class ModelDefinition
        foreach (var currentInterval in IntervalDefinitionList)
        {
            var source = currentInterval.Data;
            string data = DataSourceDefinition.ConvertToCsv(source);
            data += $",\"{currentInterval.Name}\"";
            writer.AppendLine(data);
        }
        return writer.ToString();
    }

    public IEnumerable<MLInputData> ToInput()
    {
        foreach (var currentInterval in IntervalDefinitionList)
        {
            var source = currentInterval.Data;
            yield return new MLInputData
            {
                Features = DataSourceDefinition.ToFeatures(source),
                Label = currentInterval.Name
            };
        }
    }
}

View File

@@ -4,9 +4,9 @@ namespace DeepTrace.Data;
public class Prediction
{
    [ColumnName("PredictedLabel")]
    public string PredictedLabel { get; set; } = string.Empty;

    [ColumnName("Score")]
    public float[] Score { get; set; } = Array.Empty<float>();
}

View File

@@ -1,14 +1,14 @@
using MongoDB.Bson.Serialization.Attributes;
using MongoDB.Bson;

namespace DeepTrace.Data;

public class TrainedModelDefinition
{
    [BsonId]
    public ObjectId? Id { get; set; }
    public bool IsEnabled { get; set; } = false;
    public string Name { get; set; } = string.Empty;
    public DataSourceDefinition? DataSource { get; set; }
    public byte[] Value { get; set; } = Array.Empty<byte>(); //base64
}

View File

@@ -1,47 +1,31 @@
using DeepTrace.Data;
using Microsoft.ML;
using Microsoft.ML.Trainers;

namespace DeepTrace.ML;

public class EstimatorBuilder : IEstimatorBuilder
{
    public IEstimator<ITransformer> BuildPipeline(MLContext mlContext, ModelDefinition model)
    {
        return
            mlContext.Transforms.NormalizeMinMax(inputColumnName: nameof(MLInputData.Features), outputColumnName: "Features")
            .Append(mlContext.Transforms.Conversion.MapValueToKey(inputColumnName: nameof(MLInputData.Label), outputColumnName: "Label"))
            // .AppendCacheCheckpoint(mlContext)
            .Append(mlContext.MulticlassClassification.Trainers.OneVersusAll(
                binaryEstimator: mlContext.BinaryClassification.Trainers.LbfgsLogisticRegression(
                    new LbfgsLogisticRegressionBinaryTrainer.Options
                    {
                        L1Regularization = 1F,
                        L2Regularization = 1F,
                        LabelColumnName = "Label",
                        FeatureColumnName = "Features"
                    }
                ))
            )
            .Append(mlContext.Transforms.Conversion.MapKeyToValue(nameof(MLOutputData.PredictedLabel), inputColumnName: "PredictedLabel"));
    }
}
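
The reimplemented pipeline replaces the old per-column FeaturizeText chain with a much smaller graph: min-max normalization of the pre-aggregated Features vector, key-encoding of Label, one-vs-all L-BFGS logistic regression, and a key-to-value map back to the label text. A hedged end-to-end sketch using the types from this commit (modelDefinition is assumed to be a populated ModelDefinition, inside an async method):

    var mlContext = new MLContext();
    IEstimatorBuilder estimatorBuilder = new EstimatorBuilder();
    var pipeline = estimatorBuilder.BuildPipeline(mlContext, modelDefinition);
    var data = await MLHelpers.ToInput(mlContext, modelDefinition); // in-memory IDataView over MLInputData rows
    var transformer = pipeline.Fit(data);                           // ready for a prediction engine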

View File

@@ -1,10 +1,9 @@
using DeepTrace.Data;
using Microsoft.ML;

namespace DeepTrace.ML;

public interface IEstimatorBuilder
{
    IEstimator<ITransformer> BuildPipeline(MLContext mlContext, ModelDefinition model);
}

View File

@@ -10,3 +10,8 @@ public interface IMLProcessor
    void Import(byte[] data);
    Task<Prediction> Predict(TrainedModelDefinition trainedModel, ModelDefinition model, List<TimeSeriesDataSet> data);
}

public interface IMLProcessorFactory
{
    IMLProcessor Create();
}
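
The factory exists so consumers (ModelCard, the Training page) can each get a fresh MLProcessor, and with it a private PredictionEngine, which is not thread-safe, instead of sharing one instance. The container wiring is not part of this diff; a plausible Program.cs sketch, assumptions and all:

    // Assumed registrations; the actual Program.cs is not shown in this commit.
    builder.Services.AddSingleton<IEstimatorBuilder, EstimatorBuilder>();
    builder.Services.AddSingleton<IMLProcessorFactory, MLProcessorFactory>();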

View File

@@ -1,11 +1,10 @@
using PrometheusAPI;

namespace DeepTrace.ML;

public interface IMeasure
{
    public string Name { get; }
    void Reset();
    float Calculate(IEnumerable<TimeSeries> data);
}

View File

@@ -1,16 +1,15 @@
namespace DeepTrace.ML;

public class MLEvaluationMetrics
{
    public MLEvaluationMetrics()
    {
    }

    public double MicroAccuracy { get; set; }
    public double MacroAccuracy { get; set; }
    public double LogLoss { get; set; }
    public double LogLossReduction { get; set; }
}

View File

@@ -7,6 +7,21 @@ namespace DeepTrace.ML;
public record ModelRecord(MLContext Context, DataViewSchema Schema, ITransformer Transformer);

public class MLInputData
{
    public string Label { get; set; } = "Normal operation";
    public float[] Features { get; set; } = Array.Empty<float>();
}

public class MLOutputData
{
    public string PredictedLabel { get; set; } = string.Empty;
    public float[] Score { get; set; } = Array.Empty<float>();
}

public static class MLHelpers
{
    public static byte[] ExportSingleModel(ModelRecord model)

@@ -32,10 +47,22 @@ public static class MLHelpers
        await File.WriteAllTextAsync(fileName, csv);

        return (LoadFromCsv(mlContext, model, fileName), fileName);
    }

    public static Task<IDataView> ToInput(MLContext mlContext, ModelDefinition model)
    {
        var input = model.ToInput().ToList();

        // VectorType attribute with dynamic dimension
        // https://github.com/dotnet/machinelearning/issues/164
        var schemaDef = SchemaDefinition.Create(typeof(MLInputData));
        schemaDef["Features"].ColumnType = new VectorDataViewType(NumberDataViewType.Single, input.First().Features.Length);

        return Task.FromResult(mlContext.Data.LoadFromEnumerable(input, schemaDef));
    }

    public static IDataView LoadFromCsv(MLContext mlContext, ModelDefinition model, string fileName)
    {
        var columnNames = model.GetColumnNames();
        var columns = columnNames

@@ -43,8 +70,14 @@ public static class MLHelpers
            .ToArray()
            ;

        var view = mlContext.Data.LoadFromTextFile(
            fileName,
            columns,
            separatorChar: ',',
            hasHeader: true,
            allowQuoting: true,
            trimWhitespace: true);

        return view;
    }
}

View File

@@ -1,137 +1,155 @@
using DeepTrace.Data;
using Microsoft.ML;
using Microsoft.ML.Data;
using PrometheusAPI;
using System.Data;

namespace DeepTrace.ML;

internal class MLProcessorFactory : IMLProcessorFactory
{
    private readonly ILogger<MLProcessor> _logger;
    private IEstimatorBuilder _estimatorBuilder;

    public MLProcessorFactory(ILogger<MLProcessor> logger, IEstimatorBuilder estimatorBuilder)
    {
        _logger = logger;
        _estimatorBuilder = estimatorBuilder;
    }

    public IMLProcessor Create() => new MLProcessor(_logger, _estimatorBuilder);
}

/// <summary>
/// Wrapper for ML.NET operations.
/// </summary>
public class MLProcessor : IMLProcessor
{
    private readonly ILogger<MLProcessor> _logger;
    private MLContext _mlContext = new MLContext();
    private IEstimatorBuilder _estimatorBuilder;
    private DataViewSchema? _schema;
    private ITransformer? _transformer;
    private static string _signature = "DeepTrace-Model-v1-" + typeof(MLProcessor).Name;
    private PredictionEngine<MLInputData, MLOutputData>? _predictionEngine;

    public MLProcessor(ILogger<MLProcessor> logger, IEstimatorBuilder estimatorBuilder)
    {
        _logger = logger;
        _estimatorBuilder = estimatorBuilder;
    }

    private string Name { get; set; } = "TestModel";

    public async Task<MLEvaluationMetrics> Train(ModelDefinition modelDef, Action<string> log)
    {
        _logger.LogInformation("Training started");
        Name = modelDef.Name;

        var pipeline = _estimatorBuilder.BuildPipeline(_mlContext, modelDef);
        var data = await MLHelpers.ToInput(_mlContext, modelDef);
        DataOperationsCatalog.TrainTestData dataSplit = _mlContext.Data.TrainTestSplit(data, testFraction: 0.2);
        _mlContext.Log += (_, e) => LogEvents(log, e);
        try
        {
            _schema = data.Schema;
            _transformer = pipeline.Fit(dataSplit.TrainSet);
            return Evaluate(dataSplit.TestSet);
        }
        finally
        {
            _logger.LogInformation("Training finished");
        }
    }

    private void LogEvents(Action<string> log, LoggingEventArgs e)
    {
        if (e.Kind.ToString() != "Trace")
        {
            _logger.LogDebug(e.Message);
            log(e.Message);
        }
    }

    private MLEvaluationMetrics Evaluate(IDataView testData)
    {
        // https://learn.microsoft.com/en-us/dotnet/api/microsoft.ml.standardtrainerscatalog.lbfgslogisticregression?view=ml-dotnet
        var predictions = _transformer!.Transform(testData);
        var metrics = _mlContext.MulticlassClassification.Evaluate(predictions, nameof(MLInputData.Label));
        var evaluationMetrics = new MLEvaluationMetrics()
        {
            MicroAccuracy = metrics.MicroAccuracy,
            MacroAccuracy = metrics.MacroAccuracy,
            LogLoss = metrics.LogLoss,
            LogLossReduction = metrics.LogLossReduction,
        };
        return evaluationMetrics;
    }

    public byte[] Export()
    {
        if (_schema == null)
        {
            throw new ArgumentNullException(nameof(_schema));
        }
        if (_transformer == null)
        {
            throw new ArgumentNullException(nameof(_transformer));
        }

        using var mem = new MemoryStream();
        mem.WriteString(_signature);
        mem.WriteString(Name);

        var bytes = MLHelpers.ExportSingleModel(new ModelRecord(_mlContext, _schema, _transformer));
        mem.WriteInt(bytes.Length);
        mem.Write(bytes);

        return mem.ToArray();
    }

    public void Import(byte[] data)
    {
        var mem = new MemoryStream(data);
        var sig = mem.ReadString();
        if (sig != _signature)
            throw new ApplicationException($"Wrong data for {GetType().Name}");

        Name = mem.ReadString();

        var size = mem.ReadInt();
        var bytes = new byte[size];
        mem.Read(bytes, 0, bytes.Length);

        (_mlContext, _schema, _transformer) = MLHelpers.ImportSingleModel(bytes);
    }

    public Task<Prediction> Predict(TrainedModelDefinition trainedModel, ModelDefinition model, List<TimeSeriesDataSet> data)
    {
        Name = trainedModel.Name;
        if (_transformer == null)
            Import(trainedModel.Value);

        if (_predictionEngine == null)
        {
            _predictionEngine = _mlContext.Model.CreatePredictionEngine<MLInputData, MLOutputData>(_transformer, _schema);
        }

        var input = new MLInputData
        {
            Features = DataSourceDefinition.ToFeatures(data)
        };
        var prediction = _predictionEngine.Predict(input);
        return Task.FromResult(new Prediction { PredictedLabel = prediction.PredictedLabel, Score = prediction.Score });
    }
}
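
Read together with the factory above, the intended lifecycle appears to be: train once, persist the Export() blob into TrainedModelDefinition.Value, and let Predict() import and build its engine lazily on first use. A hedged sketch (factory is an injected IMLProcessorFactory; Train and Export are shown here on MLProcessor, so the cast is an assumption in case IMLProcessor does not expose them):

    var processor = (MLProcessor)factory.Create();
    var metrics = await processor.Train(modelDefinition, Console.WriteLine); // streams ML.NET progress to the console
    var trained = new TrainedModelDefinition
    {
        Name      = modelDefinition.Name,
        IsEnabled = true,
        Value     = processor.Export(), // signature + model name + serialized transformer
    };
    var prediction = await processor.Predict(trained, modelDefinition, latestData); // latestData: List<TimeSeriesDataSet>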

View File

@@ -1,89 +1,87 @@
using PrometheusAPI;

namespace DeepTrace.ML;

public class MeasureMin : IMeasure
{
    public string Name => "Min";
    public float Calculate(IEnumerable<TimeSeries> data) =>
        data
        .Where(x => x.Value != 0.0f)
        .Min(x => x.Value)
        ;
    public void Reset() { }
}

public class MeasureMax : IMeasure
{
    public string Name => "Max";
    public float Calculate(IEnumerable<TimeSeries> data) => data.Max(x => x.Value);
    public void Reset() { }
}

public class MeasureAvg : IMeasure
{
    public string Name => "Avg";
    public float Calculate(IEnumerable<TimeSeries> data) => data.Average(x => x.Value);
    public void Reset() { }
}

/// <summary>
/// WARNING: Only works with fixed length interval
/// </summary>
public class MeasureSum : IMeasure
{
    public string Name => "Sum";
    public float Calculate(IEnumerable<TimeSeries> data) => data.Sum(x => x.Value);
    public void Reset() { }
}

public class MeasureMedian : IMeasure
{
    public string Name => "Median";
    public float Calculate(IEnumerable<TimeSeries> data)
        => MedianHelper.Median(data, x => x.Value);
    public void Reset() { }
}

public class MeasureDiff<T> : IMeasure where T : IMeasure, new()
{
    private T _measure = new();
    public string Name => "Diff_" + _measure.Name;

    private float _prev = float.NaN;

    public float Calculate(IEnumerable<TimeSeries> data)
    {
        var val = _measure.Calculate(data);
        if (float.IsNaN(_prev))
        {
            _prev = val;
            return 0.0f;
        }
        val = val - _prev;
        _prev = val;
        return val;
    }

    public void Reset()
    {
        _measure.Reset();
        _prev = float.NaN;
    }
}

public class MeasureDiffMin : MeasureDiff<MeasureMin> { }
public class MeasureDiffMax : MeasureDiff<MeasureMax> { }
public class MeasureDiffAvg : MeasureDiff<MeasureAvg> { }

/// <summary>
/// WARNING: Only works with fixed length interval
/// </summary>
public class MeasureDiffSum : MeasureDiff<MeasureSum> { }
public class MeasureDiffMedian : MeasureDiff<MeasureMedian> { }
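
One caveat on MeasureDiff<T>: after the first window it stores the returned delta in _prev rather than the raw measure, so each subsequent call differences against the previous delta; if true first differences are intended, that assignment deserves a second look. Usage is otherwise straightforward; a small sketch with hypothetical windows:

    var diffAvg = new MeasureDiffAvg();
    foreach (var window in windows) // windows: IEnumerable<List<TimeSeries>>, one list per time slice
    {
        Console.WriteLine($"{diffAvg.Name}: {diffAvg.Calculate(window)}");
    }
    diffAvg.Reset(); // clears the baseline before reuse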

View File

@@ -58,8 +58,8 @@
        int pos = i;
        <MudItem xs="10">
            <MudTextField Label="Query" @bind-Value="_queryForm.Source.Queries[pos].Query" Variant="Variant.Text" InputType="InputType.Search" Lines="2" />
            @*<MudAutocomplete Label="Query" @bind-Value="_queryForm.Source.Queries[pos].Query" Lines="1" Variant="Variant.Text" SearchFunc="@SearchForQuery"></MudAutocomplete>*@
        </MudItem>
        <MudItem xs="1">
            <MudIconButton Icon="@Icons.Material.Outlined.Add" Variant="Variant.Outlined" aria-label="add" OnClick="@(() => AddQuery(pos))" />

View File

@@ -19,11 +19,11 @@
<h2 class="text-danger">An error occurred while processing your request.</h2>

@if (Model.ShowRequestId)
{
    <p>
        <strong>Request ID:</strong> <code>@Model.RequestId</code>
    </p>
}

<h3>Development Mode</h3>
<p>

View File

@@ -2,26 +2,25 @@
using Microsoft.AspNetCore.Mvc.RazorPages;
using System.Diagnostics;

namespace DeepTrace.Pages;

[ResponseCache(Duration = 0, Location = ResponseCacheLocation.None, NoStore = true)]
[IgnoreAntiforgeryToken]
public class ErrorModel : PageModel
{
    public string? RequestId { get; set; }

    public bool ShowRequestId => !string.IsNullOrEmpty(RequestId);

    private readonly ILogger<ErrorModel> _logger;

    public ErrorModel(ILogger<ErrorModel> logger)
    {
        _logger = logger;
    }

    public void OnGet()
    {
        RequestId = Activity.Current?.Id ?? HttpContext.TraceIdentifier;
    }
}

View File

@@ -14,9 +14,9 @@ Welcome to your new app.
@if (_trainedModels != null)
{
    @foreach(TrainedModelDefinition model in _trainedModels)
    {
        <ModelCard Model="@model"/>
    }
} else
{
    <MudText>Nothing to display</MudText>
@@ -24,13 +24,13 @@ Welcome to your new app.
@code{
    private List<TrainedModelDefinition> _trainedModels = new();

    protected override async Task OnInitializedAsync()
    {
        // await the async base lifecycle method instead of calling the
        // synchronous base.OnInitialized() from an async override
        await base.OnInitializedAsync();
        _trainedModels = await TrainedModelService.Load();
    }
}

View File

@@ -18,7 +18,7 @@
@inject IEstimatorBuilder EstimatorBuilder
@inject NavigationManager NavManager
@inject IJSRuntime Js
@inject IMLProcessorFactory MlProcessorFactory

<PageTitle>Training</PageTitle>
@@ -531,14 +531,14 @@
    private async Task HandleTrain()
    {
        var options = new DialogOptions
        {
            CloseOnEscapeKey = true
        };
        var parameters = new DialogParameters();
        var mlProcessor = MlProcessorFactory.Create();
        parameters.Add(nameof(Controls.TrainingDialog.Text), _modelForm!.CurrentModel.Name);
        parameters.Add(nameof(Controls.TrainingDialog.Processor), mlProcessor);
        parameters.Add(nameof(Controls.TrainingDialog.Model), _modelForm.CurrentModel);
@@ -546,7 +546,6 @@
        var d = DialogService.Show<Controls.TrainingDialog>("Training", parameters, options);
        var res = await d.Result;

        var bytes = mlProcessor.Export();
        //save to Mongo
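IMLProcessorFactory and its implementation are not shown in this hunk; below is a minimal sketch consistent with the call above and the Program.cs registration later in this commit. The constructor wiring is an assumption, not the repository's actual code.

    // Hypothetical sketch; the real definitions live elsewhere in the repo.
    public interface IMLProcessorFactory
    {
        MLProcessor Create();
    }

    public class MLProcessorFactory : IMLProcessorFactory
    {
        private readonly ILoggerFactory _loggerFactory;

        public MLProcessorFactory(ILoggerFactory loggerFactory) => _loggerFactory = loggerFactory;

        // Each training run gets a fresh processor with its own logger,
        // replacing the page-level ILogger<MLProcessor> injection removed above.
        public MLProcessor Create() => new MLProcessor(_loggerFactory.CreateLogger<MLProcessor>());
    }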

View File

@@ -29,6 +29,7 @@ builder.Services
    .AddSingleton<IModelStorageService, ModelStorageService>()
    .AddSingleton<ITrainedModelStorageService, TrainedModelStorageService>()
    .AddSingleton<IEstimatorBuilder, EstimatorBuilder>()
    .AddSingleton<IMLProcessorFactory, MLProcessorFactory>()
    ;

var app = builder.Build();

View File

@@ -1,58 +1,57 @@
using MongoDB.Bson;
using MongoDB.Driver;

namespace DeepTrace.Services;

public class DataSourceStorageService : IDataSourceStorageService
{
    private const string MongoDBDatabaseName = "DeepTrace";
    private const string MongoDBCollection = "Sources";

    private readonly IMongoClient _client;

    public DataSourceStorageService(IMongoClient client)
    {
        _client = client;
    }

    public async Task<List<DataSourceStorage>> Load()
    {
        var db = _client.GetDatabase(MongoDBDatabaseName);
        var collection = db.GetCollection<DataSourceStorage>(MongoDBCollection);

        var res = await (await collection.FindAsync("{}")).ToListAsync();
        return res;
    }

    public async Task Store(DataSourceStorage source)
    {
        var db = _client.GetDatabase(MongoDBDatabaseName);
        var collection = db.GetCollection<DataSourceStorage>(MongoDBCollection);

        if (source.Id == null)
            source.Id = ObjectId.GenerateNewId();

        // use upsert (insert or update) to automatically handle subsequent updates
        await collection.ReplaceOneAsync(
            filter: new BsonDocument("_id", source.Id),
            options: new ReplaceOptions { IsUpsert = true },
            replacement: source
            );
    }

    public async Task Delete(DataSourceStorage source, bool ignoreNotStored = false)
    {
        if (source.Id == null)
        {
            if (!ignoreNotStored)
                throw new InvalidDataException("Source was not stored yet. There is nothing to delete");
            return;
        }

        var db = _client.GetDatabase(MongoDBDatabaseName);
        var collection = db.GetCollection<DataSourceStorage>(MongoDBCollection);

        // match on _id with a document filter, as the other storage services do;
        // the interpolated string used before was not a valid JSON query
        await collection.DeleteOneAsync(new BsonDocument("_id", source.Id));
    }
}
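A usage sketch for the service; the connection string is a placeholder and the empty initializer assumes DataSourceStorage exposes a default constructor:

    var service = new DataSourceStorageService(new MongoClient("mongodb://localhost:27017"));
    var source  = new DataSourceStorage();

    await service.Store(source);   // first Store inserts and assigns an ObjectId
    await service.Store(source);   // same Id, so the upsert replaces the document
    var all = await service.Load();
    await service.Delete(source);  // only throws if the source was never stored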

View File

@@ -2,37 +2,36 @@
using MongoDB.Bson.Serialization.Attributes;
using MongoDB.Bson;

namespace DeepTrace.Services;

public class DataSourceStorage : DataSourceDefinition, IEquatable<DataSourceStorage>
{
    [BsonId]
    public ObjectId? Id { get; set; }

    public override bool Equals(object? obj)
    {
        if (obj is DataSourceStorage other)
        {
            return Id == other.Id;
        }
        return false;
    }

    public bool Equals(DataSourceStorage? other)
    {
        return Id == other?.Id;
    }

    public override int GetHashCode()
    {
        return Id?.GetHashCode() ?? base.GetHashCode();
    }
}

public interface IDataSourceStorageService
{
    Task Delete(DataSourceStorage source, bool ignoreNotStored = false);
    Task<List<DataSourceStorage>> Load();
    Task Store(DataSourceStorage source);
}
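Equality here is identity by Id rather than by value, so two instances pointing at the same stored document compare equal; a quick illustration (assumes a default constructor):

    var id = ObjectId.GenerateNewId();
    var a  = new DataSourceStorage { Id = id };
    var b  = new DataSourceStorage { Id = id };

    Console.WriteLine(a.Equals(b));  // True: same document identity
    var set = new HashSet<DataSourceStorage> { a, b };
    Console.WriteLine(set.Count);    // 1, because GetHashCode also keys off Id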

View File

@@ -3,14 +3,13 @@ using MongoDB.Bson;
using DeepTrace.Data;
using System.Text;

namespace DeepTrace.Services;

public interface IModelStorageService
{
    Task Delete(ModelDefinition source, bool ignoreNotStored = false);
    Task<List<ModelDefinition>> Load();
    Task<ModelDefinition?> Load(BsonObjectId id);
    Task Store(ModelDefinition source);
}

View File

@@ -1,11 +1,10 @@
using DeepTrace.Data;

namespace DeepTrace.Services;

public interface ITrainedModelStorageService
{
    Task Delete(TrainedModelDefinition source, bool ignoreNotStored = false);
    Task<List<TrainedModelDefinition>> Load();
    Task Store(TrainedModelDefinition source);
}

View File

@@ -2,67 +2,67 @@
using MongoDB.Bson;
using MongoDB.Driver;

namespace DeepTrace.Services;

public class ModelStorageService : IModelStorageService
{
    private const string MongoDBDatabaseName = "DeepTrace";
    private const string MongoDBCollection = "Models";

    private readonly IMongoClient _client;

    public ModelStorageService(IMongoClient client)
    {
        _client = client;
    }

    public async Task<List<ModelDefinition>> Load()
    {
        var db = _client.GetDatabase(MongoDBDatabaseName);
        var collection = db.GetCollection<ModelDefinition>(MongoDBCollection);

        var res = await (await collection.FindAsync("{}")).ToListAsync();
        return res;
    }

    public async Task<ModelDefinition?> Load(BsonObjectId id)
    {
        var db = _client.GetDatabase(MongoDBDatabaseName);
        var collection = db.GetCollection<ModelDefinition>(MongoDBCollection);

        var res = await (await collection.FindAsync($"{{ _id : ObjectId(\"{id}\") }}")).ToListAsync();
        return res.FirstOrDefault();
    }

    public async Task Store(ModelDefinition source)
    {
        var db = _client.GetDatabase(MongoDBDatabaseName);
        var collection = db.GetCollection<ModelDefinition>(MongoDBCollection);

        if (source.Id == null)
            source.Id = ObjectId.GenerateNewId();

        // use upsert (insert or update) to automatically handle subsequent updates
        await collection.ReplaceOneAsync(
            filter: new BsonDocument("_id", source.Id),
            options: new ReplaceOptions { IsUpsert = true },
            replacement: source
            );
    }

    public async Task Delete(ModelDefinition source, bool ignoreNotStored = false)
    {
        if (source.Id == null)
        {
            if (!ignoreNotStored)
                throw new InvalidDataException("Source was not stored yet. There is nothing to delete");
            return;
        }

        var db = _client.GetDatabase(MongoDBDatabaseName);
        var collection = db.GetCollection<ModelDefinition>(MongoDBCollection);

        await collection.DeleteOneAsync(filter: new BsonDocument("_id", source.Id));
    }
}
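The JSON string filter in Load(BsonObjectId) is equivalent to a typed filter; the same lookup with the driver's Builders API would look like this (shown for comparison only, not used by this commit):

    var filter = Builders<ModelDefinition>.Filter.Eq("_id", id);
    var model  = await collection.Find(filter).FirstOrDefaultAsync();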

View File

@@ -2,57 +2,56 @@
using MongoDB.Bson;
using MongoDB.Driver;

namespace DeepTrace.Services;

public class TrainedModelStorageService: ITrainedModelStorageService
{
    private const string MongoDBDatabaseName = "DeepTrace";
    private const string MongoDBCollection = "TrainedModels";

    private readonly IMongoClient _client;

    public TrainedModelStorageService(IMongoClient client)
    {
        _client = client;
    }

    public async Task<List<TrainedModelDefinition>> Load()
    {
        var db = _client.GetDatabase(MongoDBDatabaseName);
        var collection = db.GetCollection<TrainedModelDefinition>(MongoDBCollection);

        var res = await (await collection.FindAsync("{}")).ToListAsync();
        return res;
    }

    public async Task Store(TrainedModelDefinition source)
    {
        var db = _client.GetDatabase(MongoDBDatabaseName);
        var collection = db.GetCollection<TrainedModelDefinition>(MongoDBCollection);

        if (source.Id == null)
            source.Id = ObjectId.GenerateNewId();

        // use upsert (insert or update) to automatically handle subsequent updates
        await collection.ReplaceOneAsync(
            filter: new BsonDocument("_id", source.Id),
            options: new ReplaceOptions { IsUpsert = true },
            replacement: source
            );
    }

    public async Task Delete(TrainedModelDefinition source, bool ignoreNotStored = false)
    {
        if (source.Id == null)
        {
            if (!ignoreNotStored)
                throw new InvalidDataException("Source was not stored yet. There is nothing to delete");
            return;
        }

        var db = _client.GetDatabase(MongoDBDatabaseName);
        var collection = db.GetCollection<TrainedModelDefinition>(MongoDBCollection);

        await collection.DeleteOneAsync(filter: new BsonDocument("_id", source.Id));
    }
}

View File

@@ -10,7 +10,7 @@
</div>

@code {
    // Demonstrates how a parent component can supply parameters
    [Parameter]
    public string? Title { get; set; }
}
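For reference, a parent supplies this parameter as a markup attribute; the component name below is assumed from the stock Blazor template this file resembles:

    @* Hypothetical parent markup: the attribute maps to the [Parameter] property. *@
    <SurveyPrompt Title="How is DeepTrace working for you?" />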

View File

@@ -6,20 +6,19 @@ using System.Text.Json.Serialization;
using System.Text.Json;
using System.Threading.Tasks;

namespace PrometheusAPI;

public static class JsonSetializerSetup
{
    private static JsonSerializerOptions _options = new JsonSerializerOptions
    {
        AllowTrailingCommas = true,
        ReadCommentHandling = JsonCommentHandling.Skip,
        NumberHandling =
            JsonNumberHandling.AllowReadingFromString |
            JsonNumberHandling.WriteAsString,
        PropertyNameCaseInsensitive = true
    };

    public static JsonSerializerOptions Options => _options;
}
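These options matter because Prometheus encodes sample values as JSON strings; with AllowReadingFromString a quoted number still parses, as in this small check:

    // "42.5" arrives as a JSON string but deserializes into a double.
    var v = JsonSerializer.Deserialize<double>("\"42.5\"", JsonSetializerSetup.Options);  // 42.5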

View File

@@ -1,119 +1,118 @@
using System.Text.Json;

namespace PrometheusAPI;

public class PrometheusClient
{
    private readonly HttpClient _client;

    public PrometheusClient(HttpClient client)
    {
        _client = client;
    }

    public async Task<InstantQueryResponse> InstantQuery(string query, DateTime? time = null, CancellationToken token = default)
    {
        var q = new List<KeyValuePair<string, string>>
        {
            new KeyValuePair<string, string>("query", query)
        };

        if (time != null)
            q.Add(new KeyValuePair<string, string>("time", TimeSeries.DateTimeToUnixTimestamp(time.Value).ToString("F3")));

        var form = new FormUrlEncodedContent(q);
        var response = await _client.PostAsync("/api/v1/query", form);
        var json = await response.Content.ReadAsStringAsync()
            ?? throw new InvalidDataException("Responce is null");
        var res = JsonSerializer.Deserialize<InstantQueryResponse>(json, JsonSetializerSetup.Options)
            ?? throw new InvalidDataException("Can't convert responce to InstantQueryResponse");
        return res;
    }

    public async Task<InstantQueryResponse> RangeQuery(string query, DateTime start, DateTime end, TimeSpan step, TimeSpan timeout = default, CancellationToken token = default)
    {
        var q = new List<KeyValuePair<string, string>>
        {
            new KeyValuePair<string, string>("query", query),
            new KeyValuePair<string, string>("start", TimeSeries.DateTimeToUnixTimestamp(start).ToString("F3")),
            new KeyValuePair<string, string>("end", TimeSeries.DateTimeToUnixTimestamp(end).ToString("F3")),
            new KeyValuePair<string, string>("step", step.TotalSeconds.ToString("F3"))
        };

        if (timeout != default)
        {
            q.Add(new KeyValuePair<string, string>("timeout", timeout.TotalSeconds.ToString("F3")));
        }

        var form = new FormUrlEncodedContent(q);
        var response = await _client.PostAsync("/api/v1/query_range", form);
        var json = await response.Content.ReadAsStringAsync()
            ?? throw new InvalidDataException("Responce is null");
        var res = JsonSerializer.Deserialize<InstantQueryResponse>(json, JsonSetializerSetup.Options)
            ?? throw new InvalidDataException("Can't convert responce to InstantQueryResponse");
        return res;
    }

    public async Task<string> FormatQuery(string query, CancellationToken token = default)
    {
        var q = new List<KeyValuePair<string, string>>
        {
            new KeyValuePair<string, string>("query", query),
        };

        var form = new FormUrlEncodedContent(q);
        var response = await _client.PostAsync("/api/v1/format_query", form);
        var json = await response.Content.ReadAsStringAsync()
            ?? throw new InvalidDataException("Responce is null");
        var res = JsonSerializer.Deserialize<JsonDocument>(json, JsonSetializerSetup.Options)
            ?? throw new InvalidDataException("Can't convert responce to JsonDocument");
        var status = res.RootElement.GetProperty("status").GetString()
            ?? throw new InvalidDataException("Can't get status");
        if (!status.Equals("success", StringComparison.OrdinalIgnoreCase))
            throw new InvalidDataException(res.RootElement.GetProperty("error").GetString());
        var data = res.RootElement.GetProperty("data").GetString()
            ?? throw new InvalidDataException("Can't get formatted query");
        return data;
    }

    public async Task<string[]> GetMetricsNames(CancellationToken token = default)
    {
        var response = await _client.GetAsync("/api/v1/label/__name__/values");
        var json = await response.Content.ReadAsStringAsync()
            ?? throw new InvalidDataException("Responce is null");
        var res = JsonSerializer.Deserialize<JsonDocument>(json, JsonSetializerSetup.Options)
            ?? throw new InvalidDataException("Can't convert responce to JsonDocument");
        var status = res.RootElement.GetProperty("status").GetString()
            ?? throw new InvalidDataException("Can't get status");
        if (!status.Equals("success", StringComparison.OrdinalIgnoreCase))
            throw new InvalidDataException(res.RootElement.GetProperty("error").GetString());
        var data = res.RootElement.GetProperty("data").EnumerateArray().Select(x => x.GetString()).Where(x => x != null).Cast<string>().ToArray<string>()
            ?? throw new InvalidDataException("Can't get formatted query");
        return data;
    }
}
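A usage sketch for the client; the base address and queries below are placeholders:

    var http = new HttpClient { BaseAddress = new Uri("http://localhost:9090") };
    var prom = new PrometheusClient(http);

    var instant = await prom.InstantQuery("up");
    var range   = await prom.RangeQuery(
        "rate(http_requests_total[5m])",
        start: DateTime.UtcNow.AddHours(-1),
        end:   DateTime.UtcNow,
        step:  TimeSpan.FromSeconds(15));
    var names   = await prom.GetMetricsNames();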

View File

@@ -1,52 +1,51 @@
using System.Text.Json;
using System.Text.Json.Serialization;

namespace PrometheusAPI;

internal class TimeSeriesCoverter : JsonConverter<TimeSeries?>
{
    public override TimeSeries? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
    {
        if (reader.TokenType != JsonTokenType.StartArray)
            throw new JsonException();
        reader.Read();
        if (reader.TokenType != JsonTokenType.Number)
            throw new JsonException();
        var s = JsonSerializer.Deserialize<double>(ref reader, options);
        reader.Read();

        double f;
        if (reader.TokenType == JsonTokenType.Number)
            f = JsonSerializer.Deserialize<double>(ref reader, options);
        else if (reader.TokenType == JsonTokenType.String)
            f = Convert.ToDouble(JsonSerializer.Deserialize<string>(ref reader, options));
        else
            throw new JsonException();

        reader.Read();
        if (reader.TokenType != JsonTokenType.EndArray)
            throw new JsonException();

        return new TimeSeries(TimeSeries.UnixTimeStampToDateTime(s), (float)f);
    }

    public override void Write(Utf8JsonWriter writer, TimeSeries? value, JsonSerializerOptions options)
    {
        writer.WriteStartArray();
        if (value != null)
        {
            writer.WriteNumberValue(TimeSeries.DateTimeToUnixTimestamp(value.TimeStamp));
            writer.WriteNumberValue(value.Value);
        }
        writer.WriteEndArray();
    }
}
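The converter maps Prometheus's [unix_seconds, "value"] sample pairs onto TimeSeries; a round-trip sketch, with registration shown explicitly since the converter is internal to the PrometheusAPI project:

    var options = new JsonSerializerOptions();
    options.Converters.Add(new TimeSeriesCoverter());

    // Prometheus encodes one sample as [ <unix seconds>, "<value>" ].
    var sample = JsonSerializer.Deserialize<TimeSeries?>("[1690726841.5, \"42.5\"]", options);
    var json   = JsonSerializer.Serialize(sample, options);  // the value is written back as a number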