From c69deed50e81cc1805f6f82ebb10513a211cbbe2 Mon Sep 17 00:00:00 2001
From: Adam <103067949+AdamL-Microsoft@users.noreply.github.com>
Date: Tue, 29 Aug 2023 12:57:19 -0700
Subject: [PATCH 01/16] Release 8.7.1 (hotfix) (#3459)

* Remove the retention policy setting (#3452)
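Beyond the retention-policy fix, this patch carries the standalone `onefuzz-task local`
subcommands (crash-report, generator, analysis, regression, merge, radamsa, ...) visible
in the `src/agent/onefuzz-task/src/local` hunks below. As a minimal usage sketch — assuming
the usual `onefuzz-task local` entry point, with the required flags taken from
`build_shared_args` in `generic_crash_report.rs`:

    onefuzz-task local crash-report \
        --target_exe ./fuzz.exe \
        --crashes_dir ./crashes \
        --unique_reports_dir ./unique_reports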

---------

Co-authored-by: Cheick Keita <chkeita@microsoft.com>
---
 .devcontainer/devcontainer.json               |   3 +-
 .github/workflows/ci.yml                      |   2 -
 CHANGELOG.md                                  |   6 +
 CURRENT_VERSION                               |   2 +-
 .../ApiService/Functions/QueueJobResult.cs    |  60 -------
 .../ApiService/OneFuzzTypes/Model.cs          |  45 -----
 src/ApiService/ApiService/Program.cs          |   1 -
 .../ApiService/onefuzzlib/Config.cs           |   1 -
 .../ApiService/onefuzzlib/Extension.cs        |  44 +++--
 .../onefuzzlib/JobResultOperations.cs         | 121 -------------
 .../ApiService/onefuzzlib/OnefuzzContext.cs   |   2 -
 .../IntegrationTests/Fakes/TestContext.cs     |   3 -
 src/agent/Cargo.lock                          |  16 --
 src/agent/Cargo.toml                          |   1 -
 src/agent/onefuzz-agent/src/config.rs         |  12 --
 src/agent/onefuzz-agent/src/log_uploader.rs   |  29 ++++
 src/agent/onefuzz-agent/src/work.rs           |   5 +-
 src/agent/onefuzz-result/Cargo.toml           |  18 --
 src/agent/onefuzz-result/src/job_result.rs    | 129 --------------
 src/agent/onefuzz-result/src/lib.rs           |   4 -
 src/agent/onefuzz-task/Cargo.toml             |   1 -
 src/agent/onefuzz-task/src/local/cmd.rs       |  42 ++++-
 src/agent/onefuzz-task/src/local/common.rs    |  26 ++-
 .../example_templates/libfuzzer_basic.yml     |  34 ++--
 .../src/local/generic_analysis.rs             | 137 ++++++++++++++-
 .../src/local/generic_crash_report.rs         | 138 ++++++++++++++-
 .../src/local/generic_generator.rs            | 142 ++++++++++++++-
 src/agent/onefuzz-task/src/local/libfuzzer.rs | 161 +++++++++++++++++-
 .../src/local/libfuzzer_crash_report.rs       | 128 +++++++++++++-
 .../onefuzz-task/src/local/libfuzzer_merge.rs |  84 ++++++++-
 .../src/local/libfuzzer_regression.rs         | 134 ++++++++++++++-
 .../src/local/libfuzzer_test_input.rs         |  83 +++++++++
 src/agent/onefuzz-task/src/local/mod.rs       |   1 +
 src/agent/onefuzz-task/src/local/radamsa.rs   |  78 +++++++++
 src/agent/onefuzz-task/src/local/schema.json  |   8 +-
 src/agent/onefuzz-task/src/local/template.rs  |  13 +-
 .../onefuzz-task/src/local/test_input.rs      |  86 ++++++++++
 .../src/tasks/analysis/generic.rs             |   5 +-
 src/agent/onefuzz-task/src/tasks/config.rs    |  20 ---
 .../src/tasks/coverage/generic.rs             |  19 +--
 .../onefuzz-task/src/tasks/fuzz/generator.rs  |   7 +-
 .../src/tasks/fuzz/libfuzzer/common.rs        |  49 ++----
 .../onefuzz-task/src/tasks/fuzz/supervisor.rs |  15 +-
 src/agent/onefuzz-task/src/tasks/heartbeat.rs |   2 +-
 .../onefuzz-task/src/tasks/merge/generic.rs   |   2 +-
 .../src/tasks/merge/libfuzzer_merge.rs        |   2 +-
 .../src/tasks/regression/common.rs            |  15 +-
 .../src/tasks/regression/generic.rs           |   3 +-
 .../src/tasks/regression/libfuzzer.rs         |   3 +-
 .../src/tasks/report/crash_report.rs          |  45 +----
 .../src/tasks/report/dotnet/generic.rs        |  22 +--
 .../onefuzz-task/src/tasks/report/generic.rs  |  14 +-
 .../src/tasks/report/libfuzzer_report.rs      |   5 -
 src/agent/onefuzz/Cargo.toml                  |   1 -
 src/agent/onefuzz/src/blob/url.rs             |  23 +--
 src/agent/onefuzz/src/syncdir.rs              |  66 +------
 .../bicep-templates/storageAccounts.bicep     |   2 +-
 src/integration-tests/integration-test.py     |  77 ++-------
 src/runtime-tools/linux/setup.sh              |  64 ++-----
 59 files changed, 1389 insertions(+), 872 deletions(-)
 delete mode 100644 src/ApiService/ApiService/Functions/QueueJobResult.cs
 delete mode 100644 src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs
 delete mode 100644 src/agent/onefuzz-result/Cargo.toml
 delete mode 100644 src/agent/onefuzz-result/src/job_result.rs
 delete mode 100644 src/agent/onefuzz-result/src/lib.rs
 create mode 100644 src/agent/onefuzz-task/src/local/radamsa.rs
 mode change 100644 => 100755 src/runtime-tools/linux/setup.sh

diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index d3fcf050ed..4059b3d7c1 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -13,7 +13,6 @@
 					"**/target/**": true
 				},
 				"lldb.executable": "/usr/bin/lldb",
-				"dotnet.server.useOmnisharp": true,
 				"omnisharp.enableEditorConfigSupport": true,
 				"omnisharp.enableRoslynAnalyzers": true,
 				"python.defaultInterpreterPath": "/workspaces/onefuzz/src/venv/bin/python",
@@ -49,4 +48,4 @@
 	"features": {
 		"ghcr.io/devcontainers/features/azure-cli:1": {}
 	}
-}
\ No newline at end of file
+}
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 2dd85d7c92..12824fd182 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -542,11 +542,9 @@ jobs:
 
           mkdir -p artifacts/linux-libfuzzer
           mkdir -p artifacts/linux-libfuzzer-with-options
-          mkdir -p artifacts/mariner-libfuzzer
           (cd libfuzzer ; make )
           cp -r libfuzzer/fuzz.exe libfuzzer/seeds artifacts/linux-libfuzzer
           cp -r libfuzzer/fuzz.exe libfuzzer/seeds artifacts/linux-libfuzzer-with-options
-          cp -r libfuzzer/fuzz.exe libfuzzer/seeds artifacts/mariner-libfuzzer
 
           mkdir -p artifacts/linux-libfuzzer-regression
           (cd libfuzzer-regression ; make )
diff --git a/CHANGELOG.md b/CHANGELOG.md
index be4779ad77..8d46ea2a0e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,12 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## 8.7.1
+
+### Fixed
+
+* Service: Removed deprecated Azure retention policy setting that was causing scaleset deployment errors [#3452](https://github.com/microsoft/onefuzz/pull/3452)
+
 ## 8.7.0
 
 ### Added
diff --git a/CURRENT_VERSION b/CURRENT_VERSION
index c0bcaebe8f..efeecbe2c5 100644
--- a/CURRENT_VERSION
+++ b/CURRENT_VERSION
@@ -1 +1 @@
-8.7.0
\ No newline at end of file
+8.7.1
\ No newline at end of file
diff --git a/src/ApiService/ApiService/Functions/QueueJobResult.cs b/src/ApiService/ApiService/Functions/QueueJobResult.cs
deleted file mode 100644
index d781a4d1e1..0000000000
--- a/src/ApiService/ApiService/Functions/QueueJobResult.cs
+++ /dev/null
@@ -1,60 +0,0 @@
-using System.Text.Json;
-using Microsoft.Azure.Functions.Worker;
-using Microsoft.Extensions.Logging;
-using Microsoft.OneFuzz.Service.OneFuzzLib.Orm;
-namespace Microsoft.OneFuzz.Service.Functions;
-
-
-public class QueueJobResult {
-    private readonly ILogger _log;
-    private readonly IOnefuzzContext _context;
-
-    public QueueJobResult(ILogger<QueueJobResult> logTracer, IOnefuzzContext context) {
-        _log = logTracer;
-        _context = context;
-    }
-
-    [Function("QueueJobResult")]
-    public async Async.Task Run([QueueTrigger("job-result", Connection = "AzureWebJobsStorage")] string msg) {
-
-        var _tasks = _context.TaskOperations;
-        var _jobs = _context.JobOperations;
-
-        _log.LogInformation("job result: {msg}", msg);
-        var jr = JsonSerializer.Deserialize<TaskJobResultEntry>(msg, EntityConverter.GetJsonSerializerOptions()).EnsureNotNull($"wrong data {msg}");
-
-        var task = await _tasks.GetByTaskId(jr.TaskId);
-        if (task == null) {
-            _log.LogWarning("invalid {TaskId}", jr.TaskId);
-            return;
-        }
-
-        var job = await _jobs.Get(task.JobId);
-        if (job == null) {
-            _log.LogWarning("invalid {JobId}", task.JobId);
-            return;
-        }
-
-        JobResultData? data = jr.Data;
-        if (data == null) {
-            _log.LogWarning($"job result data is empty, throwing out: {jr}");
-            return;
-        }
-
-        var jobResultType = data.Type;
-        _log.LogInformation($"job result data type: {jobResultType}");
-
-        Dictionary<string, double> value;
-        if (jr.Value.Count > 0) {
-            value = jr.Value;
-        } else {
-            _log.LogWarning($"job result data is empty, throwing out: {jr}");
-            return;
-        }
-
-        var jobResult = await _context.JobResultOperations.CreateOrUpdate(job.JobId, jobResultType, value);
-        if (!jobResult.IsOk) {
-            _log.LogError("failed to create or update with job result {JobId}", job.JobId);
-        }
-    }
-}
diff --git a/src/ApiService/ApiService/OneFuzzTypes/Model.cs b/src/ApiService/ApiService/OneFuzzTypes/Model.cs
index b839f52ddc..e430c1448c 100644
--- a/src/ApiService/ApiService/OneFuzzTypes/Model.cs
+++ b/src/ApiService/ApiService/OneFuzzTypes/Model.cs
@@ -33,19 +33,6 @@ public enum HeartbeatType {
     TaskAlive,
 }
 
-[SkipRename]
-public enum JobResultType {
-    NewCrashingInput,
-    NoReproCrashingInput,
-    NewReport,
-    NewUniqueReport,
-    NewRegressionReport,
-    NewCoverage,
-    NewCrashDump,
-    CoverageData,
-    RuntimeStats,
-}
-
 public record HeartbeatData(HeartbeatType Type);
 
 public record TaskHeartbeatEntry(
@@ -54,16 +41,6 @@ public record TaskHeartbeatEntry(
     Guid MachineId,
     HeartbeatData[] Data);
 
-public record JobResultData(JobResultType Type);
-
-public record TaskJobResultEntry(
-    Guid TaskId,
-    Guid? JobId,
-    Guid MachineId,
-    JobResultData Data,
-    Dictionary<string, double> Value
-    );
-
 public record NodeHeartbeatEntry(Guid NodeId, HeartbeatData[] Data);
 
 public record NodeCommandStopIfFree();
@@ -915,27 +892,6 @@ public record SecretAddress<T>(Uri Url) : ISecret<T> {
 public record SecretData<T>(ISecret<T> Secret) {
 }
 
-public record JobResult(
-    [PartitionKey][RowKey] Guid JobId,
-    string Project,
-    string Name,
-    double NewCrashingInput = 0,
-    double NoReproCrashingInput = 0,
-    double NewReport = 0,
-    double NewUniqueReport = 0,
-    double NewRegressionReport = 0,
-    double NewCrashDump = 0,
-    double InstructionsCovered = 0,
-    double TotalInstructions = 0,
-    double CoverageRate = 0,
-    double IterationCount = 0
-) : EntityBase() {
-    public JobResult(Guid JobId, string Project, string Name) : this(
-        JobId: JobId,
-        Project: Project,
-        Name: Name, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) { }
-}
-
 public record JobConfig(
     string Project,
     string Name,
@@ -1100,7 +1056,6 @@ public record TaskUnitConfig(
     string? InstanceTelemetryKey,
     string? MicrosoftTelemetryKey,
     Uri HeartbeatQueue,
-    Uri JobResultQueue,
     Dictionary<string, string> Tags
     ) {
     public Uri? inputQueue { get; set; }
diff --git a/src/ApiService/ApiService/Program.cs b/src/ApiService/ApiService/Program.cs
index d5ee30b45e..f425c00809 100644
--- a/src/ApiService/ApiService/Program.cs
+++ b/src/ApiService/ApiService/Program.cs
@@ -118,7 +118,6 @@ public static async Async.Task Main() {
                 .AddScoped<IVmOperations, VmOperations>()
                 .AddScoped<ISecretsOperations, SecretsOperations>()
                 .AddScoped<IJobOperations, JobOperations>()
-                .AddScoped<IJobResultOperations, JobResultOperations>()
                 .AddScoped<INsgOperations, NsgOperations>()
                 .AddScoped<IScheduler, Scheduler>()
                 .AddScoped<IConfig, Config>()
diff --git a/src/ApiService/ApiService/onefuzzlib/Config.cs b/src/ApiService/ApiService/onefuzzlib/Config.cs
index 872cedbc01..71af317348 100644
--- a/src/ApiService/ApiService/onefuzzlib/Config.cs
+++ b/src/ApiService/ApiService/onefuzzlib/Config.cs
@@ -71,7 +71,6 @@ private static BlobContainerSasPermissions ConvertPermissions(ContainerPermissio
             InstanceTelemetryKey: _serviceConfig.ApplicationInsightsInstrumentationKey,
             MicrosoftTelemetryKey: _serviceConfig.OneFuzzTelemetry,
             HeartbeatQueue: await _queue.GetQueueSas("task-heartbeat", StorageType.Config, QueueSasPermissions.Add) ?? throw new Exception("unable to get heartbeat queue sas"),
-            JobResultQueue: await _queue.GetQueueSas("job-result", StorageType.Config, QueueSasPermissions.Add) ?? throw new Exception("unable to get heartbeat queue sas"),
             Tags: task.Config.Tags ?? new Dictionary<string, string>()
         );
 
diff --git a/src/ApiService/ApiService/onefuzzlib/Extension.cs b/src/ApiService/ApiService/onefuzzlib/Extension.cs
index fbf62dd343..7995026eca 100644
--- a/src/ApiService/ApiService/onefuzzlib/Extension.cs
+++ b/src/ApiService/ApiService/onefuzzlib/Extension.cs
@@ -36,9 +36,7 @@ public async Async.Task<IList<VMExtensionWrapper>> GenericExtensions(AzureLocati
         var extensions = new List<VMExtensionWrapper>();
 
         var instanceConfig = await _context.ConfigOperations.Fetch();
-        if (vmOs == Os.Windows) {
-            extensions.Add(await MonitorExtension(region));
-        }
+        extensions.Add(await MonitorExtension(region, vmOs));
 
         var depenency = DependencyExtension(region, vmOs);
         if (depenency is not null) {
@@ -331,21 +329,37 @@ public async Async.Task<VMExtensionWrapper> AgentConfig(AzureLocation region, Os
         throw new NotSupportedException($"unsupported OS: {vmOs}");
     }
 
-    public async Async.Task<VMExtensionWrapper> MonitorExtension(AzureLocation region) {
+    public async Async.Task<VMExtensionWrapper> MonitorExtension(AzureLocation region, Os vmOs) {
         var settings = await _context.LogAnalytics.GetMonitorSettings();
         var extensionSettings = JsonSerializer.Serialize(new { WorkspaceId = settings.Id }, _extensionSerializerOptions);
         var protectedExtensionSettings = JsonSerializer.Serialize(new { WorkspaceKey = settings.Key }, _extensionSerializerOptions);
-        return new VMExtensionWrapper {
-            Location = region,
-            Name = "OMSExtension",
-            TypePropertiesType = "MicrosoftMonitoringAgent",
-            Publisher = "Microsoft.EnterpriseCloud.Monitoring",
-            TypeHandlerVersion = "1.0",
-            AutoUpgradeMinorVersion = true,
-            Settings = new BinaryData(extensionSettings),
-            ProtectedSettings = new BinaryData(protectedExtensionSettings),
-            EnableAutomaticUpgrade = false
-        };
+        if (vmOs == Os.Windows) {
+            return new VMExtensionWrapper {
+                Location = region,
+                Name = "OMSExtension",
+                TypePropertiesType = "MicrosoftMonitoringAgent",
+                Publisher = "Microsoft.EnterpriseCloud.Monitoring",
+                TypeHandlerVersion = "1.0",
+                AutoUpgradeMinorVersion = true,
+                Settings = new BinaryData(extensionSettings),
+                ProtectedSettings = new BinaryData(protectedExtensionSettings),
+                EnableAutomaticUpgrade = false
+            };
+        } else if (vmOs == Os.Linux) {
+            return new VMExtensionWrapper {
+                Location = region,
+                Name = "OmsAgentForLinux",
+                TypePropertiesType = "OmsAgentForLinux",
+                Publisher = "Microsoft.EnterpriseCloud.Monitoring",
+                TypeHandlerVersion = "1.0",
+                AutoUpgradeMinorVersion = true,
+                Settings = new BinaryData(extensionSettings),
+                ProtectedSettings = new BinaryData(protectedExtensionSettings),
+                EnableAutomaticUpgrade = false
+            };
+        } else {
+            throw new NotSupportedException($"unsupported OS: {vmOs}");
+        }
     }
 
 
diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs
deleted file mode 100644
index 1166cf91d4..0000000000
--- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs
+++ /dev/null
@@ -1,121 +0,0 @@
-using ApiService.OneFuzzLib.Orm;
-using Microsoft.Extensions.Logging;
-using Polly;
-namespace Microsoft.OneFuzz.Service;
-
-public interface IJobResultOperations : IOrm<JobResult> {
-
-    Async.Task<JobResult?> GetJobResult(Guid jobId);
-    Async.Task<OneFuzzResultVoid> CreateOrUpdate(Guid jobId, JobResultType resultType, Dictionary<string, double> resultValue);
-
-}
-public class JobResultOperations : Orm<JobResult>, IJobResultOperations {
-
-    public JobResultOperations(ILogger<JobResultOperations> log, IOnefuzzContext context)
-        : base(log, context) {
-    }
-
-    public async Async.Task<JobResult?> GetJobResult(Guid jobId) {
-        return await SearchByPartitionKeys(new[] { jobId.ToString() }).SingleOrDefaultAsync();
-    }
-
-    private JobResult UpdateResult(JobResult result, JobResultType type, Dictionary<string, double> resultValue) {
-
-        var newResult = result;
-        double newValue;
-        switch (type) {
-            case JobResultType.NewCrashingInput:
-                newValue = result.NewCrashingInput + resultValue["count"];
-                newResult = result with { NewCrashingInput = newValue };
-                break;
-            case JobResultType.NewReport:
-                newValue = result.NewReport + resultValue["count"];
-                newResult = result with { NewReport = newValue };
-                break;
-            case JobResultType.NewUniqueReport:
-                newValue = result.NewUniqueReport + resultValue["count"];
-                newResult = result with { NewUniqueReport = newValue };
-                break;
-            case JobResultType.NewRegressionReport:
-                newValue = result.NewRegressionReport + resultValue["count"];
-                newResult = result with { NewRegressionReport = newValue };
-                break;
-            case JobResultType.NewCrashDump:
-                newValue = result.NewCrashDump + resultValue["count"];
-                newResult = result with { NewCrashDump = newValue };
-                break;
-            case JobResultType.CoverageData:
-                double newCovered = resultValue["covered"];
-                double newTotalCovered = resultValue["features"];
-                double newCoverageRate = resultValue["rate"];
-                newResult = result with { InstructionsCovered = newCovered, TotalInstructions = newTotalCovered, CoverageRate = newCoverageRate };
-                break;
-            case JobResultType.RuntimeStats:
-                double newTotalIterations = resultValue["total_count"];
-                newResult = result with { IterationCount = newTotalIterations };
-                break;
-            default:
-                _logTracer.LogWarning($"Invalid Field {type}.");
-                break;
-        }
-        _logTracer.LogInformation($"Attempting to log new result: {newResult}");
-        return newResult;
-    }
-
-    private async Async.Task<bool> TryUpdate(Job job, JobResultType resultType, Dictionary<string, double> resultValue) {
-        var jobId = job.JobId;
-
-        var jobResult = await GetJobResult(jobId);
-
-        if (jobResult == null) {
-            _logTracer.LogInformation("Creating new JobResult for Job {JobId}", jobId);
-
-            var entry = new JobResult(JobId: jobId, Project: job.Config.Project, Name: job.Config.Name);
-
-            jobResult = UpdateResult(entry, resultType, resultValue);
-
-            var r = await Insert(jobResult);
-            if (!r.IsOk) {
-                throw new InvalidOperationException($"failed to insert job result {jobResult.JobId}");
-            }
-            _logTracer.LogInformation("created job result {JobId}", jobResult.JobId);
-        } else {
-            _logTracer.LogInformation("Updating existing JobResult entry for Job {JobId}", jobId);
-
-            jobResult = UpdateResult(jobResult, resultType, resultValue);
-
-            var r = await Update(jobResult);
-            if (!r.IsOk) {
-                throw new InvalidOperationException($"failed to insert job result {jobResult.JobId}");
-            }
-            _logTracer.LogInformation("updated job result {JobId}", jobResult.JobId);
-        }
-
-        return true;
-    }
-
-    public async Async.Task<OneFuzzResultVoid> CreateOrUpdate(Guid jobId, JobResultType resultType, Dictionary<string, double> resultValue) {
-
-        var job = await _context.JobOperations.Get(jobId);
-        if (job == null) {
-            return OneFuzzResultVoid.Error(ErrorCode.INVALID_REQUEST, "invalid job");
-        }
-
-        var success = false;
-        try {
-            _logTracer.LogInformation("attempt to update job result {JobId}", job.JobId);
-            var policy = Policy.Handle<InvalidOperationException>().WaitAndRetryAsync(50, _ => new TimeSpan(0, 0, 5));
-            await policy.ExecuteAsync(async () => {
-                success = await TryUpdate(job, resultType, resultValue);
-                _logTracer.LogInformation("attempt {success}", success);
-            });
-            return OneFuzzResultVoid.Ok;
-        } catch (Exception e) {
-            return OneFuzzResultVoid.Error(ErrorCode.UNABLE_TO_UPDATE, new string[] {
-                    $"Unexpected failure when attempting to update job result for {job.JobId}",
-                    $"Exception: {e}"
-                });
-        }
-    }
-}
-
diff --git a/src/ApiService/ApiService/onefuzzlib/OnefuzzContext.cs b/src/ApiService/ApiService/onefuzzlib/OnefuzzContext.cs
index 03c6322663..d877bfddbb 100644
--- a/src/ApiService/ApiService/onefuzzlib/OnefuzzContext.cs
+++ b/src/ApiService/ApiService/onefuzzlib/OnefuzzContext.cs
@@ -19,7 +19,6 @@ public interface IOnefuzzContext {
     IExtensions Extensions { get; }
     IIpOperations IpOperations { get; }
     IJobOperations JobOperations { get; }
-    IJobResultOperations JobResultOperations { get; }
     ILogAnalytics LogAnalytics { get; }
     INodeMessageOperations NodeMessageOperations { get; }
     INodeOperations NodeOperations { get; }
@@ -84,7 +83,6 @@ public OnefuzzContext(IServiceProvider serviceProvider) {
     public IVmOperations VmOperations => _serviceProvider.GetRequiredService<IVmOperations>();
     public ISecretsOperations SecretsOperations => _serviceProvider.GetRequiredService<ISecretsOperations>();
     public IJobOperations JobOperations => _serviceProvider.GetRequiredService<IJobOperations>();
-    public IJobResultOperations JobResultOperations => _serviceProvider.GetRequiredService<IJobResultOperations>();
     public IScheduler Scheduler => _serviceProvider.GetRequiredService<IScheduler>();
     public IConfig Config => _serviceProvider.GetRequiredService<IConfig>();
     public ILogAnalytics LogAnalytics => _serviceProvider.GetRequiredService<ILogAnalytics>();
diff --git a/src/ApiService/IntegrationTests/Fakes/TestContext.cs b/src/ApiService/IntegrationTests/Fakes/TestContext.cs
index 66d121e746..c46ff5fce7 100644
--- a/src/ApiService/IntegrationTests/Fakes/TestContext.cs
+++ b/src/ApiService/IntegrationTests/Fakes/TestContext.cs
@@ -32,7 +32,6 @@ public TestContext(IHttpClientFactory httpClientFactory, OneFuzzLoggerProvider p
         TaskOperations = new TaskOperations(provider.CreateLogger<TaskOperations>(), Cache, this);
         NodeOperations = new NodeOperations(provider.CreateLogger<NodeOperations>(), this);
         JobOperations = new JobOperations(provider.CreateLogger<JobOperations>(), this);
-        JobResultOperations = new JobResultOperations(provider.CreateLogger<JobResultOperations>(), this);
         NodeTasksOperations = new NodeTasksOperations(provider.CreateLogger<NodeTasksOperations>(), this);
         TaskEventOperations = new TaskEventOperations(provider.CreateLogger<TaskEventOperations>(), this);
         NodeMessageOperations = new NodeMessageOperations(provider.CreateLogger<NodeMessageOperations>(), this);
@@ -58,7 +57,6 @@ public Async.Task InsertAll(params EntityBase[] objs)
                 Node n => NodeOperations.Insert(n),
                 Pool p => PoolOperations.Insert(p),
                 Job j => JobOperations.Insert(j),
-                JobResult jr => JobResultOperations.Insert(jr),
                 Repro r => ReproOperations.Insert(r),
                 Scaleset ss => ScalesetOperations.Insert(ss),
                 NodeTasks nt => NodeTasksOperations.Insert(nt),
@@ -86,7 +84,6 @@ public Async.Task InsertAll(params EntityBase[] objs)
 
     public ITaskOperations TaskOperations { get; }
     public IJobOperations JobOperations { get; }
-    public IJobResultOperations JobResultOperations { get; }
     public INodeOperations NodeOperations { get; }
     public INodeTasksOperations NodeTasksOperations { get; }
     public ITaskEventOperations TaskEventOperations { get; }
diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock
index 254684be97..a1d86e7d25 100644
--- a/src/agent/Cargo.lock
+++ b/src/agent/Cargo.lock
@@ -2123,7 +2123,6 @@ dependencies = [
  "log",
  "nix",
  "notify",
- "onefuzz-result",
  "onefuzz-telemetry",
  "pete",
  "pretty_assertions",
@@ -2198,20 +2197,6 @@ dependencies = [
  "serde_json",
 ]
 
-[[package]]
-name = "onefuzz-result"
-version = "0.2.0"
-dependencies = [
- "anyhow",
- "async-trait",
- "log",
- "onefuzz-telemetry",
- "reqwest",
- "serde",
- "storage-queue",
- "uuid",
-]
-
 [[package]]
 name = "onefuzz-task"
 version = "0.2.0"
@@ -2241,7 +2226,6 @@ dependencies = [
  "num_cpus",
  "onefuzz",
  "onefuzz-file-format",
- "onefuzz-result",
  "onefuzz-telemetry",
  "path-absolutize",
  "pretty_assertions",
diff --git a/src/agent/Cargo.toml b/src/agent/Cargo.toml
index ce01ae880c..2f4cea41a4 100644
--- a/src/agent/Cargo.toml
+++ b/src/agent/Cargo.toml
@@ -10,7 +10,6 @@ members = [
     "onefuzz",
     "onefuzz-task",
     "onefuzz-agent",
-    "onefuzz-result",
     "onefuzz-file-format",
     "onefuzz-telemetry",
     "reqwest-retry",
diff --git a/src/agent/onefuzz-agent/src/config.rs b/src/agent/onefuzz-agent/src/config.rs
index fc623e72af..87edfb2c1b 100644
--- a/src/agent/onefuzz-agent/src/config.rs
+++ b/src/agent/onefuzz-agent/src/config.rs
@@ -34,8 +34,6 @@ pub struct StaticConfig {
 
     pub heartbeat_queue: Option<Url>,
 
-    pub job_result_queue: Option<Url>,
-
     pub instance_id: Uuid,
 
     #[serde(default = "default_as_true")]
@@ -73,8 +71,6 @@ struct RawStaticConfig {
 
     pub heartbeat_queue: Option<Url>,
 
-    pub job_result_queue: Option<Url>,
-
     pub instance_id: Uuid,
 
     #[serde(default = "default_as_true")]
@@ -121,7 +117,6 @@ impl StaticConfig {
             microsoft_telemetry_key: config.microsoft_telemetry_key,
             instance_telemetry_key: config.instance_telemetry_key,
             heartbeat_queue: config.heartbeat_queue,
-            job_result_queue: config.job_result_queue,
             instance_id: config.instance_id,
             managed: config.managed,
             machine_identity,
@@ -157,12 +152,6 @@ impl StaticConfig {
             None
         };
 
-        let job_result_queue = if let Ok(key) = std::env::var("ONEFUZZ_JOB_RESULT") {
-            Some(Url::parse(&key)?)
-        } else {
-            None
-        };
-
         let instance_telemetry_key =
             if let Ok(key) = std::env::var("ONEFUZZ_INSTANCE_TELEMETRY_KEY") {
                 Some(InstanceTelemetryKey::new(Uuid::parse_str(&key)?))
@@ -194,7 +183,6 @@ impl StaticConfig {
             instance_telemetry_key,
             microsoft_telemetry_key,
             heartbeat_queue,
-            job_result_queue,
             instance_id,
             managed: !is_unmanaged,
             machine_identity,
diff --git a/src/agent/onefuzz-agent/src/log_uploader.rs b/src/agent/onefuzz-agent/src/log_uploader.rs
index d424013421..6bccc0bef2 100644
--- a/src/agent/onefuzz-agent/src/log_uploader.rs
+++ b/src/agent/onefuzz-agent/src/log_uploader.rs
@@ -210,3 +210,32 @@ async fn sync_file(
     blob_client.append_block(Body::from(f)).await?;
     Ok(len)
 }
+
+#[cfg(test)]
+mod tests {
+    use std::io::Seek;
+
+    use anyhow::Result;
+    use tokio::io::{AsyncReadExt, AsyncSeekExt};
+
+    #[allow(clippy::unused_io_amount)]
+    #[tokio::test]
+    #[ignore]
+    // Manual probe: checks whether a seek done on a std File carries over after wrapping it in a tokio File. Depends on a local file, hence #[ignore].
+    async fn test_seek_behavior() -> Result<()> {
+        let path = "C:\\temp\\test.ps1";
+        let mut std_file = std::fs::File::open(path)?;
+        std_file.seek(std::io::SeekFrom::Start(3))?;
+
+        let mut tokio_file = tokio::fs::File::from_std(std_file);
+
+        let buf = &mut [0u8; 5];
+        tokio_file.read(buf).await?;
+        println!("******** buf {:?}", buf);
+        tokio_file.seek(std::io::SeekFrom::Start(0)).await?;
+        tokio_file.read(buf).await?;
+        println!("******** buf {:?}", buf);
+
+        Ok(())
+    }
+}
diff --git a/src/agent/onefuzz-agent/src/work.rs b/src/agent/onefuzz-agent/src/work.rs
index d0222744a7..b55d1d86a1 100644
--- a/src/agent/onefuzz-agent/src/work.rs
+++ b/src/agent/onefuzz-agent/src/work.rs
@@ -91,10 +91,7 @@ impl WorkSet {
 
     pub fn setup_dir(&self) -> Result<PathBuf> {
         let root = self.get_root_folder()?;
-        // Putting the setup container at the root for backward compatibility.
-        // The path of setup folder can be used as part of the deduplication logic in the bug filing service
-        let setup_root = root.parent().ok_or_else(|| anyhow!("Invalid root"))?;
-        self.setup_url.as_path(setup_root)
+        self.setup_url.as_path(root)
     }
 
     pub fn extra_setup_dir(&self) -> Result<Option<PathBuf>> {
diff --git a/src/agent/onefuzz-result/Cargo.toml b/src/agent/onefuzz-result/Cargo.toml
deleted file mode 100644
index 7c7de6615c..0000000000
--- a/src/agent/onefuzz-result/Cargo.toml
+++ /dev/null
@@ -1,18 +0,0 @@
-[package]
-name = "onefuzz-result"
-version = "0.2.0"
-authors = ["fuzzing@microsoft.com"]
-edition = "2021"
-publish = false
-license = "MIT"
-
-[dependencies]
-anyhow = { version = "1.0", features = ["backtrace"] }
-async-trait = "0.1"
-reqwest = "0.11"
-serde = "1.0"
-storage-queue = { path = "../storage-queue" }
-uuid = { version = "1.4", features = ["serde", "v4"] }
-onefuzz-telemetry = { path = "../onefuzz-telemetry" }
-log = "0.4"
-
diff --git a/src/agent/onefuzz-result/src/job_result.rs b/src/agent/onefuzz-result/src/job_result.rs
deleted file mode 100644
index b305eca2cb..0000000000
--- a/src/agent/onefuzz-result/src/job_result.rs
+++ /dev/null
@@ -1,129 +0,0 @@
-// Copyright (c) Microsoft Corporation.
-// Licensed under the MIT License.
-
-use anyhow::Result;
-use async_trait::async_trait;
-use onefuzz_telemetry::warn;
-use reqwest::Url;
-use serde::{self, Deserialize, Serialize};
-use std::collections::HashMap;
-use std::sync::Arc;
-use storage_queue::QueueClient;
-use uuid::Uuid;
-
-#[derive(Debug, Deserialize, Serialize, Hash, Eq, PartialEq, Clone)]
-#[serde(tag = "type")]
-pub enum JobResultData {
-    NewCrashingInput,
-    NoReproCrashingInput,
-    NewReport,
-    NewUniqueReport,
-    NewRegressionReport,
-    NewCoverage,
-    NewCrashDump,
-    CoverageData,
-    RuntimeStats,
-}
-
-#[derive(Debug, Deserialize, Serialize, Clone)]
-struct JobResult {
-    task_id: Uuid,
-    job_id: Uuid,
-    machine_id: Uuid,
-    machine_name: String,
-    data: JobResultData,
-    value: HashMap<String, f64>,
-}
-
-#[derive(Clone)]
-pub struct TaskContext {
-    task_id: Uuid,
-    job_id: Uuid,
-    machine_id: Uuid,
-    machine_name: String,
-}
-
-pub struct JobResultContext<TaskContext> {
-    pub state: TaskContext,
-    pub queue_client: QueueClient,
-}
-
-pub struct JobResultClient<TaskContext> {
-    pub context: Arc<JobResultContext<TaskContext>>,
-}
-
-impl<TaskContext> JobResultClient<TaskContext> {
-    pub fn init_job_result(
-        context: TaskContext,
-        queue_url: Url,
-    ) -> Result<JobResultClient<TaskContext>>
-    where
-        TaskContext: Send + Sync + 'static,
-    {
-        let context = Arc::new(JobResultContext {
-            state: context,
-            queue_client: QueueClient::new(queue_url)?,
-        });
-
-        Ok(JobResultClient { context })
-    }
-}
-
-pub type TaskJobResultClient = JobResultClient<TaskContext>;
-
-pub async fn init_job_result(
-    queue_url: Url,
-    task_id: Uuid,
-    job_id: Uuid,
-    machine_id: Uuid,
-    machine_name: String,
-) -> Result<TaskJobResultClient> {
-    let hb = JobResultClient::init_job_result(
-        TaskContext {
-            task_id,
-            job_id,
-            machine_id,
-            machine_name,
-        },
-        queue_url,
-    )?;
-    Ok(hb)
-}
-
-#[async_trait]
-pub trait JobResultSender {
-    async fn send_direct(&self, data: JobResultData, value: HashMap<String, f64>);
-}
-
-#[async_trait]
-impl JobResultSender for TaskJobResultClient {
-    async fn send_direct(&self, data: JobResultData, value: HashMap<String, f64>) {
-        let task_id = self.context.state.task_id;
-        let job_id = self.context.state.job_id;
-        let machine_id = self.context.state.machine_id;
-        let machine_name = self.context.state.machine_name.clone();
-
-        let _ = self
-            .context
-            .queue_client
-            .enqueue(JobResult {
-                task_id,
-                job_id,
-                machine_id,
-                machine_name,
-                data,
-                value,
-            })
-            .await;
-    }
-}
-
-#[async_trait]
-impl JobResultSender for Option<TaskJobResultClient> {
-    async fn send_direct(&self, data: JobResultData, value: HashMap<String, f64>) {
-        match self {
-            Some(client) => client.send_direct(data, value).await,
-            None => warn!("Failed to send Job Result message data from agent."),
-        }
-    }
-}
diff --git a/src/agent/onefuzz-result/src/lib.rs b/src/agent/onefuzz-result/src/lib.rs
deleted file mode 100644
index dae666ca9a..0000000000
--- a/src/agent/onefuzz-result/src/lib.rs
+++ /dev/null
@@ -1,4 +0,0 @@
-// Copyright (c) Microsoft Corporation.
-// Licensed under the MIT License.
-
-pub mod job_result;
diff --git a/src/agent/onefuzz-task/Cargo.toml b/src/agent/onefuzz-task/Cargo.toml
index 4e0bd381b0..0ad2f9aa4f 100644
--- a/src/agent/onefuzz-task/Cargo.toml
+++ b/src/agent/onefuzz-task/Cargo.toml
@@ -39,7 +39,6 @@ serde_json = "1.0"
 serde_yaml = "0.9.21"
 onefuzz = { path = "../onefuzz" }
 onefuzz-telemetry = { path = "../onefuzz-telemetry" }
-onefuzz-result = { path = "../onefuzz-result" }
 path-absolutize = "3.1"
 reqwest-retry = { path = "../reqwest-retry" }
 strum = "0.25"
diff --git a/src/agent/onefuzz-task/src/local/cmd.rs b/src/agent/onefuzz-task/src/local/cmd.rs
index eabefb71ee..80fd51a96b 100644
--- a/src/agent/onefuzz-task/src/local/cmd.rs
+++ b/src/agent/onefuzz-task/src/local/cmd.rs
@@ -3,7 +3,11 @@
 
 #[cfg(any(target_os = "linux", target_os = "windows"))]
 use crate::local::coverage;
-use crate::local::{common::add_common_config, libfuzzer_fuzz, tui::TerminalUi};
+use crate::local::{
+    common::add_common_config, generic_analysis, generic_crash_report, generic_generator,
+    libfuzzer, libfuzzer_crash_report, libfuzzer_fuzz, libfuzzer_merge, libfuzzer_regression,
+    libfuzzer_test_input, radamsa, test_input, tui::TerminalUi,
+};
 use anyhow::{Context, Result};
 use clap::{Arg, ArgAction, Command};
 use std::time::Duration;
@@ -17,9 +21,19 @@ use super::template;
 #[derive(Debug, PartialEq, Eq, EnumString, IntoStaticStr, EnumIter)]
 #[strum(serialize_all = "kebab-case")]
 enum Commands {
+    Radamsa,
     #[cfg(any(target_os = "linux", target_os = "windows"))]
     Coverage,
     LibfuzzerFuzz,
+    LibfuzzerMerge,
+    LibfuzzerCrashReport,
+    LibfuzzerTestInput,
+    LibfuzzerRegression,
+    Libfuzzer,
+    CrashReport,
+    Generator,
+    Analysis,
+    TestInput,
     Template,
 }
 
@@ -54,7 +68,23 @@ pub async fn run(args: clap::ArgMatches) -> Result<()> {
         match command {
             #[cfg(any(target_os = "linux", target_os = "windows"))]
             Commands::Coverage => coverage::run(&sub_args, event_sender).await,
+            Commands::Radamsa => radamsa::run(&sub_args, event_sender).await,
+            Commands::LibfuzzerCrashReport => {
+                libfuzzer_crash_report::run(&sub_args, event_sender).await
+            }
             Commands::LibfuzzerFuzz => libfuzzer_fuzz::run(&sub_args, event_sender).await,
+            Commands::LibfuzzerMerge => libfuzzer_merge::run(&sub_args, event_sender).await,
+            Commands::LibfuzzerTestInput => {
+                libfuzzer_test_input::run(&sub_args, event_sender).await
+            }
+            Commands::LibfuzzerRegression => {
+                libfuzzer_regression::run(&sub_args, event_sender).await
+            }
+            Commands::Libfuzzer => libfuzzer::run(&sub_args, event_sender).await,
+            Commands::CrashReport => generic_crash_report::run(&sub_args, event_sender).await,
+            Commands::Generator => generic_generator::run(&sub_args, event_sender).await,
+            Commands::Analysis => generic_analysis::run(&sub_args, event_sender).await,
+            Commands::TestInput => test_input::run(&sub_args, event_sender).await,
             Commands::Template => {
                 let config = sub_args
                     .get_one::<PathBuf>("config")
@@ -110,7 +140,17 @@ pub fn args(name: &'static str) -> Command {
         let app = match subcommand {
             #[cfg(any(target_os = "linux", target_os = "windows"))]
             Commands::Coverage => coverage::args(subcommand.into()),
+            Commands::Radamsa => radamsa::args(subcommand.into()),
+            Commands::LibfuzzerCrashReport => libfuzzer_crash_report::args(subcommand.into()),
             Commands::LibfuzzerFuzz => libfuzzer_fuzz::args(subcommand.into()),
+            Commands::LibfuzzerMerge => libfuzzer_merge::args(subcommand.into()),
+            Commands::LibfuzzerTestInput => libfuzzer_test_input::args(subcommand.into()),
+            Commands::LibfuzzerRegression => libfuzzer_regression::args(subcommand.into()),
+            Commands::Libfuzzer => libfuzzer::args(subcommand.into()),
+            Commands::CrashReport => generic_crash_report::args(subcommand.into()),
+            Commands::Generator => generic_generator::args(subcommand.into()),
+            Commands::Analysis => generic_analysis::args(subcommand.into()),
+            Commands::TestInput => test_input::args(subcommand.into()),
             Commands::Template => Command::new("template")
                 .about("uses the template to generate a run")
                 .args(vec![Arg::new("config")
diff --git a/src/agent/onefuzz-task/src/local/common.rs b/src/agent/onefuzz-task/src/local/common.rs
index 17940d799f..f8d7949e80 100644
--- a/src/agent/onefuzz-task/src/local/common.rs
+++ b/src/agent/onefuzz-task/src/local/common.rs
@@ -26,10 +26,20 @@ pub const INPUTS_DIR: &str = "inputs_dir";
 pub const CRASHES_DIR: &str = "crashes_dir";
 pub const CRASHDUMPS_DIR: &str = "crashdumps_dir";
 pub const TARGET_WORKERS: &str = "target_workers";
+pub const REPORTS_DIR: &str = "reports_dir";
+pub const NO_REPRO_DIR: &str = "no_repro_dir";
 pub const TARGET_TIMEOUT: &str = "target_timeout";
+pub const CHECK_RETRY_COUNT: &str = "check_retry_count";
+pub const DISABLE_CHECK_QUEUE: &str = "disable_check_queue";
+pub const UNIQUE_REPORTS_DIR: &str = "unique_reports_dir";
 pub const COVERAGE_DIR: &str = "coverage_dir";
 pub const READONLY_INPUTS: &str = "readonly_inputs_dir";
+pub const CHECK_ASAN_LOG: &str = "check_asan_log";
+pub const TOOLS_DIR: &str = "tools_dir";
+pub const RENAME_OUTPUT: &str = "rename_output";
 pub const CHECK_FUZZER_HELP: &str = "check_fuzzer_help";
+pub const DISABLE_CHECK_DEBUGGER: &str = "disable_check_debugger";
+pub const REGRESSION_REPORTS_DIR: &str = "regression_reports_dir";
 
 pub const TARGET_EXE: &str = "target_exe";
 pub const TARGET_ENV: &str = "target_env";
@@ -37,6 +47,17 @@ pub const TARGET_OPTIONS: &str = "target_options";
 // pub const SUPERVISOR_EXE: &str = "supervisor_exe";
 // pub const SUPERVISOR_ENV: &str = "supervisor_env";
 // pub const SUPERVISOR_OPTIONS: &str = "supervisor_options";
+pub const GENERATOR_EXE: &str = "generator_exe";
+pub const GENERATOR_ENV: &str = "generator_env";
+pub const GENERATOR_OPTIONS: &str = "generator_options";
+
+pub const ANALYZER_EXE: &str = "analyzer_exe";
+pub const ANALYZER_OPTIONS: &str = "analyzer_options";
+pub const ANALYZER_ENV: &str = "analyzer_env";
+pub const ANALYSIS_DIR: &str = "analysis_dir";
+pub const ANALYSIS_INPUTS: &str = "analysis_inputs";
+pub const ANALYSIS_UNIQUE_INPUTS: &str = "analysis_unique_inputs";
+pub const PRESERVE_EXISTING_OUTPUTS: &str = "preserve_existing_outputs";
 
 pub const CREATE_JOB_DIR: &str = "create_job_dir";
 
@@ -45,6 +66,7 @@ const WAIT_FOR_DIR_DELAY: Duration = Duration::from_secs(1);
 
 pub enum CmdType {
     Target,
+    Generator,
     // Supervisor,
 }
 
@@ -68,6 +90,7 @@ pub fn get_cmd_exe(cmd_type: CmdType, args: &clap::ArgMatches) -> Result<String>
     let name = match cmd_type {
         CmdType::Target => TARGET_EXE,
         // CmdType::Supervisor => SUPERVISOR_EXE,
+        CmdType::Generator => GENERATOR_EXE,
     };
 
     args.get_one::<String>(name)
@@ -79,6 +102,7 @@ pub fn get_cmd_arg(cmd_type: CmdType, args: &clap::ArgMatches) -> Vec<String> {
     let name = match cmd_type {
         CmdType::Target => TARGET_OPTIONS,
         // CmdType::Supervisor => SUPERVISOR_OPTIONS,
+        CmdType::Generator => GENERATOR_OPTIONS,
     };
 
     args.get_many::<String>(name)
@@ -91,6 +115,7 @@ pub fn get_cmd_env(cmd_type: CmdType, args: &clap::ArgMatches) -> Result<HashMap
     let env_name = match cmd_type {
         CmdType::Target => TARGET_ENV,
         // CmdType::Supervisor => SUPERVISOR_ENV,
+        CmdType::Generator => GENERATOR_ENV,
     };
     get_hash_map(args, env_name)
 }
@@ -240,7 +265,6 @@ pub async fn build_local_context(
         },
         instance_telemetry_key: None,
         heartbeat_queue: None,
-        job_result_queue: None,
         microsoft_telemetry_key: None,
         logs: None,
         min_available_memory_mb: 0,
diff --git a/src/agent/onefuzz-task/src/local/example_templates/libfuzzer_basic.yml b/src/agent/onefuzz-task/src/local/example_templates/libfuzzer_basic.yml
index aba02c7991..7210893809 100644
--- a/src/agent/onefuzz-task/src/local/example_templates/libfuzzer_basic.yml
+++ b/src/agent/onefuzz-task/src/local/example_templates/libfuzzer_basic.yml
@@ -5,31 +5,28 @@
 
 # 2. Install llvm and export LLVM_SYMBOLIZER_PATH like we do in setup.sh
 
-required_args: &required_args
-  target_exe: "REPLACE_ME" # The path to your target
-  inputs: &inputs "REPLACE_ME" # A folder containining your inputs
-  crashes: &crashes "REPLACE_ME" # The folder where you want the crashing inputs to be output
-  crashdumps: "REPLACE_ME" # The folder where you want the crash dumps to be output
-  coverage: "REPLACE_ME" # The folder where you want the code coverage to be output
-  regression_reports: "REPLACE_ME" # The folder where you want the regression reports to be output
-
 target_args: &target_args
-  <<: *required_args
   target_env: {}
+  target_exe: "REPLACE_ME" # The path to your target
   target_options: []
 
+inputs: &inputs "REPLACE_ME" # A folder containing your inputs
+
 tasks:
   - type: LibFuzzer
     <<: *target_args
+    inputs: *inputs
+    crashes: &crash "./crashes"
     readonly_inputs: []
     check_fuzzer_help: true
 
-  - type: LibfuzzerRegression
+  - type: "Report"
     <<: *target_args
-
-  - type: "LibfuzzerCrashReport"
-    <<: *target_args
-    input_queue: *crashes
+    input_queue: *crash
+    crashes: *crash
+    reports: "./reports"
+    unique_reports: "./unique_reports"
+    no_repro: "./no_repro"
     check_fuzzer_help: true
 
   - type: "Coverage"
@@ -38,11 +35,4 @@ tasks:
       - "{input}"
     input_queue: *inputs
     readonly_inputs: [*inputs]
-
-  # The analysis task is optional in the libfuzzer_basic template
-  # - type: Analysis
-  #   <<: *target_args
-  #   analysis: "REPLACE_ME" # The folder where you want the analysis results to be output
-  #   analyzer_exe: "REPLACE_ME"
-  #   analyzer_options: []
-  #   analyzer_env: {}
+    coverage: "./coverage"
diff --git a/src/agent/onefuzz-task/src/local/generic_analysis.rs b/src/agent/onefuzz-task/src/local/generic_analysis.rs
index 429e7b0e3b..3d3e2fafc8 100644
--- a/src/agent/onefuzz-task/src/local/generic_analysis.rs
+++ b/src/agent/onefuzz-task/src/local/generic_analysis.rs
@@ -3,13 +3,139 @@
 
 use std::{collections::HashMap, path::PathBuf};
 
-use crate::tasks::config::CommonConfig;
+use crate::{
+    local::common::{
+        build_local_context, get_cmd_arg, get_cmd_exe, get_hash_map, get_synced_dir, CmdType,
+        SyncCountDirMonitor, UiEvent, ANALYSIS_DIR, ANALYZER_ENV, ANALYZER_EXE, ANALYZER_OPTIONS,
+        CRASHES_DIR, NO_REPRO_DIR, REPORTS_DIR, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, TOOLS_DIR,
+        UNIQUE_REPORTS_DIR,
+    },
+    tasks::{
+        analysis::generic::{run as run_analysis, Config},
+        config::CommonConfig,
+    },
+};
 use anyhow::Result;
 use async_trait::async_trait;
+use clap::{Arg, Command};
+use flume::Sender;
 use schemars::JsonSchema;
+use storage_queue::QueueClient;
 
 use super::template::{RunContext, Template};
 
+pub fn build_analysis_config(
+    args: &clap::ArgMatches,
+    input_queue: Option<QueueClient>,
+    common: CommonConfig,
+    event_sender: Option<Sender<UiEvent>>,
+) -> Result<Config> {
+    let target_exe = get_cmd_exe(CmdType::Target, args)?.into();
+    let target_options = get_cmd_arg(CmdType::Target, args);
+
+    let analyzer_exe = args
+        .get_one::<String>(ANALYZER_EXE)
+        .cloned()
+        .ok_or_else(|| format_err!("expected {ANALYZER_EXE}"))?;
+
+    let analyzer_options = args
+        .get_many::<String>(ANALYZER_OPTIONS)
+        .unwrap_or_default()
+        .map(|x| x.to_string())
+        .collect();
+
+    let analyzer_env = get_hash_map(args, ANALYZER_ENV)?;
+    let analysis = get_synced_dir(ANALYSIS_DIR, common.job_id, common.task_id, args)?
+        .monitor_count(&event_sender)?;
+    let tools = get_synced_dir(TOOLS_DIR, common.job_id, common.task_id, args)?;
+    let crashes = if input_queue.is_none() {
+        get_synced_dir(CRASHES_DIR, common.job_id, common.task_id, args)
+            .ok()
+            .monitor_count(&event_sender)?
+    } else {
+        None
+    };
+    let reports = get_synced_dir(REPORTS_DIR, common.job_id, common.task_id, args)
+        .ok()
+        .monitor_count(&event_sender)?;
+    let no_repro = get_synced_dir(NO_REPRO_DIR, common.job_id, common.task_id, args)
+        .ok()
+        .monitor_count(&event_sender)?;
+    let unique_reports = get_synced_dir(UNIQUE_REPORTS_DIR, common.job_id, common.task_id, args)
+        .ok()
+        .monitor_count(&event_sender)?;
+
+    let config = Config {
+        analyzer_exe,
+        analyzer_options,
+        analyzer_env,
+        target_exe,
+        target_options,
+        input_queue,
+        crashes,
+        analysis,
+        tools: Some(tools),
+        reports,
+        unique_reports,
+        no_repro,
+        common,
+    };
+
+    Ok(config)
+}
+
+pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
+    let context = build_local_context(args, true, event_sender.clone()).await?;
+    let config = build_analysis_config(args, None, context.common_config.clone(), event_sender)?;
+    run_analysis(config).await
+}
+
+pub fn build_shared_args(required_task: bool) -> Vec<Arg> {
+    vec![
+        Arg::new(TARGET_EXE).long(TARGET_EXE).required(true),
+        Arg::new(TARGET_ENV)
+            .long(TARGET_ENV)
+            .requires(TARGET_EXE)
+            .num_args(0..),
+        Arg::new(TARGET_OPTIONS)
+            .long(TARGET_OPTIONS)
+            .default_value("{input}")
+            .value_delimiter(' ')
+            .help("Use a quoted string with space separation to denote multiple arguments"),
+        Arg::new(CRASHES_DIR)
+            .long(CRASHES_DIR)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(ANALYZER_OPTIONS)
+            .long(ANALYZER_OPTIONS)
+            .requires(ANALYZER_EXE)
+            .value_delimiter(' ')
+            .help("Use a quoted string with space separation to denote multiple arguments"),
+        Arg::new(ANALYZER_ENV)
+            .long(ANALYZER_ENV)
+            .requires(ANALYZER_EXE)
+            .num_args(0..),
+        Arg::new(TOOLS_DIR)
+            .long(TOOLS_DIR)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(ANALYZER_EXE)
+            .long(ANALYZER_EXE)
+            .requires(ANALYSIS_DIR)
+            .requires(CRASHES_DIR)
+            .required(required_task),
+        Arg::new(ANALYSIS_DIR)
+            .long(ANALYSIS_DIR)
+            .requires(ANALYZER_EXE)
+            .requires(CRASHES_DIR)
+            .required(required_task),
+    ]
+}
+
+pub fn args(name: &'static str) -> Command {
+    Command::new(name)
+        .about("execute a local-only generic analysis")
+        .args(&build_shared_args(true))
+}
+
 #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]
 pub struct Analysis {
     analyzer_exe: String,
@@ -20,7 +146,7 @@ pub struct Analysis {
     input_queue: Option<PathBuf>,
     crashes: Option<PathBuf>,
     analysis: PathBuf,
-    tools: Option<PathBuf>,
+    tools: PathBuf,
     reports: Option<PathBuf>,
     unique_reports: Option<PathBuf>,
     no_repro: Option<PathBuf>,
@@ -49,10 +175,9 @@ impl Template for Analysis {
                 .and_then(|path| context.to_monitored_sync_dir("crashes", path).ok()),
 
             analysis: context.to_monitored_sync_dir("analysis", self.analysis.clone())?,
-            tools: self
-                .tools
-                .as_ref()
-                .and_then(|path| context.to_monitored_sync_dir("tools", path).ok()),
+            tools: context
+                .to_monitored_sync_dir("tools", self.tools.clone())
+                .ok(),
 
             reports: self
                 .reports
diff --git a/src/agent/onefuzz-task/src/local/generic_crash_report.rs b/src/agent/onefuzz-task/src/local/generic_crash_report.rs
index 347a8cac76..6b0e2fccad 100644
--- a/src/agent/onefuzz-task/src/local/generic_crash_report.rs
+++ b/src/agent/onefuzz-task/src/local/generic_crash_report.rs
@@ -3,14 +3,150 @@
 
 use std::{collections::HashMap, path::PathBuf};
 
-use crate::tasks::{config::CommonConfig, utils::default_bool_true};
+use crate::{
+    local::common::{
+        build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir, CmdType,
+        SyncCountDirMonitor, UiEvent, CHECK_ASAN_LOG, CHECK_RETRY_COUNT, CRASHES_DIR,
+        DISABLE_CHECK_DEBUGGER, DISABLE_CHECK_QUEUE, NO_REPRO_DIR, REPORTS_DIR, TARGET_ENV,
+        TARGET_EXE, TARGET_OPTIONS, TARGET_TIMEOUT, UNIQUE_REPORTS_DIR,
+    },
+    tasks::{
+        config::CommonConfig,
+        report::generic::{Config, ReportTask},
+        utils::default_bool_true,
+    },
+};
 use anyhow::Result;
 use async_trait::async_trait;
+use clap::{Arg, ArgAction, Command};
+use flume::Sender;
 use futures::future::OptionFuture;
 use schemars::JsonSchema;
+use storage_queue::QueueClient;
 
 use super::template::{RunContext, Template};
 
+pub fn build_report_config(
+    args: &clap::ArgMatches,
+    input_queue: Option<QueueClient>,
+    common: CommonConfig,
+    event_sender: Option<Sender<UiEvent>>,
+) -> Result<Config> {
+    let target_exe = get_cmd_exe(CmdType::Target, args)?.into();
+    let target_env = get_cmd_env(CmdType::Target, args)?;
+    let target_options = get_cmd_arg(CmdType::Target, args);
+
+    let crashes = Some(get_synced_dir(
+        CRASHES_DIR,
+        common.job_id,
+        common.task_id,
+        args,
+    )?)
+    .monitor_count(&event_sender)?;
+    let reports = get_synced_dir(REPORTS_DIR, common.job_id, common.task_id, args)
+        .ok()
+        .monitor_count(&event_sender)?;
+    let no_repro = get_synced_dir(NO_REPRO_DIR, common.job_id, common.task_id, args)
+        .ok()
+        .monitor_count(&event_sender)?;
+
+    let unique_reports = Some(get_synced_dir(
+        UNIQUE_REPORTS_DIR,
+        common.job_id,
+        common.task_id,
+        args,
+    )?)
+    .monitor_count(&event_sender)?;
+
+    let target_timeout = args.get_one::<u64>(TARGET_TIMEOUT).copied();
+
+    let check_retry_count = args
+        .get_one::<u64>(CHECK_RETRY_COUNT)
+        .copied()
+        .expect("has a default");
+
+    let check_queue = !args.get_flag(DISABLE_CHECK_QUEUE);
+    let check_asan_log = args.get_flag(CHECK_ASAN_LOG);
+    let check_debugger = !args.get_flag(DISABLE_CHECK_DEBUGGER);
+
+    let config = Config {
+        target_exe,
+        target_env,
+        target_options,
+        target_timeout,
+        check_asan_log,
+        check_debugger,
+        check_retry_count,
+        check_queue,
+        crashes,
+        minimized_stack_depth: None,
+        input_queue,
+        no_repro,
+        reports,
+        unique_reports,
+        common,
+    };
+
+    Ok(config)
+}
+
+pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
+    let context = build_local_context(args, true, event_sender.clone()).await?;
+    let config = build_report_config(args, None, context.common_config.clone(), event_sender)?;
+    ReportTask::new(config).managed_run().await
+}
+
+pub fn build_shared_args() -> Vec<Arg> {
+    vec![
+        Arg::new(TARGET_EXE).long(TARGET_EXE).required(true),
+        Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..),
+        Arg::new(TARGET_OPTIONS)
+            .default_value("{input}")
+            .long(TARGET_OPTIONS)
+            .value_delimiter(' ')
+            .help("Use a quoted string with space separation to denote multiple arguments"),
+        Arg::new(CRASHES_DIR)
+            .long(CRASHES_DIR)
+            .required(true)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(REPORTS_DIR)
+            .long(REPORTS_DIR)
+            .required(false)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(NO_REPRO_DIR)
+            .long(NO_REPRO_DIR)
+            .required(false)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(UNIQUE_REPORTS_DIR)
+            .long(UNIQUE_REPORTS_DIR)
+            .value_parser(value_parser!(PathBuf))
+            .required(true),
+        Arg::new(TARGET_TIMEOUT)
+            .long(TARGET_TIMEOUT)
+            .value_parser(value_parser!(u64))
+            .default_value("30"),
+        Arg::new(CHECK_RETRY_COUNT)
+            .long(CHECK_RETRY_COUNT)
+            .value_parser(value_parser!(u64))
+            .default_value("0"),
+        Arg::new(DISABLE_CHECK_QUEUE)
+            .action(ArgAction::SetTrue)
+            .long(DISABLE_CHECK_QUEUE),
+        Arg::new(CHECK_ASAN_LOG)
+            .action(ArgAction::SetTrue)
+            .long(CHECK_ASAN_LOG),
+        Arg::new(DISABLE_CHECK_DEBUGGER)
+            .action(ArgAction::SetTrue)
+            .long(DISABLE_CHECK_DEBUGGER),
+    ]
+}
+
+pub fn args(name: &'static str) -> Command {
+    Command::new(name)
+        .about("execute a local-only generic crash report")
+        .args(&build_shared_args())
+}
+
 #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]
 pub struct CrashReport {
     target_exe: PathBuf,
diff --git a/src/agent/onefuzz-task/src/local/generic_generator.rs b/src/agent/onefuzz-task/src/local/generic_generator.rs
index ae9f6a3cc6..823ba221d6 100644
--- a/src/agent/onefuzz-task/src/local/generic_generator.rs
+++ b/src/agent/onefuzz-task/src/local/generic_generator.rs
@@ -3,14 +3,154 @@
 
 use std::{collections::HashMap, path::PathBuf};
 
-use crate::tasks::{config::CommonConfig, utils::default_bool_true};
+use crate::{
+    local::common::{
+        build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir,
+        get_synced_dirs, CmdType, SyncCountDirMonitor, UiEvent, CHECK_ASAN_LOG, CHECK_RETRY_COUNT,
+        CRASHES_DIR, DISABLE_CHECK_DEBUGGER, GENERATOR_ENV, GENERATOR_EXE, GENERATOR_OPTIONS,
+        READONLY_INPUTS, RENAME_OUTPUT, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, TARGET_TIMEOUT,
+        TOOLS_DIR,
+    },
+    tasks::{
+        config::CommonConfig,
+        fuzz::generator::{Config, GeneratorTask},
+        utils::default_bool_true,
+    },
+};
 use anyhow::Result;
 use async_trait::async_trait;
+use clap::{Arg, ArgAction, Command};
+use flume::Sender;
 use onefuzz::syncdir::SyncedDir;
 use schemars::JsonSchema;
 
 use super::template::{RunContext, Template};
 
+pub fn build_fuzz_config(
+    args: &clap::ArgMatches,
+    common: CommonConfig,
+    event_sender: Option<Sender<UiEvent>>,
+) -> Result<Config> {
+    let crashes = get_synced_dir(CRASHES_DIR, common.job_id, common.task_id, args)?
+        .monitor_count(&event_sender)?;
+    let target_exe = get_cmd_exe(CmdType::Target, args)?.into();
+    let target_options = get_cmd_arg(CmdType::Target, args);
+    let target_env = get_cmd_env(CmdType::Target, args)?;
+
+    let generator_exe = get_cmd_exe(CmdType::Generator, args)?;
+    let generator_options = get_cmd_arg(CmdType::Generator, args);
+    let generator_env = get_cmd_env(CmdType::Generator, args)?;
+    let readonly_inputs = get_synced_dirs(READONLY_INPUTS, common.job_id, common.task_id, args)?
+        .into_iter()
+        .map(|sd| sd.monitor_count(&event_sender))
+        .collect::<Result<Vec<_>>>()?;
+
+    let rename_output = args.get_flag(RENAME_OUTPUT);
+    let check_asan_log = args.get_flag(CHECK_ASAN_LOG);
+    let check_debugger = !args.get_flag(DISABLE_CHECK_DEBUGGER);
+
+    let check_retry_count = args
+        .get_one::<u64>(CHECK_RETRY_COUNT)
+        .copied()
+        .expect("has a default");
+
+    let target_timeout = Some(
+        args.get_one::<u64>(TARGET_TIMEOUT)
+            .copied()
+            .expect("has a default"),
+    );
+
+    let tools = get_synced_dir(TOOLS_DIR, common.job_id, common.task_id, args)
+        .ok()
+        .monitor_count(&event_sender)?;
+
+    let ensemble_sync_delay = None;
+
+    let config = Config {
+        generator_exe,
+        generator_env,
+        generator_options,
+        readonly_inputs,
+        crashes,
+        tools,
+        target_exe,
+        target_env,
+        target_options,
+        target_timeout,
+        check_asan_log,
+        check_debugger,
+        check_retry_count,
+        rename_output,
+        ensemble_sync_delay,
+        common,
+    };
+
+    Ok(config)
+}
+
+pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
+    let context = build_local_context(args, true, event_sender.clone()).await?;
+    let config = build_fuzz_config(args, context.common_config.clone(), event_sender)?;
+    GeneratorTask::new(config).run().await
+}
+
+pub fn build_shared_args() -> Vec<Arg> {
+    vec![
+        Arg::new(TARGET_EXE).long(TARGET_EXE).required(true),
+        Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..),
+        Arg::new(TARGET_OPTIONS)
+            .default_value("{input}")
+            .long(TARGET_OPTIONS)
+            .value_delimiter(' ')
+            .help("Use a quoted string with space separation to denote multiple arguments"),
+        Arg::new(GENERATOR_EXE)
+            .long(GENERATOR_EXE)
+            .default_value("radamsa")
+            .required(true),
+        Arg::new(GENERATOR_ENV).long(GENERATOR_ENV).num_args(0..),
+        Arg::new(GENERATOR_OPTIONS)
+            .long(GENERATOR_OPTIONS)
+            .value_delimiter(' ')
+            .default_value("-H sha256 -o {generated_inputs}/input-%h.%s -n 100 -r {input_corpus}")
+            .help("Use a quoted string with space separation to denote multiple arguments"),
+        Arg::new(CRASHES_DIR)
+            .required(true)
+            .long(CRASHES_DIR)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(READONLY_INPUTS)
+            .required(true)
+            .num_args(1..)
+            .value_parser(value_parser!(PathBuf))
+            .long(READONLY_INPUTS),
+        Arg::new(TOOLS_DIR)
+            .long(TOOLS_DIR)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(CHECK_RETRY_COUNT)
+            .long(CHECK_RETRY_COUNT)
+            .value_parser(value_parser!(u64))
+            .default_value("0"),
+        Arg::new(CHECK_ASAN_LOG)
+            .action(ArgAction::SetTrue)
+            .long(CHECK_ASAN_LOG),
+        Arg::new(RENAME_OUTPUT)
+            .action(ArgAction::SetTrue)
+            .long(RENAME_OUTPUT),
+        Arg::new(TARGET_TIMEOUT)
+            .long(TARGET_TIMEOUT)
+            .value_parser(value_parser!(u64))
+            .default_value("30"),
+        Arg::new(DISABLE_CHECK_DEBUGGER)
+            .action(ArgAction::SetTrue)
+            .long(DISABLE_CHECK_DEBUGGER),
+    ]
+}
+
+pub fn args(name: &'static str) -> Command {
+    Command::new(name)
+        .about("execute a local-only generator fuzzing task")
+        .args(&build_shared_args())
+}
+
 #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]
 pub struct Generator {
     generator_exe: String,
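
The config builders above follow three clap 4 conventions that recur throughout this patch: boolean flags are declared with `ArgAction::SetTrue` and read back with `get_flag`, numeric options go through `value_parser!(u64)`, and `expect("has a default")` is justified only by a declared `default_value`. A minimal, self-contained sketch of the pattern (argument names reused for familiarity; this is not code from the patch):

```rust
use clap::{value_parser, Arg, ArgAction, Command};

fn main() {
    let cmd = Command::new("demo")
        .arg(
            Arg::new("check_retry_count")
                .long("check_retry_count")
                .value_parser(value_parser!(u64))
                .default_value("0"),
        )
        .arg(
            Arg::new("check_asan_log")
                .long("check_asan_log")
                .action(ArgAction::SetTrue),
        );

    let m = cmd.get_matches_from(["demo", "--check_asan_log"]);

    // This expect is safe only because default_value("0") is declared above.
    let retries = m
        .get_one::<u64>("check_retry_count")
        .copied()
        .expect("has a default");
    assert_eq!(retries, 0);
    assert!(m.get_flag("check_asan_log"));
}
```
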
diff --git a/src/agent/onefuzz-task/src/local/libfuzzer.rs b/src/agent/onefuzz-task/src/local/libfuzzer.rs
index 433636be1c..56dff7dbe3 100644
--- a/src/agent/onefuzz-task/src/local/libfuzzer.rs
+++ b/src/agent/onefuzz-task/src/local/libfuzzer.rs
@@ -1,19 +1,168 @@
 // Copyright (c) Microsoft Corporation.
 // Licensed under the MIT License.
 
-use crate::tasks::{
-    config::CommonConfig,
-    fuzz::libfuzzer::{common::default_workers, generic::LibFuzzerFuzzTask},
-    utils::default_bool_true,
+#[cfg(any(target_os = "linux", target_os = "windows"))]
+use crate::{
+    local::{common::COVERAGE_DIR, coverage, coverage::build_shared_args as build_coverage_args},
+    tasks::coverage::generic::CoverageTask,
+};
+use crate::{
+    local::{
+        common::{
+            build_local_context, wait_for_dir, DirectoryMonitorQueue, UiEvent, ANALYZER_EXE,
+            REGRESSION_REPORTS_DIR, UNIQUE_REPORTS_DIR,
+        },
+        generic_analysis::{build_analysis_config, build_shared_args as build_analysis_args},
+        libfuzzer_crash_report::{build_report_config, build_shared_args as build_crash_args},
+        libfuzzer_fuzz::{build_fuzz_config, build_shared_args as build_fuzz_args},
+        libfuzzer_regression::{
+            build_regression_config, build_shared_args as build_regression_args,
+        },
+    },
+    tasks::{
+        analysis::generic::run as run_analysis,
+        config::CommonConfig,
+        fuzz::libfuzzer::{common::default_workers, generic::LibFuzzerFuzzTask},
+        regression::libfuzzer::LibFuzzerRegressionTask,
+        report::libfuzzer_report::ReportTask,
+        utils::default_bool_true,
+    },
 };
 use anyhow::Result;
 use async_trait::async_trait;
-use onefuzz::syncdir::SyncedDir;
+use clap::Command;
+use flume::Sender;
+use onefuzz::{syncdir::SyncedDir, utils::try_wait_all_join_handles};
 use schemars::JsonSchema;
-use std::{collections::HashMap, path::PathBuf};
+use std::{
+    collections::{HashMap, HashSet},
+    path::PathBuf,
+};
+use tokio::task::spawn;
+use uuid::Uuid;
 
 use super::template::{RunContext, Template};
 
+pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
+    let context = build_local_context(args, true, event_sender.clone()).await?;
+    let fuzz_config = build_fuzz_config(args, context.common_config.clone(), event_sender.clone())?;
+    let crash_dir = fuzz_config
+        .crashes
+        .remote_url()?
+        .as_file_path()
+        .expect("invalid crash dir remote location");
+
+    let fuzzer = LibFuzzerFuzzTask::new(fuzz_config)?;
+    let mut task_handles = vec![];
+
+    let fuzz_task = spawn(async move { fuzzer.run().await });
+
+    wait_for_dir(&crash_dir).await?;
+
+    task_handles.push(fuzz_task);
+
+    if args.contains_id(UNIQUE_REPORTS_DIR) {
+        let crash_report_input_monitor =
+            DirectoryMonitorQueue::start_monitoring(crash_dir.clone()).await?;
+
+        let report_config = build_report_config(
+            args,
+            Some(crash_report_input_monitor.queue_client),
+            CommonConfig {
+                task_id: Uuid::new_v4(),
+                ..context.common_config.clone()
+            },
+            event_sender.clone(),
+        )?;
+
+        let mut report = ReportTask::new(report_config);
+        let report_task = spawn(async move { report.managed_run().await });
+
+        task_handles.push(report_task);
+        task_handles.push(crash_report_input_monitor.handle);
+    }
+
+    #[cfg(any(target_os = "linux", target_os = "windows"))]
+    if args.contains_id(COVERAGE_DIR) {
+        let coverage_input_monitor =
+            DirectoryMonitorQueue::start_monitoring(crash_dir.clone()).await?;
+        let coverage_config = coverage::build_coverage_config(
+            args,
+            true,
+            Some(coverage_input_monitor.queue_client),
+            CommonConfig {
+                task_id: Uuid::new_v4(),
+                ..context.common_config.clone()
+            },
+            event_sender.clone(),
+        )?;
+
+        let mut coverage = CoverageTask::new(coverage_config);
+        let coverage_task = spawn(async move { coverage.run().await });
+
+        task_handles.push(coverage_task);
+        task_handles.push(coverage_input_monitor.handle);
+    }
+
+    if args.contains_id(ANALYZER_EXE) {
+        let analysis_input_monitor = DirectoryMonitorQueue::start_monitoring(crash_dir).await?;
+        let analysis_config = build_analysis_config(
+            args,
+            Some(analysis_input_monitor.queue_client),
+            CommonConfig {
+                task_id: Uuid::new_v4(),
+                ..context.common_config.clone()
+            },
+            event_sender.clone(),
+        )?;
+        let analysis_task = spawn(async move { run_analysis(analysis_config).await });
+
+        task_handles.push(analysis_task);
+        task_handles.push(analysis_input_monitor.handle);
+    }
+
+    if args.contains_id(REGRESSION_REPORTS_DIR) {
+        let regression_config = build_regression_config(
+            args,
+            CommonConfig {
+                task_id: Uuid::new_v4(),
+                ..context.common_config.clone()
+            },
+            event_sender,
+        )?;
+        let regression = LibFuzzerRegressionTask::new(regression_config);
+        let regression_task = spawn(async move { regression.run().await });
+        task_handles.push(regression_task);
+    }
+
+    try_wait_all_join_handles(task_handles).await?;
+
+    Ok(())
+}
+
+pub fn args(name: &'static str) -> Command {
+    let mut app = Command::new(name).about("run a local libfuzzer & crash reporting task");
+
+    let mut used = HashSet::new();
+
+    for args in &[
+        build_fuzz_args(),
+        build_crash_args(),
+        build_analysis_args(false),
+        #[cfg(any(target_os = "linux", target_os = "windows"))]
+        build_coverage_args(true),
+        build_regression_args(false),
+    ] {
+        for arg in args {
+            if used.insert(arg.get_id()) {
+                app = app.arg(arg);
+            }
+        }
+    }
+
+    app
+}
+
 #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]
 pub struct LibFuzzer {
     inputs: PathBuf,
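
The `args` function above merges several tasks' shared argument sets into one `Command`, using a `HashSet` over `Arg::get_id` so the first definition of each argument wins. The same pattern in isolation, with two hypothetical overlapping arg sets:

```rust
use clap::{Arg, Command};
use std::collections::HashSet;

fn fuzz_args() -> Vec<Arg> {
    vec![
        Arg::new("target_exe").long("target_exe").required(true),
        Arg::new("inputs").long("inputs"),
    ]
}

fn report_args() -> Vec<Arg> {
    vec![
        // duplicates --target_exe from fuzz_args on purpose
        Arg::new("target_exe").long("target_exe").required(true),
        Arg::new("reports_dir").long("reports_dir"),
    ]
}

fn main() {
    let mut cmd = Command::new("combined");
    let mut used = HashSet::new();
    for args in [fuzz_args(), report_args()] {
        for arg in args {
            // first definition of an id wins; the duplicate --target_exe is skipped
            if used.insert(arg.get_id().clone()) {
                cmd = cmd.arg(arg);
            }
        }
    }
    let matches = cmd.get_matches_from(["combined", "--target_exe", "fuzz.exe"]);
    assert_eq!(
        matches.get_one::<String>("target_exe").map(String::as_str),
        Some("fuzz.exe")
    );
}
```
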
diff --git a/src/agent/onefuzz-task/src/local/libfuzzer_crash_report.rs b/src/agent/onefuzz-task/src/local/libfuzzer_crash_report.rs
index 04ba4f9225..c1ab283575 100644
--- a/src/agent/onefuzz-task/src/local/libfuzzer_crash_report.rs
+++ b/src/agent/onefuzz-task/src/local/libfuzzer_crash_report.rs
@@ -3,13 +3,139 @@
 
 use std::{collections::HashMap, path::PathBuf};
 
-use crate::tasks::{config::CommonConfig, utils::default_bool_true};
+use crate::{
+    local::common::{
+        build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir, CmdType,
+        SyncCountDirMonitor, UiEvent, CHECK_FUZZER_HELP, CHECK_RETRY_COUNT, CRASHES_DIR,
+        DISABLE_CHECK_QUEUE, NO_REPRO_DIR, REPORTS_DIR, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS,
+        TARGET_TIMEOUT, UNIQUE_REPORTS_DIR,
+    },
+    tasks::{
+        config::CommonConfig,
+        report::libfuzzer_report::{Config, ReportTask},
+        utils::default_bool_true,
+    },
+};
 use anyhow::Result;
 use async_trait::async_trait;
+use clap::{Arg, ArgAction, Command};
+use flume::Sender;
 use futures::future::OptionFuture;
 use schemars::JsonSchema;
+use storage_queue::QueueClient;
 
 use super::template::{RunContext, Template};
+
+pub fn build_report_config(
+    args: &clap::ArgMatches,
+    input_queue: Option<QueueClient>,
+    common: CommonConfig,
+    event_sender: Option<Sender<UiEvent>>,
+) -> Result<Config> {
+    let target_exe = get_cmd_exe(CmdType::Target, args)?.into();
+    let target_env = get_cmd_env(CmdType::Target, args)?;
+    let target_options = get_cmd_arg(CmdType::Target, args);
+
+    let crashes = get_synced_dir(CRASHES_DIR, common.job_id, common.task_id, args)
+        .ok()
+        .monitor_count(&event_sender)?;
+    let reports = get_synced_dir(REPORTS_DIR, common.job_id, common.task_id, args)
+        .ok()
+        .monitor_count(&event_sender)?;
+
+    let no_repro = get_synced_dir(NO_REPRO_DIR, common.job_id, common.task_id, args)
+        .ok()
+        .monitor_count(&event_sender)?;
+
+    let unique_reports = get_synced_dir(UNIQUE_REPORTS_DIR, common.job_id, common.task_id, args)
+        .ok()
+        .monitor_count(&event_sender)?;
+
+    let target_timeout = args.get_one::<u64>(TARGET_TIMEOUT).copied();
+
+    let check_retry_count = args
+        .get_one::<u64>(CHECK_RETRY_COUNT)
+        .copied()
+        .expect("has a default");
+
+    let check_queue = !args.get_flag(DISABLE_CHECK_QUEUE);
+
+    let check_fuzzer_help = args.get_flag(CHECK_FUZZER_HELP);
+
+    let crashes = if input_queue.is_none() { crashes } else { None };
+
+    let config = Config {
+        target_exe,
+        target_env,
+        target_options,
+        target_timeout,
+        check_retry_count,
+        check_fuzzer_help,
+        minimized_stack_depth: None,
+        input_queue,
+        check_queue,
+        crashes,
+        reports,
+        no_repro,
+        unique_reports,
+        common,
+    };
+
+    Ok(config)
+}
+
+pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
+    let context = build_local_context(args, true, event_sender.clone()).await?;
+    let config = build_report_config(args, None, context.common_config.clone(), event_sender)?;
+    ReportTask::new(config).managed_run().await
+}
+
+pub fn build_shared_args() -> Vec<Arg> {
+    vec![
+        Arg::new(TARGET_EXE).long(TARGET_EXE).required(true),
+        Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..),
+        Arg::new(TARGET_OPTIONS)
+            .long(TARGET_OPTIONS)
+            .value_delimiter(' ')
+            .help("Use a quoted string with space separation to denote multiple arguments"),
+        Arg::new(CRASHES_DIR)
+            .long(CRASHES_DIR)
+            .required(true)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(REPORTS_DIR)
+            .long(REPORTS_DIR)
+            .required(false)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(NO_REPRO_DIR)
+            .long(NO_REPRO_DIR)
+            .required(false)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(UNIQUE_REPORTS_DIR)
+            .long(UNIQUE_REPORTS_DIR)
+            .required(true)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(TARGET_TIMEOUT)
+            .value_parser(value_parser!(u64))
+            .long(TARGET_TIMEOUT),
+        Arg::new(CHECK_RETRY_COUNT)
+            .long(CHECK_RETRY_COUNT)
+            .value_parser(value_parser!(u64))
+            .default_value("0"),
+        Arg::new(DISABLE_CHECK_QUEUE)
+            .action(ArgAction::SetTrue)
+            .long(DISABLE_CHECK_QUEUE),
+        Arg::new(CHECK_FUZZER_HELP)
+            .action(ArgAction::SetTrue)
+            .long(CHECK_FUZZER_HELP),
+    ]
+}
+
+pub fn args(name: &'static str) -> Command {
+    Command::new(name)
+        .about("execute a local-only libfuzzer crash report task")
+        .args(&build_shared_args())
+}
+
 #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]
 pub struct LibfuzzerCrashReport {
     target_exe: PathBuf,
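
One subtlety in `build_report_config` above: when a live `input_queue` is supplied (as the combined libfuzzer job does), the synced `crashes` directory is deliberately dropped so the same inputs are not consumed from two sources. The gating reduced to its essence, with generic stand-in types:

```rust
// If a queue feeds the task, ignore the directory source; otherwise keep it.
fn choose_sources<Q, D>(input_queue: Option<Q>, crashes: Option<D>) -> (Option<Q>, Option<D>) {
    let crashes = if input_queue.is_none() { crashes } else { None };
    (input_queue, crashes)
}

fn main() {
    let (q, d) = choose_sources(Some("queue"), Some("dir"));
    assert!(q.is_some() && d.is_none()); // the queue wins

    let (q, d) = choose_sources(None::<&str>, Some("dir"));
    assert!(q.is_none() && d.is_some()); // fall back to watching the directory
}
```
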
diff --git a/src/agent/onefuzz-task/src/local/libfuzzer_merge.rs b/src/agent/onefuzz-task/src/local/libfuzzer_merge.rs
index 4b3e4ce58f..69c9df820b 100644
--- a/src/agent/onefuzz-task/src/local/libfuzzer_merge.rs
+++ b/src/agent/onefuzz-task/src/local/libfuzzer_merge.rs
@@ -3,15 +3,97 @@
 
 use std::{collections::HashMap, path::PathBuf};
 
-use crate::tasks::{config::CommonConfig, utils::default_bool_true};
+use crate::{
+    local::common::{
+        build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir,
+        get_synced_dirs, CmdType, SyncCountDirMonitor, UiEvent, ANALYSIS_INPUTS,
+        ANALYSIS_UNIQUE_INPUTS, CHECK_FUZZER_HELP, INPUTS_DIR, PRESERVE_EXISTING_OUTPUTS,
+        TARGET_ENV, TARGET_EXE, TARGET_OPTIONS,
+    },
+    tasks::{
+        config::CommonConfig,
+        merge::libfuzzer_merge::{spawn, Config},
+        utils::default_bool_true,
+    },
+};
 use anyhow::Result;
 use async_trait::async_trait;
+use clap::{Arg, ArgAction, Command};
+use flume::Sender;
 use futures::future::OptionFuture;
 use onefuzz::syncdir::SyncedDir;
 use schemars::JsonSchema;
+use storage_queue::QueueClient;
 
 use super::template::{RunContext, Template};
 
+pub fn build_merge_config(
+    args: &clap::ArgMatches,
+    input_queue: Option<QueueClient>,
+    common: CommonConfig,
+    event_sender: Option<Sender<UiEvent>>,
+) -> Result<Config> {
+    let target_exe = get_cmd_exe(CmdType::Target, args)?.into();
+    let target_env = get_cmd_env(CmdType::Target, args)?;
+    let target_options = get_cmd_arg(CmdType::Target, args);
+    let check_fuzzer_help = args.get_flag(CHECK_FUZZER_HELP);
+    let inputs = get_synced_dirs(ANALYSIS_INPUTS, common.job_id, common.task_id, args)?
+        .into_iter()
+        .map(|sd| sd.monitor_count(&event_sender))
+        .collect::<Result<Vec<_>>>()?;
+    let unique_inputs =
+        get_synced_dir(ANALYSIS_UNIQUE_INPUTS, common.job_id, common.task_id, args)?
+            .monitor_count(&event_sender)?;
+    let preserve_existing_outputs = args
+        .get_one::<bool>(PRESERVE_EXISTING_OUTPUTS)
+        .copied()
+        .unwrap_or_default();
+
+    let config = Config {
+        target_exe,
+        target_env,
+        target_options,
+        input_queue,
+        inputs,
+        unique_inputs,
+        preserve_existing_outputs,
+        check_fuzzer_help,
+        common,
+    };
+
+    Ok(config)
+}
+
+pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
+    let context = build_local_context(args, true, event_sender.clone()).await?;
+    let config = build_merge_config(args, None, context.common_config.clone(), event_sender)?;
+    spawn(config).await
+}
+
+pub fn build_shared_args() -> Vec<Arg> {
+    vec![
+        Arg::new(TARGET_EXE).long(TARGET_EXE).required(true),
+        Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..),
+        Arg::new(TARGET_OPTIONS)
+            .long(TARGET_OPTIONS)
+            .value_delimiter(' ')
+            .help("Use a quoted string with space separation to denote multiple arguments"),
+        Arg::new(CHECK_FUZZER_HELP)
+            .action(ArgAction::SetTrue)
+            .long(CHECK_FUZZER_HELP),
+        Arg::new(INPUTS_DIR)
+            .long(INPUTS_DIR)
+            .value_parser(value_parser!(PathBuf))
+            .num_args(0..),
+    ]
+}
+
+pub fn args(name: &'static str) -> Command {
+    Command::new(name)
+        .about("execute a local-only libfuzzer crash report task")
+        .args(&build_shared_args())
+}
+
 #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]
 pub struct LibfuzzerMerge {
     target_exe: PathBuf,
diff --git a/src/agent/onefuzz-task/src/local/libfuzzer_regression.rs b/src/agent/onefuzz-task/src/local/libfuzzer_regression.rs
index 3fbb9f0bd6..501d2385e2 100644
--- a/src/agent/onefuzz-task/src/local/libfuzzer_regression.rs
+++ b/src/agent/onefuzz-task/src/local/libfuzzer_regression.rs
@@ -3,13 +3,145 @@
 
 use std::{collections::HashMap, path::PathBuf};
 
-use crate::tasks::{config::CommonConfig, utils::default_bool_true};
+use crate::{
+    local::common::{
+        build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir, CmdType,
+        SyncCountDirMonitor, UiEvent, CHECK_FUZZER_HELP, CHECK_RETRY_COUNT, COVERAGE_DIR,
+        CRASHES_DIR, NO_REPRO_DIR, REGRESSION_REPORTS_DIR, REPORTS_DIR, TARGET_ENV, TARGET_EXE,
+        TARGET_OPTIONS, TARGET_TIMEOUT, UNIQUE_REPORTS_DIR,
+    },
+    tasks::{
+        config::CommonConfig,
+        regression::libfuzzer::{Config, LibFuzzerRegressionTask},
+        utils::default_bool_true,
+    },
+};
 use anyhow::Result;
 use async_trait::async_trait;
+use clap::{Arg, ArgAction, Command};
+use flume::Sender;
 use schemars::JsonSchema;
 
 use super::template::{RunContext, Template};
 
+const REPORT_NAMES: &str = "report_names";
+
+pub fn build_regression_config(
+    args: &clap::ArgMatches,
+    common: CommonConfig,
+    event_sender: Option<Sender<UiEvent>>,
+) -> Result<Config> {
+    let target_exe = get_cmd_exe(CmdType::Target, args)?.into();
+    let target_env = get_cmd_env(CmdType::Target, args)?;
+    let target_options = get_cmd_arg(CmdType::Target, args);
+    let target_timeout = args.get_one::<u64>(TARGET_TIMEOUT).copied();
+    let crashes = get_synced_dir(CRASHES_DIR, common.job_id, common.task_id, args)?
+        .monitor_count(&event_sender)?;
+    let regression_reports =
+        get_synced_dir(REGRESSION_REPORTS_DIR, common.job_id, common.task_id, args)?
+            .monitor_count(&event_sender)?;
+    let check_retry_count = args
+        .get_one::<u64>(CHECK_RETRY_COUNT)
+        .copied()
+        .expect("has a default value");
+
+    let reports = get_synced_dir(REPORTS_DIR, common.job_id, common.task_id, args)
+        .ok()
+        .monitor_count(&event_sender)?;
+    let no_repro = get_synced_dir(NO_REPRO_DIR, common.job_id, common.task_id, args)
+        .ok()
+        .monitor_count(&event_sender)?;
+    let unique_reports = get_synced_dir(UNIQUE_REPORTS_DIR, common.job_id, common.task_id, args)
+        .ok()
+        .monitor_count(&event_sender)?;
+
+    let report_list: Option<Vec<String>> = args
+        .get_many::<String>(REPORT_NAMES)
+        .map(|x| x.cloned().collect());
+
+    let check_fuzzer_help = args.get_flag(CHECK_FUZZER_HELP);
+
+    let config = Config {
+        target_exe,
+        target_env,
+        target_options,
+        target_timeout,
+        check_fuzzer_help,
+        check_retry_count,
+        crashes,
+        regression_reports,
+        reports,
+        no_repro,
+        unique_reports,
+        readonly_inputs: None,
+        report_list,
+        minimized_stack_depth: None,
+        common,
+    };
+    Ok(config)
+}
+
+pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
+    let context = build_local_context(args, true, event_sender.clone()).await?;
+    let config = build_regression_config(args, context.common_config.clone(), event_sender)?;
+    LibFuzzerRegressionTask::new(config).run().await
+}
+
+pub fn build_shared_args(local_job: bool) -> Vec<Arg> {
+    let mut args = vec![
+        Arg::new(TARGET_EXE).long(TARGET_EXE).required(true),
+        Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..),
+        Arg::new(TARGET_OPTIONS)
+            .long(TARGET_OPTIONS)
+            .value_delimiter(' ')
+            .help("Use a quoted string with space separation to denote multiple arguments"),
+        Arg::new(COVERAGE_DIR)
+            .required(!local_job)
+            .long(COVERAGE_DIR)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(CHECK_FUZZER_HELP)
+            .action(ArgAction::SetTrue)
+            .long(CHECK_FUZZER_HELP),
+        Arg::new(TARGET_TIMEOUT)
+            .long(TARGET_TIMEOUT)
+            .value_parser(value_parser!(u64)),
+        Arg::new(CRASHES_DIR)
+            .long(CRASHES_DIR)
+            .required(true)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(REGRESSION_REPORTS_DIR)
+            .long(REGRESSION_REPORTS_DIR)
+            .required(local_job)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(REPORTS_DIR)
+            .long(REPORTS_DIR)
+            .required(false)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(NO_REPRO_DIR)
+            .long(NO_REPRO_DIR)
+            .required(false)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(UNIQUE_REPORTS_DIR)
+            .long(UNIQUE_REPORTS_DIR)
+            .value_parser(value_parser!(PathBuf))
+            .required(true),
+        Arg::new(CHECK_RETRY_COUNT)
+            .long(CHECK_RETRY_COUNT)
+            .value_parser(value_parser!(u64))
+            .default_value("0"),
+    ];
+    if local_job {
+        args.push(Arg::new(REPORT_NAMES).long(REPORT_NAMES).num_args(0..))
+    }
+    args
+}
+
+pub fn args(name: &'static str) -> Command {
+    Command::new(name)
+        .about("execute a local-only libfuzzer regression task")
+        .args(&build_shared_args(true))
+}
+
 #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]
 pub struct LibfuzzerRegression {
     target_exe: PathBuf,
diff --git a/src/agent/onefuzz-task/src/local/libfuzzer_test_input.rs b/src/agent/onefuzz-task/src/local/libfuzzer_test_input.rs
index 5bef2347f7..9c6f16094e 100644
--- a/src/agent/onefuzz-task/src/local/libfuzzer_test_input.rs
+++ b/src/agent/onefuzz-task/src/local/libfuzzer_test_input.rs
@@ -1,14 +1,97 @@
 // Copyright (c) Microsoft Corporation.
 // Licensed under the MIT License.
 
+use crate::{
+    local::common::{
+        build_local_context, get_cmd_arg, get_cmd_env, CmdType, UiEvent, CHECK_RETRY_COUNT,
+        TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, TARGET_TIMEOUT,
+    },
+    tasks::report::libfuzzer_report::{test_input, TestInputArgs},
+};
 use anyhow::Result;
 use async_trait::async_trait;
+use clap::{Arg, Command};
+use flume::Sender;
 use onefuzz::machine_id::MachineIdentity;
 use schemars::JsonSchema;
 use std::{collections::HashMap, path::PathBuf};
 
 use super::template::{RunContext, Template};
 
+pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
+    let context = build_local_context(args, true, event_sender).await?;
+
+    let target_exe = args
+        .get_one::<PathBuf>(TARGET_EXE)
+        .expect("marked as required");
+    let target_env = get_cmd_env(CmdType::Target, args)?;
+    let target_options = get_cmd_arg(CmdType::Target, args);
+    let input = args
+        .get_one::<PathBuf>("input")
+        .expect("marked as required");
+    let target_timeout = args.get_one::<u64>(TARGET_TIMEOUT).copied();
+    let check_retry_count = args
+        .get_one::<u64>(CHECK_RETRY_COUNT)
+        .copied()
+        .expect("has a default value");
+
+    let extra_setup_dir = context.common_config.extra_setup_dir.as_deref();
+    let extra_output_dir = context
+        .common_config
+        .extra_output
+        .as_ref()
+        .map(|x| x.local_path.as_path());
+
+    let config = TestInputArgs {
+        target_exe: target_exe.as_path(),
+        target_env: &target_env,
+        target_options: &target_options,
+        input_url: None,
+        input: input.as_path(),
+        job_id: context.common_config.job_id,
+        task_id: context.common_config.task_id,
+        target_timeout,
+        check_retry_count,
+        setup_dir: &context.common_config.setup_dir,
+        extra_setup_dir,
+        extra_output_dir,
+        minimized_stack_depth: None,
+        machine_identity: context.common_config.machine_identity,
+    };
+
+    let result = test_input(config).await?;
+    println!("{}", serde_json::to_string_pretty(&result)?);
+    Ok(())
+}
+
+pub fn build_shared_args() -> Vec<Arg> {
+    vec![
+        Arg::new(TARGET_EXE).required(true),
+        Arg::new("input")
+            .required(true)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..),
+        Arg::new(TARGET_OPTIONS)
+            .default_value("{input}")
+            .long(TARGET_OPTIONS)
+            .value_delimiter(' ')
+            .help("Use a quoted string with space separation to denote multiple arguments"),
+        Arg::new(TARGET_TIMEOUT)
+            .long(TARGET_TIMEOUT)
+            .value_parser(value_parser!(u64)),
+        Arg::new(CHECK_RETRY_COUNT)
+            .long(CHECK_RETRY_COUNT)
+            .value_parser(value_parser!(u64))
+            .default_value("0"),
+    ]
+}
+
+pub fn args(name: &'static str) -> Command {
+    Command::new(name)
+        .about("test a libfuzzer application with a specific input")
+        .args(&build_shared_args())
+}
+
 #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]
 pub struct LibfuzzerTestInput {
     input: PathBuf,
diff --git a/src/agent/onefuzz-task/src/local/mod.rs b/src/agent/onefuzz-task/src/local/mod.rs
index 385ff8ffcd..03d394bcdb 100644
--- a/src/agent/onefuzz-task/src/local/mod.rs
+++ b/src/agent/onefuzz-task/src/local/mod.rs
@@ -14,6 +14,7 @@ pub mod libfuzzer_fuzz;
 pub mod libfuzzer_merge;
 pub mod libfuzzer_regression;
 pub mod libfuzzer_test_input;
+pub mod radamsa;
 pub mod template;
 pub mod test_input;
 pub mod tui;
diff --git a/src/agent/onefuzz-task/src/local/radamsa.rs b/src/agent/onefuzz-task/src/local/radamsa.rs
new file mode 100644
index 0000000000..4d84de027a
--- /dev/null
+++ b/src/agent/onefuzz-task/src/local/radamsa.rs
@@ -0,0 +1,78 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+
+use crate::{
+    local::{
+        common::{build_local_context, DirectoryMonitorQueue, UiEvent},
+        generic_crash_report::{build_report_config, build_shared_args as build_crash_args},
+        generic_generator::{build_fuzz_config, build_shared_args as build_fuzz_args},
+    },
+    tasks::{config::CommonConfig, fuzz::generator::GeneratorTask, report::generic::ReportTask},
+};
+use anyhow::{Context, Result};
+use clap::Command;
+use flume::Sender;
+use onefuzz::utils::try_wait_all_join_handles;
+use std::collections::HashSet;
+use tokio::task::spawn;
+use uuid::Uuid;
+
+pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
+    let context = build_local_context(args, true, event_sender.clone()).await?;
+    let fuzz_config = build_fuzz_config(args, context.common_config.clone(), event_sender.clone())?;
+    let crash_dir = fuzz_config
+        .crashes
+        .remote_url()?
+        .as_file_path()
+        .ok_or_else(|| format_err!("invalid crash directory"))?;
+
+    tokio::fs::create_dir_all(&crash_dir)
+        .await
+        .with_context(|| {
+            format!(
+                "unable to create crashes directory: {}",
+                crash_dir.display()
+            )
+        })?;
+
+    let fuzzer = GeneratorTask::new(fuzz_config);
+    let fuzz_task = spawn(async move { fuzzer.run().await });
+
+    let crash_report_input_monitor = DirectoryMonitorQueue::start_monitoring(crash_dir)
+        .await
+        .context("directory monitor failed")?;
+    let report_config = build_report_config(
+        args,
+        Some(crash_report_input_monitor.queue_client),
+        CommonConfig {
+            task_id: Uuid::new_v4(),
+            ..context.common_config.clone()
+        },
+        event_sender,
+    )?;
+    let report_task = spawn(async move { ReportTask::new(report_config).managed_run().await });
+
+    try_wait_all_join_handles(vec![
+        fuzz_task,
+        report_task,
+        crash_report_input_monitor.handle,
+    ])
+    .await?;
+
+    Ok(())
+}
+
+pub fn args(name: &'static str) -> Command {
+    let mut app = Command::new(name).about("run a local generator & crash reporting job");
+
+    let mut used = HashSet::new();
+    for args in &[build_fuzz_args(), build_crash_args()] {
+        for arg in args {
+            if used.insert(arg.get_id()) {
+                app = app.arg(arg);
+            }
+        }
+    }
+
+    app
+}
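
`DirectoryMonitorQueue::start_monitoring`, used here and in the combined libfuzzer job, bridges the crashes directory to a queue that feeds the report task. A rough stand-in using only tokio and a polling watcher; the real helper is backed by a storage queue and also hands back a join handle:

```rust
use std::{collections::HashSet, path::PathBuf, time::Duration};
use tokio::{fs, sync::mpsc, time::sleep};

// Poll `dir` and forward each newly seen file exactly once.
async fn monitor(dir: PathBuf, tx: mpsc::UnboundedSender<PathBuf>) -> anyhow::Result<()> {
    let mut seen = HashSet::new();
    loop {
        let mut entries = fs::read_dir(&dir).await?;
        while let Some(entry) = entries.next_entry().await? {
            let path = entry.path();
            if seen.insert(path.clone()) {
                tx.send(path)
                    .map_err(|_| anyhow::anyhow!("receiver dropped"))?;
            }
        }
        sleep(Duration::from_secs(1)).await;
    }
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let (tx, mut rx) = mpsc::unbounded_channel();
    let _watcher = tokio::spawn(monitor(PathBuf::from("./crashes"), tx));
    // Runs until the watcher task ends (e.g. the directory is missing).
    while let Some(path) = rx.recv().await {
        println!("new crash input: {}", path.display()); // hand off to reporting
    }
    Ok(())
}
```
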
diff --git a/src/agent/onefuzz-task/src/local/schema.json b/src/agent/onefuzz-task/src/local/schema.json
index e5b00f6e17..0a1f128e67 100644
--- a/src/agent/onefuzz-task/src/local/schema.json
+++ b/src/agent/onefuzz-task/src/local/schema.json
@@ -126,6 +126,7 @@
             "analyzer_options",
             "target_exe",
             "target_options",
+            "tools",
             "type"
           ],
           "properties": {
@@ -181,10 +182,7 @@
               }
             },
             "tools": {
-              "type": [
-                "string",
-                "null"
-              ]
+              "type": "string"
             },
             "type": {
               "type": "string",
@@ -895,4 +893,4 @@
       ]
     }
   }
-}
\ No newline at end of file
+}
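
This schema change makes `tools` both required and non-nullable for analysis templates, in line with the template types elsewhere in this patch. What that means for deserialization, sketched with a hypothetical serde struct:

```rust
use serde::Deserialize;

#[derive(Deserialize)]
#[allow(dead_code)]
struct AnalysisTask {
    tools: String, // previously Option<String>: null and missing were accepted
}

fn main() {
    assert!(serde_json::from_str::<AnalysisTask>(r#"{ "tools": "tools_dir" }"#).is_ok());
    assert!(serde_json::from_str::<AnalysisTask>(r#"{ "tools": null }"#).is_err());
    assert!(serde_json::from_str::<AnalysisTask>("{}").is_err());
}
```
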
diff --git a/src/agent/onefuzz-task/src/local/template.rs b/src/agent/onefuzz-task/src/local/template.rs
index 73ae6e5e48..b2e0c425ff 100644
--- a/src/agent/onefuzz-task/src/local/template.rs
+++ b/src/agent/onefuzz-task/src/local/template.rs
@@ -196,7 +196,6 @@ pub async fn launch(
         job_id: Uuid::new_v4(),
         instance_id: Uuid::new_v4(),
         heartbeat_queue: None,
-        job_result_queue: None,
         instance_telemetry_key: None,
         microsoft_telemetry_key: None,
         logs: None,
@@ -242,10 +241,12 @@ mod test {
             .expect("Couldn't find checked-in schema.json")
             .replace("\r\n", "\n");
 
-        if schema_str.replace('\n', "") != checked_in_schema.replace('\n', "") {
-            std::fs::write("src/local/new.schema.json", schema_str)
-                .expect("The schemas did not match but failed to write new schema to file.");
-            panic!("The checked-in local fuzzing schema did not match the generated schema. The generated schema can be found at src/local/new.schema.json");
-        }
+        println!("{}", schema_str);
+
+        assert_eq!(
+            schema_str.replace('\n', ""),
+            checked_in_schema.replace('\n', ""),
+            "The checked-in local fuzzing schema did not match the generated schema."
+        );
     }
 }
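
The rewritten test now fails in place via `assert_eq!` instead of writing `src/local/new.schema.json`, and prints the generated schema to aid debugging. The core of such a schema-drift check, with a hypothetical type standing in for the template enum:

```rust
use schemars::{schema_for, JsonSchema};

#[derive(JsonSchema)]
#[allow(dead_code)]
struct TaskTemplate {
    target_exe: std::path::PathBuf,
    target_options: Vec<String>,
}

fn main() {
    let schema = schema_for!(TaskTemplate);
    let generated = serde_json::to_string_pretty(&schema).expect("schema serializes");

    // The real test reads src/local/schema.json here; comparing against
    // ourselves just shows the shape of the check.
    let checked_in = generated.clone();

    assert_eq!(
        generated.replace('\n', ""),
        checked_in.replace('\n', ""),
        "The checked-in local fuzzing schema did not match the generated schema."
    );
}
```
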
diff --git a/src/agent/onefuzz-task/src/local/test_input.rs b/src/agent/onefuzz-task/src/local/test_input.rs
index b8027a7f41..4077bd08f8 100644
--- a/src/agent/onefuzz-task/src/local/test_input.rs
+++ b/src/agent/onefuzz-task/src/local/test_input.rs
@@ -1,8 +1,18 @@
 // Copyright (c) Microsoft Corporation.
 // Licensed under the MIT License.
 
+use crate::{
+    local::common::{
+        build_local_context, get_cmd_arg, get_cmd_env, CmdType, UiEvent, CHECK_ASAN_LOG,
+        CHECK_RETRY_COUNT, DISABLE_CHECK_DEBUGGER, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS,
+        TARGET_TIMEOUT,
+    },
+    tasks::report::generic::{test_input, TestInputArgs},
+};
 use anyhow::Result;
 use async_trait::async_trait;
+use clap::{Arg, ArgAction, Command};
+use flume::Sender;
 use onefuzz::machine_id::MachineIdentity;
 use schemars::JsonSchema;
 use std::{collections::HashMap, path::PathBuf};
@@ -10,6 +20,82 @@ use uuid::Uuid;
 
 use super::template::{RunContext, Template};
 
+pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
+    let context = build_local_context(args, false, event_sender).await?;
+
+    let target_exe = args
+        .get_one::<PathBuf>(TARGET_EXE)
+        .expect("is marked required");
+    let target_env = get_cmd_env(CmdType::Target, args)?;
+    let target_options = get_cmd_arg(CmdType::Target, args);
+    let input = args
+        .get_one::<PathBuf>("input")
+        .expect("is marked required");
+    let target_timeout = args.get_one::<u64>(TARGET_TIMEOUT).copied();
+    let check_retry_count = args
+        .get_one::<u64>(CHECK_RETRY_COUNT)
+        .copied()
+        .expect("has default value");
+    let check_asan_log = args.get_flag(CHECK_ASAN_LOG);
+    let check_debugger = !args.get_flag(DISABLE_CHECK_DEBUGGER);
+
+    let config = TestInputArgs {
+        target_exe: target_exe.as_path(),
+        target_env: &target_env,
+        target_options: &target_options,
+        input_url: None,
+        input: input.as_path(),
+        job_id: context.common_config.job_id,
+        task_id: context.common_config.task_id,
+        target_timeout,
+        check_retry_count,
+        setup_dir: &context.common_config.setup_dir,
+        extra_setup_dir: context.common_config.extra_setup_dir.as_deref(),
+        minimized_stack_depth: None,
+        check_asan_log,
+        check_debugger,
+        machine_identity: context.common_config.machine_identity.clone(),
+    };
+
+    let result = test_input(config).await?;
+    println!("{}", serde_json::to_string_pretty(&result)?);
+    Ok(())
+}
+
+pub fn build_shared_args() -> Vec<Arg> {
+    vec![
+        Arg::new(TARGET_EXE).required(true),
+        Arg::new("input")
+            .required(true)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..),
+        Arg::new(TARGET_OPTIONS)
+            .default_value("{input}")
+            .long(TARGET_OPTIONS)
+            .value_delimiter(' ')
+            .help("Use a quoted string with space separation to denote multiple arguments"),
+        Arg::new(TARGET_TIMEOUT)
+            .long(TARGET_TIMEOUT)
+            .value_parser(value_parser!(u64)),
+        Arg::new(CHECK_RETRY_COUNT)
+            .long(CHECK_RETRY_COUNT)
+            .value_parser(value_parser!(u64))
+            .default_value("0"),
+        Arg::new(CHECK_ASAN_LOG)
+            .action(ArgAction::SetTrue)
+            .long(CHECK_ASAN_LOG),
+        Arg::new(DISABLE_CHECK_DEBUGGER)
+            .action(ArgAction::SetTrue)
+            .long("disable_check_debugger"),
+    ]
+}
+
+pub fn args(name: &'static str) -> Command {
+    Command::new(name)
+        .about("test an application with a specific input")
+        .args(&build_shared_args())
+}
+
 #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]
 pub struct TestInput {
     input: PathBuf,
diff --git a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs
index 05c6c3d169..3ba068a614 100644
--- a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs
+++ b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs
@@ -65,8 +65,6 @@ pub async fn run(config: Config) -> Result<()> {
         tools.init_pull().await?;
     }
 
-    let job_result_client = config.common.init_job_result().await?;
-
     // the tempdir is always created, however, the reports_path and
     // reports_monitor_future are only created if we have one of the three
     // report SyncedDir. The idea is that the option for where to write reports
@@ -90,7 +88,6 @@ pub async fn run(config: Config) -> Result<()> {
                 &config.unique_reports,
                 &config.reports,
                 &config.no_repro,
-                &job_result_client,
             );
             (
                 Some(reports_dir.path().to_path_buf()),
@@ -174,7 +171,7 @@ async fn poll_inputs(
                 }
                 message.delete().await?;
             } else {
-                debug!("no new candidate inputs found, sleeping");
+                warn!("no new candidate inputs found, sleeping");
                 delay_with_jitter(EMPTY_QUEUE_DELAY).await;
             }
         }
diff --git a/src/agent/onefuzz-task/src/tasks/config.rs b/src/agent/onefuzz-task/src/tasks/config.rs
index e29e0fd60d..0848379d73 100644
--- a/src/agent/onefuzz-task/src/tasks/config.rs
+++ b/src/agent/onefuzz-task/src/tasks/config.rs
@@ -14,7 +14,6 @@ use onefuzz::{
     machine_id::MachineIdentity,
     syncdir::{SyncOperation, SyncedDir},
 };
-use onefuzz_result::job_result::{init_job_result, TaskJobResultClient};
 use onefuzz_telemetry::{
     self as telemetry, Event::task_start, EventData, InstanceTelemetryKey, MicrosoftTelemetryKey,
     Role,
@@ -51,8 +50,6 @@ pub struct CommonConfig {
 
     pub heartbeat_queue: Option<Url>,
 
-    pub job_result_queue: Option<Url>,
-
     pub instance_telemetry_key: Option<InstanceTelemetryKey>,
 
     pub microsoft_telemetry_key: Option<MicrosoftTelemetryKey>,
@@ -106,23 +103,6 @@ impl CommonConfig {
             None => Ok(None),
         }
     }
-
-    pub async fn init_job_result(&self) -> Result<Option<TaskJobResultClient>> {
-        match &self.job_result_queue {
-            Some(url) => {
-                let result = init_job_result(
-                    url.clone(),
-                    self.task_id,
-                    self.job_id,
-                    self.machine_identity.machine_id,
-                    self.machine_identity.machine_name.clone(),
-                )
-                .await?;
-                Ok(Some(result))
-            }
-            None => Ok(None),
-        }
-    }
 }
 
 #[derive(Debug, Deserialize)]
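
With `init_job_result` removed, `CommonConfig` retains only the heartbeat flavor of its optional-service pattern: a `None` URL disables the feature, a `Some` URL is turned into a client. A reduced sketch with stand-in types (not the crate's real API):

```rust
use anyhow::Result;

struct HeartbeatClient; // stand-in for TaskHeartbeatClient

// A missing URL disables the feature instead of being an error.
async fn init_heartbeat(queue_url: Option<String>) -> Result<Option<HeartbeatClient>> {
    match queue_url {
        Some(_url) => {
            // the real code connects to the Azure storage queue here
            Ok(Some(HeartbeatClient))
        }
        None => Ok(None),
    }
}

#[tokio::main]
async fn main() -> Result<()> {
    assert!(init_heartbeat(None).await?.is_none());
    assert!(init_heartbeat(Some("https://example/queue".into())).await?.is_some());
    Ok(())
}
```
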
diff --git a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs
index 4fde9efb31..b112cfefbe 100644
--- a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs
+++ b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs
@@ -26,8 +26,6 @@ use onefuzz_file_format::coverage::{
     binary::{v1::BinaryCoverageJson as BinaryCoverageJsonV1, BinaryCoverageJson},
     source::{v1::SourceCoverageJson as SourceCoverageJsonV1, SourceCoverageJson},
 };
-use onefuzz_result::job_result::JobResultData;
-use onefuzz_result::job_result::{JobResultSender, TaskJobResultClient};
 use onefuzz_telemetry::{event, warn, Event::coverage_data, Event::coverage_failed, EventData};
 use storage_queue::{Message, QueueClient};
 use tokio::fs;
@@ -116,7 +114,7 @@ impl CoverageTask {
         let allowlist = self.load_target_allowlist().await?;
 
         let heartbeat = self.config.common.init_heartbeat(None).await?;
-        let job_result = self.config.common.init_job_result().await?;
+
         let mut seen_inputs = false;
 
         let target_exe_path =
@@ -131,7 +129,6 @@ impl CoverageTask {
             coverage,
             allowlist,
             heartbeat,
-            job_result,
             target_exe.to_string(),
         )?;
 
@@ -222,7 +219,6 @@ struct TaskContext<'a> {
     module_allowlist: AllowList,
     source_allowlist: Arc<AllowList>,
     heartbeat: Option<TaskHeartbeatClient>,
-    job_result: Option<TaskJobResultClient>,
     cache: Arc<DebugInfoCache>,
 }
 
@@ -232,7 +228,6 @@ impl<'a> TaskContext<'a> {
         coverage: BinaryCoverage,
         allowlist: TargetAllowList,
         heartbeat: Option<TaskHeartbeatClient>,
-        job_result: Option<TaskJobResultClient>,
         target_exe: String,
     ) -> Result<Self> {
         let cache = DebugInfoCache::new(allowlist.source_files.clone());
@@ -252,7 +247,6 @@ impl<'a> TaskContext<'a> {
             module_allowlist: allowlist.modules,
             source_allowlist: Arc::new(allowlist.source_files),
             heartbeat,
-            job_result,
             cache: Arc::new(cache),
         })
     }
@@ -461,16 +455,7 @@ impl<'a> TaskContext<'a> {
         let s = CoverageStats::new(&coverage);
         event!(coverage_data; Covered = s.covered, Features = s.features, Rate = s.rate);
         metric!(coverage_data; 1.0; Covered = s.covered, Features = s.features, Rate = s.rate);
-        self.job_result
-            .send_direct(
-                JobResultData::CoverageData,
-                HashMap::from([
-                    ("covered".to_string(), s.covered as f64),
-                    ("features".to_string(), s.features as f64),
-                    ("rate".to_string(), s.rate),
-                ]),
-            )
-            .await;
+
         Ok(())
     }
 
diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs
index bd7511cac2..d9116a1ed2 100644
--- a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs
+++ b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs
@@ -73,7 +73,6 @@ impl GeneratorTask {
         }
 
         let hb_client = self.config.common.init_heartbeat(None).await?;
-        let jr_client = self.config.common.init_job_result().await?;
 
         for dir in &self.config.readonly_inputs {
             dir.init_pull().await?;
@@ -85,10 +84,7 @@ impl GeneratorTask {
             self.config.ensemble_sync_delay,
         );
 
-        let crash_dir_monitor = self
-            .config
-            .crashes
-            .monitor_results(new_result, false, &jr_client);
+        let crash_dir_monitor = self.config.crashes.monitor_results(new_result, false);
 
         let fuzzer = self.fuzzing_loop(hb_client);
 
@@ -302,7 +298,6 @@ mod tests {
                 task_id: Default::default(),
                 instance_id: Default::default(),
                 heartbeat_queue: Default::default(),
-                job_result_queue: Default::default(),
                 instance_telemetry_key: Default::default(),
                 microsoft_telemetry_key: Default::default(),
                 logs: Default::default(),
diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/libfuzzer/common.rs b/src/agent/onefuzz-task/src/tasks/fuzz/libfuzzer/common.rs
index bfd9f3f5cc..4f8c67ae8e 100644
--- a/src/agent/onefuzz-task/src/tasks/fuzz/libfuzzer/common.rs
+++ b/src/agent/onefuzz-task/src/tasks/fuzz/libfuzzer/common.rs
@@ -1,11 +1,7 @@
 // Copyright (c) Microsoft Corporation.
 // Licensed under the MIT License.
 
-use crate::tasks::{
-    config::CommonConfig,
-    heartbeat::{HeartbeatSender, TaskHeartbeatClient},
-    utils::default_bool_true,
-};
+use crate::tasks::{config::CommonConfig, heartbeat::HeartbeatSender, utils::default_bool_true};
 use anyhow::{Context, Result};
 use arraydeque::{ArrayDeque, Wrapping};
 use async_trait::async_trait;
@@ -16,7 +12,6 @@ use onefuzz::{
     process::ExitStatus,
     syncdir::{continuous_sync, SyncOperation::Pull, SyncedDir},
 };
-use onefuzz_result::job_result::{JobResultData, JobResultSender, TaskJobResultClient};
 use onefuzz_telemetry::{
     Event::{new_coverage, new_crashdump, new_result, runtime_stats},
     EventData,
@@ -131,31 +126,21 @@ where
         self.verify().await?;
 
         let hb_client = self.config.common.init_heartbeat(None).await?;
-        let jr_client = self.config.common.init_job_result().await?;
 
         // To be scheduled.
         let resync = self.continuous_sync_inputs();
-
-        let new_inputs = self
-            .config
-            .inputs
-            .monitor_results(new_coverage, true, &jr_client);
-        let new_crashes = self
-            .config
-            .crashes
-            .monitor_results(new_result, true, &jr_client);
+        let new_inputs = self.config.inputs.monitor_results(new_coverage, true);
+        let new_crashes = self.config.crashes.monitor_results(new_result, true);
         let new_crashdumps = async {
             if let Some(crashdumps) = &self.config.crashdumps {
-                crashdumps
-                    .monitor_results(new_crashdump, true, &jr_client)
-                    .await
+                crashdumps.monitor_results(new_crashdump, true).await
             } else {
                 Ok(())
             }
         };
 
         let (stats_sender, stats_receiver) = mpsc::unbounded_channel();
-        let report_stats = report_runtime_stats(stats_receiver, &hb_client, &jr_client);
+        let report_stats = report_runtime_stats(stats_receiver, hb_client);
         let fuzzers = self.run_fuzzers(Some(&stats_sender));
         futures::try_join!(
             resync,
@@ -198,7 +183,7 @@ where
             .inputs
             .local_path
             .parent()
-            .ok_or_else(|| anyhow!("invalid input path"))?;
+            .ok_or_else(|| anyhow!("Invalid input path"))?;
         let temp_path = task_dir.join(".temp");
         tokio::fs::create_dir_all(&temp_path).await?;
         let temp_dir = tempdir_in(temp_path)?;
@@ -516,7 +501,7 @@ impl TotalStats {
         self.execs_sec = self.worker_stats.values().map(|x| x.execs_sec).sum();
     }
 
-    async fn report(&self, jr_client: &Option<TaskJobResultClient>) {
+    fn report(&self) {
         event!(
             runtime_stats;
             EventData::Count = self.count,
@@ -528,17 +513,6 @@ impl TotalStats {
             EventData::Count = self.count,
             EventData::ExecsSecond = self.execs_sec
         );
-        if let Some(jr_client) = jr_client {
-            let _ = jr_client
-                .send_direct(
-                    JobResultData::RuntimeStats,
-                    HashMap::from([
-                        ("total_count".to_string(), self.count as f64),
-                        ("execs_sec".to_string(), self.execs_sec),
-                    ]),
-                )
-                .await;
-        }
     }
 }
 
@@ -568,8 +542,7 @@ impl Timer {
 // are approximating nearest-neighbor interpolation on the runtime stats time series.
 async fn report_runtime_stats(
     mut stats_channel: mpsc::UnboundedReceiver<RuntimeStats>,
-    heartbeat_client: &Option<TaskHeartbeatClient>,
-    jr_client: &Option<TaskJobResultClient>,
+    heartbeat_client: impl HeartbeatSender,
 ) -> Result<()> {
     // Cache the last-reported stats for a given worker.
     //
@@ -578,7 +551,7 @@ async fn report_runtime_stats(
     let mut total = TotalStats::default();
 
     // report all zeros to start
-    total.report(jr_client).await;
+    total.report();
 
     let timer = Timer::new(RUNTIME_STATS_PERIOD);
 
@@ -587,10 +560,10 @@ async fn report_runtime_stats(
             Some(stats) = stats_channel.recv() => {
                 heartbeat_client.alive();
                 total.update(stats);
-                total.report(jr_client).await
+                total.report()
             }
             _ = timer.wait() => {
-                total.report(jr_client).await
+                total.report()
             }
         }
     }
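
`report_runtime_stats` keeps its shape after losing the job-result client: merge worker updates as they arrive and re-emit running totals on a timer, approximating nearest-neighbor interpolation over the stats time series. The skeleton of that loop with plain tokio primitives (a sketch, not the task's code):

```rust
use tokio::{
    sync::mpsc,
    time::{interval, Duration},
};

#[tokio::main]
async fn main() {
    let (tx, mut rx) = mpsc::unbounded_channel::<u64>();
    tx.send(5).expect("receiver alive");
    drop(tx); // close the channel so this demo terminates

    let mut total = 0u64;
    let mut tick = interval(Duration::from_secs(1)); // stand-in for RUNTIME_STATS_PERIOD
    loop {
        tokio::select! {
            update = rx.recv() => match update {
                Some(execs) => {
                    total += execs;
                    println!("report: total={total}"); // real code: event!/metric!
                }
                None => break, // senders gone; a real task runs until cancelled
            },
            _ = tick.tick() => {
                println!("timer re-report: total={total}");
            }
        }
    }
}
```
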
diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs
index 3f00e20b8d..de1e1106ba 100644
--- a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs
+++ b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs
@@ -79,10 +79,7 @@ pub async fn spawn(config: SupervisorConfig) -> Result<(), Error> {
         remote_path: config.crashes.remote_path.clone(),
     };
     crashes.init().await?;
-
-    let jr_client = config.common.init_job_result().await?;
-
-    let monitor_crashes = crashes.monitor_results(new_result, false, &jr_client);
+    let monitor_crashes = crashes.monitor_results(new_result, false);
 
     // setup crashdumps
     let (crashdump_dir, monitor_crashdumps) = {
@@ -98,12 +95,9 @@ pub async fn spawn(config: SupervisorConfig) -> Result<(), Error> {
         };
 
         let monitor_dir = crashdump_dir.clone();
-        let monitor_jr_client = config.common.init_job_result().await?;
         let monitor_crashdumps = async move {
             if let Some(crashdumps) = monitor_dir {
-                crashdumps
-                    .monitor_results(new_crashdump, false, &monitor_jr_client)
-                    .await
+                crashdumps.monitor_results(new_crashdump, false).await
             } else {
                 Ok(())
             }
@@ -135,13 +129,11 @@ pub async fn spawn(config: SupervisorConfig) -> Result<(), Error> {
     if let Some(no_repro) = &config.no_repro {
         no_repro.init().await?;
     }
-
     let monitor_reports_future = monitor_reports(
         reports_dir.path(),
         &config.unique_reports,
         &config.reports,
         &config.no_repro,
-        &jr_client,
     );
 
     let inputs = SyncedDir {
@@ -164,7 +156,7 @@ pub async fn spawn(config: SupervisorConfig) -> Result<(), Error> {
             delay_with_jitter(delay).await;
         }
     }
-    let monitor_inputs = inputs.monitor_results(new_coverage, false, &jr_client);
+    let monitor_inputs = inputs.monitor_results(new_coverage, false);
     let inputs_sync_cancellation = CancellationToken::new(); // never actually cancelled
     let inputs_sync_task =
         inputs.continuous_sync(Pull, config.ensemble_sync_delay, &inputs_sync_cancellation);
@@ -452,7 +444,6 @@ mod tests {
                 task_id: Default::default(),
                 instance_id: Default::default(),
                 heartbeat_queue: Default::default(),
-                job_result_queue: Default::default(),
                 instance_telemetry_key: Default::default(),
                 microsoft_telemetry_key: Default::default(),
                 logs: Default::default(),
diff --git a/src/agent/onefuzz-task/src/tasks/heartbeat.rs b/src/agent/onefuzz-task/src/tasks/heartbeat.rs
index e13b661909..515fa39d0c 100644
--- a/src/agent/onefuzz-task/src/tasks/heartbeat.rs
+++ b/src/agent/onefuzz-task/src/tasks/heartbeat.rs
@@ -1,8 +1,8 @@
 // Copyright (c) Microsoft Corporation.
 // Licensed under the MIT License.
 
+use crate::onefuzz::heartbeat::HeartbeatClient;
 use anyhow::Result;
-use onefuzz::heartbeat::HeartbeatClient;
 use reqwest::Url;
 use serde::{self, Deserialize, Serialize};
 use std::time::Duration;
diff --git a/src/agent/onefuzz-task/src/tasks/merge/generic.rs b/src/agent/onefuzz-task/src/tasks/merge/generic.rs
index 3b6a2094d8..4f2e8234a8 100644
--- a/src/agent/onefuzz-task/src/tasks/merge/generic.rs
+++ b/src/agent/onefuzz-task/src/tasks/merge/generic.rs
@@ -83,7 +83,7 @@ pub async fn spawn(config: &Config) -> Result<()> {
                 }
             }
         } else {
-            debug!("no new candidate inputs found, sleeping");
+            warn!("no new candidate inputs found, sleeping");
             delay_with_jitter(EMPTY_QUEUE_DELAY).await;
         };
     }
diff --git a/src/agent/onefuzz-task/src/tasks/merge/libfuzzer_merge.rs b/src/agent/onefuzz-task/src/tasks/merge/libfuzzer_merge.rs
index 2d53bc8c07..1c334b3f18 100644
--- a/src/agent/onefuzz-task/src/tasks/merge/libfuzzer_merge.rs
+++ b/src/agent/onefuzz-task/src/tasks/merge/libfuzzer_merge.rs
@@ -120,7 +120,7 @@ async fn process_message(config: &Config, input_queue: QueueClient) -> Result<()
         }
         Ok(())
     } else {
-        debug!("no new candidate inputs found, sleeping");
+        warn!("no new candidate inputs found, sleeping");
         delay_with_jitter(EMPTY_QUEUE_DELAY).await;
         Ok(())
     }
diff --git a/src/agent/onefuzz-task/src/tasks/regression/common.rs b/src/agent/onefuzz-task/src/tasks/regression/common.rs
index b61a97df4c..60023cfa6e 100644
--- a/src/agent/onefuzz-task/src/tasks/regression/common.rs
+++ b/src/agent/onefuzz-task/src/tasks/regression/common.rs
@@ -2,14 +2,12 @@
 // Licensed under the MIT License.
 
 use crate::tasks::{
-    config::CommonConfig,
     heartbeat::{HeartbeatSender, TaskHeartbeatClient},
     report::crash_report::{parse_report_file, CrashTestResult, RegressionReport},
 };
 use anyhow::{Context, Result};
 use async_trait::async_trait;
 use onefuzz::syncdir::SyncedDir;
-use onefuzz_result::job_result::TaskJobResultClient;
 use reqwest::Url;
 use std::path::PathBuf;
 
@@ -26,7 +24,7 @@ pub trait RegressionHandler {
 
 /// Runs the regression task
 pub async fn run(
-    common_config: &CommonConfig,
+    heartbeat_client: Option<TaskHeartbeatClient>,
     regression_reports: &SyncedDir,
     crashes: &SyncedDir,
     report_dirs: &[&SyncedDir],
@@ -37,9 +35,6 @@ pub async fn run(
     info!("starting regression task");
     regression_reports.init().await?;
 
-    let heartbeat_client = common_config.init_heartbeat(None).await?;
-    let job_result_client = common_config.init_job_result().await?;
-
     handle_crash_reports(
         handler,
         crashes,
@@ -47,7 +42,6 @@ pub async fn run(
         report_list,
         regression_reports,
         &heartbeat_client,
-        &job_result_client,
     )
     .await
     .context("handling crash reports")?;
@@ -58,7 +52,6 @@ pub async fn run(
             readonly_inputs,
             regression_reports,
             &heartbeat_client,
-            &job_result_client,
         )
         .await
         .context("handling inputs")?;
@@ -78,7 +71,6 @@ pub async fn handle_inputs(
     readonly_inputs: &SyncedDir,
     regression_reports: &SyncedDir,
     heartbeat_client: &Option<TaskHeartbeatClient>,
-    job_result_client: &Option<TaskJobResultClient>,
 ) -> Result<()> {
     readonly_inputs.init_pull().await?;
     let mut input_files = tokio::fs::read_dir(&readonly_inputs.local_path).await?;
@@ -103,7 +95,7 @@ pub async fn handle_inputs(
             crash_test_result,
             original_crash_test_result: None,
         }
-        .save(None, regression_reports, job_result_client)
+        .save(None, regression_reports)
         .await?
     }
 
@@ -117,7 +109,6 @@ pub async fn handle_crash_reports(
     report_list: &Option<Vec<String>>,
     regression_reports: &SyncedDir,
     heartbeat_client: &Option<TaskHeartbeatClient>,
-    job_result_client: &Option<TaskJobResultClient>,
 ) -> Result<()> {
     // without crash report containers, skip this method
     if report_dirs.is_empty() {
@@ -167,7 +158,7 @@ pub async fn handle_crash_reports(
                 crash_test_result,
                 original_crash_test_result: Some(original_crash_test_result),
             }
-            .save(Some(file_name), regression_reports, job_result_client)
+            .save(Some(file_name), regression_reports)
             .await?
         }
     }
diff --git a/src/agent/onefuzz-task/src/tasks/regression/generic.rs b/src/agent/onefuzz-task/src/tasks/regression/generic.rs
index 8570208d59..640e80db9a 100644
--- a/src/agent/onefuzz-task/src/tasks/regression/generic.rs
+++ b/src/agent/onefuzz-task/src/tasks/regression/generic.rs
@@ -89,6 +89,7 @@ impl GenericRegressionTask {
 
     pub async fn run(&self) -> Result<()> {
         info!("Starting generic regression task");
+        let heartbeat_client = self.config.common.init_heartbeat(None).await?;
 
         let mut report_dirs = vec![];
         for dir in vec![
@@ -102,7 +103,7 @@ impl GenericRegressionTask {
             report_dirs.push(dir);
         }
         common::run(
-            &self.config.common,
+            heartbeat_client,
             &self.config.regression_reports,
             &self.config.crashes,
             &report_dirs,
diff --git a/src/agent/onefuzz-task/src/tasks/regression/libfuzzer.rs b/src/agent/onefuzz-task/src/tasks/regression/libfuzzer.rs
index e65f46bb64..06dd7c00d9 100644
--- a/src/agent/onefuzz-task/src/tasks/regression/libfuzzer.rs
+++ b/src/agent/onefuzz-task/src/tasks/regression/libfuzzer.rs
@@ -103,8 +103,9 @@ impl LibFuzzerRegressionTask {
             report_dirs.push(dir);
         }
 
+        let heartbeat_client = self.config.common.init_heartbeat(None).await?;
         common::run(
-            &self.config.common,
+            heartbeat_client,
             &self.config.regression_reports,
             &self.config.crashes,
             &report_dirs,
diff --git a/src/agent/onefuzz-task/src/tasks/report/crash_report.rs b/src/agent/onefuzz-task/src/tasks/report/crash_report.rs
index 290b98ccde..23171bc432 100644
--- a/src/agent/onefuzz-task/src/tasks/report/crash_report.rs
+++ b/src/agent/onefuzz-task/src/tasks/report/crash_report.rs
@@ -3,7 +3,6 @@
 
 use anyhow::{Context, Result};
 use onefuzz::{blob::BlobUrl, monitor::DirectoryMonitor, syncdir::SyncedDir};
-use onefuzz_result::job_result::{JobResultData, JobResultSender, TaskJobResultClient};
 use onefuzz_telemetry::{
     Event::{
         new_report, new_unable_to_reproduce, new_unique_report, regression_report,
@@ -13,7 +12,6 @@ use onefuzz_telemetry::{
 };
 use serde::{Deserialize, Serialize};
 use stacktrace_parser::CrashLog;
-use std::collections::HashMap;
 use std::path::{Path, PathBuf};
 use uuid::Uuid;
 
@@ -113,7 +111,6 @@ impl RegressionReport {
         self,
         report_name: Option<String>,
         regression_reports: &SyncedDir,
-        jr_client: &Option<TaskJobResultClient>,
     ) -> Result<()> {
         let (event, name) = match &self.crash_test_result {
             CrashTestResult::CrashReport(report) => {
@@ -129,15 +126,6 @@ impl RegressionReport {
         if upload_or_save_local(&self, &name, regression_reports).await? {
             event!(event; EventData::Path = name.clone());
             metric!(event; 1.0; EventData::Path = name.clone());
-
-            if let Some(jr_client) = jr_client {
-                let _ = jr_client
-                    .send_direct(
-                        JobResultData::NewRegressionReport,
-                        HashMap::from([("count".to_string(), 1.0)]),
-                    )
-                    .await;
-            }
         }
         Ok(())
     }
@@ -161,7 +149,6 @@ impl CrashTestResult {
         unique_reports: &Option<SyncedDir>,
         reports: &Option<SyncedDir>,
         no_repro: &Option<SyncedDir>,
-        jr_client: &Option<TaskJobResultClient>,
     ) -> Result<()> {
         match self {
             Self::CrashReport(report) => {
@@ -171,15 +158,6 @@ impl CrashTestResult {
                     if upload_or_save_local(&report, &name, unique_reports).await? {
                         event!(new_unique_report; EventData::Path = report.unique_blob_name());
                         metric!(new_unique_report; 1.0; EventData::Path = report.unique_blob_name());
-
-                        if let Some(jr_client) = jr_client {
-                            let _ = jr_client
-                                .send_direct(
-                                    JobResultData::NewUniqueReport,
-                                    HashMap::from([("count".to_string(), 1.0)]),
-                                )
-                                .await;
-                        }
                     }
                 }
 
@@ -188,15 +166,6 @@ impl CrashTestResult {
                     if upload_or_save_local(&report, &name, reports).await? {
                         event!(new_report; EventData::Path = report.blob_name());
                         metric!(new_report; 1.0; EventData::Path = report.blob_name());
-
-                        if let Some(jr_client) = jr_client {
-                            let _ = jr_client
-                                .send_direct(
-                                    JobResultData::NewReport,
-                                    HashMap::from([("count".to_string(), 1.0)]),
-                                )
-                                .await;
-                        }
                     }
                 }
             }
@@ -207,15 +176,6 @@ impl CrashTestResult {
                     if upload_or_save_local(&report, &name, no_repro).await? {
                         event!(new_unable_to_reproduce; EventData::Path = report.blob_name());
                         metric!(new_unable_to_reproduce; 1.0; EventData::Path = report.blob_name());
-
-                        if let Some(jr_client) = jr_client {
-                            let _ = jr_client
-                                .send_direct(
-                                    JobResultData::NoReproCrashingInput,
-                                    HashMap::from([("count".to_string(), 1.0)]),
-                                )
-                                .await;
-                        }
                     }
                 }
             }
@@ -364,7 +324,6 @@ pub async fn monitor_reports(
     unique_reports: &Option<SyncedDir>,
     reports: &Option<SyncedDir>,
     no_crash: &Option<SyncedDir>,
-    jr_client: &Option<TaskJobResultClient>,
 ) -> Result<()> {
     if unique_reports.is_none() && reports.is_none() && no_crash.is_none() {
         debug!("no report directories configured");
@@ -375,9 +334,7 @@ pub async fn monitor_reports(
 
     while let Some(file) = monitor.next_file().await? {
         let result = parse_report_file(file).await?;
-        result
-            .save(unique_reports, reports, no_crash, jr_client)
-            .await?;
+        result.save(unique_reports, reports, no_crash).await?;
     }
 
     Ok(())
diff --git a/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs b/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs
index b8659845de..9b626a7d89 100644
--- a/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs
+++ b/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs
@@ -8,25 +8,25 @@ use std::{
     sync::Arc,
 };
 
-use crate::tasks::report::crash_report::*;
-use crate::tasks::report::dotnet::common::collect_exception_info;
-use crate::tasks::{
-    config::CommonConfig,
-    generic::input_poller::*,
-    heartbeat::{HeartbeatSender, TaskHeartbeatClient},
-    utils::{default_bool_true, try_resolve_setup_relative_path},
-};
 use anyhow::{Context, Result};
 use async_trait::async_trait;
 use onefuzz::expand::Expand;
 use onefuzz::fs::set_executable;
 use onefuzz::{blob::BlobUrl, sha256, syncdir::SyncedDir};
-use onefuzz_result::job_result::TaskJobResultClient;
 use reqwest::Url;
 use serde::Deserialize;
 use storage_queue::{Message, QueueClient};
 use tokio::fs;
 
+use crate::tasks::report::crash_report::*;
+use crate::tasks::report::dotnet::common::collect_exception_info;
+use crate::tasks::{
+    config::CommonConfig,
+    generic::input_poller::*,
+    heartbeat::{HeartbeatSender, TaskHeartbeatClient},
+    utils::{default_bool_true, try_resolve_setup_relative_path},
+};
+
 const DOTNET_DUMP_TOOL_NAME: &str = "dotnet-dump";
 
 #[derive(Debug, Deserialize)]
@@ -114,18 +114,15 @@ impl DotnetCrashReportTask {
 pub struct AsanProcessor {
     config: Arc<Config>,
     heartbeat_client: Option<TaskHeartbeatClient>,
-    job_result_client: Option<TaskJobResultClient>,
 }
 
 impl AsanProcessor {
     pub async fn new(config: Arc<Config>) -> Result<Self> {
         let heartbeat_client = config.common.init_heartbeat(None).await?;
-        let job_result_client = config.common.init_job_result().await?;
 
         Ok(Self {
             config,
             heartbeat_client,
-            job_result_client,
         })
     }
 
@@ -263,7 +260,6 @@ impl Processor for AsanProcessor {
                 &self.config.unique_reports,
                 &self.config.reports,
                 &self.config.no_repro,
-                &self.job_result_client,
             )
             .await;
 
diff --git a/src/agent/onefuzz-task/src/tasks/report/generic.rs b/src/agent/onefuzz-task/src/tasks/report/generic.rs
index 8ad259f0a5..9088f98acc 100644
--- a/src/agent/onefuzz-task/src/tasks/report/generic.rs
+++ b/src/agent/onefuzz-task/src/tasks/report/generic.rs
@@ -13,7 +13,6 @@ use async_trait::async_trait;
 use onefuzz::{
     blob::BlobUrl, input_tester::Tester, machine_id::MachineIdentity, sha256, syncdir::SyncedDir,
 };
-use onefuzz_result::job_result::TaskJobResultClient;
 use reqwest::Url;
 use serde::Deserialize;
 use std::{
@@ -74,9 +73,7 @@ impl ReportTask {
     pub async fn managed_run(&mut self) -> Result<()> {
         info!("Starting generic crash report task");
         let heartbeat_client = self.config.common.init_heartbeat(None).await?;
-        let job_result_client = self.config.common.init_job_result().await?;
-        let mut processor =
-            GenericReportProcessor::new(&self.config, heartbeat_client, job_result_client);
+        let mut processor = GenericReportProcessor::new(&self.config, heartbeat_client);
 
         #[allow(clippy::manual_flatten)]
         for entry in [
@@ -186,19 +183,13 @@ pub async fn test_input(args: TestInputArgs<'_>) -> Result<CrashTestResult> {
 pub struct GenericReportProcessor<'a> {
     config: &'a Config,
     heartbeat_client: Option<TaskHeartbeatClient>,
-    job_result_client: Option<TaskJobResultClient>,
 }
 
 impl<'a> GenericReportProcessor<'a> {
-    pub fn new(
-        config: &'a Config,
-        heartbeat_client: Option<TaskHeartbeatClient>,
-        job_result_client: Option<TaskJobResultClient>,
-    ) -> Self {
+    pub fn new(config: &'a Config, heartbeat_client: Option<TaskHeartbeatClient>) -> Self {
         Self {
             config,
             heartbeat_client,
-            job_result_client,
         }
     }
 
@@ -248,7 +239,6 @@ impl<'a> Processor for GenericReportProcessor<'a> {
                 &self.config.unique_reports,
                 &self.config.reports,
                 &self.config.no_repro,
-                &self.job_result_client,
             )
             .await
             .context("saving report failed")
diff --git a/src/agent/onefuzz-task/src/tasks/report/libfuzzer_report.rs b/src/agent/onefuzz-task/src/tasks/report/libfuzzer_report.rs
index 587ed2e3dc..f18f638fa3 100644
--- a/src/agent/onefuzz-task/src/tasks/report/libfuzzer_report.rs
+++ b/src/agent/onefuzz-task/src/tasks/report/libfuzzer_report.rs
@@ -13,7 +13,6 @@ use async_trait::async_trait;
 use onefuzz::{
     blob::BlobUrl, libfuzzer::LibFuzzer, machine_id::MachineIdentity, sha256, syncdir::SyncedDir,
 };
-use onefuzz_result::job_result::TaskJobResultClient;
 use reqwest::Url;
 use serde::Deserialize;
 use std::{
@@ -197,18 +196,15 @@ pub async fn test_input(args: TestInputArgs<'_>) -> Result<CrashTestResult> {
 pub struct AsanProcessor {
     config: Arc<Config>,
     heartbeat_client: Option<TaskHeartbeatClient>,
-    job_result_client: Option<TaskJobResultClient>,
 }
 
 impl AsanProcessor {
     pub async fn new(config: Arc<Config>) -> Result<Self> {
         let heartbeat_client = config.common.init_heartbeat(None).await?;
-        let job_result_client = config.common.init_job_result().await?;
 
         Ok(Self {
             config,
             heartbeat_client,
-            job_result_client,
         })
     }
 
@@ -261,7 +257,6 @@ impl Processor for AsanProcessor {
                 &self.config.unique_reports,
                 &self.config.reports,
                 &self.config.no_repro,
-                &self.job_result_client,
             )
             .await
     }
diff --git a/src/agent/onefuzz/Cargo.toml b/src/agent/onefuzz/Cargo.toml
index 1f3c27985c..c096c8ddfc 100644
--- a/src/agent/onefuzz/Cargo.toml
+++ b/src/agent/onefuzz/Cargo.toml
@@ -44,7 +44,6 @@ tempfile = "3.7.0"
 process_control = "4.0"
 reqwest-retry = { path = "../reqwest-retry" }
 onefuzz-telemetry = { path = "../onefuzz-telemetry" }
-onefuzz-result = { path = "../onefuzz-result" }
 stacktrace-parser = { path = "../stacktrace-parser" }
 backoff = { version = "0.4", features = ["tokio"] }
 
diff --git a/src/agent/onefuzz/src/blob/url.rs b/src/agent/onefuzz/src/blob/url.rs
index 134b59dea0..f55ffbb23a 100644
--- a/src/agent/onefuzz/src/blob/url.rs
+++ b/src/agent/onefuzz/src/blob/url.rs
@@ -192,15 +192,10 @@ impl BlobContainerUrl {
     }
 
     pub fn as_path(&self, prefix: impl AsRef<Path>) -> Result<PathBuf> {
-        match (self.account(), self.container()) {
-            (Some(account), Some(container)) => {
-                let mut path = PathBuf::new();
-                path.push(account);
-                path.push(container);
-                Ok(prefix.as_ref().join(path))
-            }
-            _ => bail!("Invalid container Url"),
-        }
+        let dir = self
+            .account()
+            .ok_or_else(|| anyhow!("Invalid container Url"))?;
+        Ok(prefix.as_ref().join(dir))
     }
 }
 
@@ -531,14 +526,4 @@ mod tests {
             "id:000000,sig:06,src:000000,op:havoc,rep:128"
         );
     }
-
-    #[test]
-    fn test_as_path() -> Result<()> {
-        let root = PathBuf::from(r"/onefuzz");
-        let url = BlobContainerUrl::parse("https://myaccount.blob.core.windows.net/mycontainer")?;
-        let path = url.as_path(root)?;
-        assert_eq!(PathBuf::from(r"/onefuzz/myaccount/mycontainer"), path);
-
-        Ok(())
-    }
 }
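
    With the container segment dropped from as_path, the deleted test above no longer applies. A minimal replacement sketch for the new account-only behavior (not part of this patch, reusing the same URL as the removed test) would be:

        #[test]
        fn test_as_path_account_only() -> Result<()> {
            let root = PathBuf::from(r"/onefuzz");
            let url = BlobContainerUrl::parse("https://myaccount.blob.core.windows.net/mycontainer")?;
            // the container name is no longer appended to the prefix
            assert_eq!(PathBuf::from(r"/onefuzz/myaccount"), url.as_path(root)?);
            Ok(())
        }
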
diff --git a/src/agent/onefuzz/src/syncdir.rs b/src/agent/onefuzz/src/syncdir.rs
index 2e73b7a694..0252099561 100644
--- a/src/agent/onefuzz/src/syncdir.rs
+++ b/src/agent/onefuzz/src/syncdir.rs
@@ -11,12 +11,10 @@ use crate::{
 };
 use anyhow::{Context, Result};
 use dunce::canonicalize;
-use onefuzz_result::job_result::{JobResultData, JobResultSender, TaskJobResultClient};
 use onefuzz_telemetry::{Event, EventData};
 use reqwest::{StatusCode, Url};
 use reqwest_retry::{RetryCheck, SendRetry, DEFAULT_RETRY_PERIOD, MAX_RETRY_ATTEMPTS};
 use serde::{Deserialize, Serialize};
-use std::collections::HashMap;
 use std::{env::current_dir, path::PathBuf, str, time::Duration};
 use tokio::{fs, select};
 use tokio_util::sync::CancellationToken;
@@ -243,7 +241,6 @@ impl SyncedDir {
         url: BlobContainerUrl,
         event: Event,
         ignore_dotfiles: bool,
-        jr_client: &Option<TaskJobResultClient>,
     ) -> Result<()> {
         debug!("monitoring {}", path.display());
 
@@ -268,39 +265,9 @@ impl SyncedDir {
                 if ignore_dotfiles && file_name_event_str.starts_with('.') {
                     continue;
                 }
+
                 event!(event.clone(); EventData::Path = file_name_event_str);
                 metric!(event.clone(); 1.0; EventData::Path = file_name_str_metric_str);
-                if let Some(jr_client) = jr_client {
-                    match event {
-                        Event::new_result => {
-                            jr_client
-                                .send_direct(
-                                    JobResultData::NewCrashingInput,
-                                    HashMap::from([("count".to_string(), 1.0)]),
-                                )
-                                .await;
-                        }
-                        Event::new_coverage => {
-                            jr_client
-                                .send_direct(
-                                    JobResultData::CoverageData,
-                                    HashMap::from([("count".to_string(), 1.0)]),
-                                )
-                                .await;
-                        }
-                        Event::new_crashdump => {
-                            jr_client
-                                .send_direct(
-                                    JobResultData::NewCrashDump,
-                                    HashMap::from([("count".to_string(), 1.0)]),
-                                )
-                                .await;
-                        }
-                        _ => {
-                            warn!("Unhandled job result!");
-                        }
-                    }
-                }
                 let destination = path.join(file_name);
                 if let Err(err) = fs::copy(&item, &destination).await {
                     let error_message = format!(
@@ -338,29 +305,6 @@ impl SyncedDir {
 
                 event!(event.clone(); EventData::Path = file_name_event_str);
                 metric!(event.clone(); 1.0; EventData::Path = file_name_str_metric_str);
-                if let Some(jr_client) = jr_client {
-                    match event {
-                        Event::new_result => {
-                            jr_client
-                                .send_direct(
-                                    JobResultData::NewCrashingInput,
-                                    HashMap::from([("count".to_string(), 1.0)]),
-                                )
-                                .await;
-                        }
-                        Event::new_coverage => {
-                            jr_client
-                                .send_direct(
-                                    JobResultData::CoverageData,
-                                    HashMap::from([("count".to_string(), 1.0)]),
-                                )
-                                .await;
-                        }
-                        _ => {
-                            warn!("Unhandled job result!");
-                        }
-                    }
-                }
                 if let Err(err) = uploader.upload(item.clone()).await {
                     let error_message = format!(
                         "Couldn't upload file.  path:{} dir:{} err:{:?}",
@@ -392,12 +336,7 @@ impl SyncedDir {
     /// The intent of this is to support use cases where we usually want a directory
     /// to be initialized, but a user-supplied binary (such as AFL) logically owns
     /// a directory, and may reset it.
-    pub async fn monitor_results(
-        &self,
-        event: Event,
-        ignore_dotfiles: bool,
-        job_result_client: &Option<TaskJobResultClient>,
-    ) -> Result<()> {
+    pub async fn monitor_results(&self, event: Event, ignore_dotfiles: bool) -> Result<()> {
         if let Some(url) = self.remote_path.clone() {
             loop {
                 debug!("waiting to monitor {}", self.local_path.display());
@@ -416,7 +355,6 @@ impl SyncedDir {
                     url.clone(),
                     event.clone(),
                     ignore_dotfiles,
-                    job_result_client,
                 )
                 .await?;
             }
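
    The doc comment above explains why monitor_results re-enters its loop: a user-supplied binary such as AFL may delete and recreate the directory it owns. A condensed, illustrative sketch of that wait-then-monitor cycle (simplified from the code in this hunk, not a drop-in replacement):

        async fn monitor_cycle(dir: &std::path::Path) -> anyhow::Result<()> {
            loop {
                // wait for the owning tool to (re)create the directory
                while !dir.is_dir() {
                    tokio::time::sleep(std::time::Duration::from_millis(500)).await;
                }
                // ...sync and emit events for new files until the directory
                // disappears again, then fall through and wait once more...
            }
        }
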
diff --git a/src/deployment/bicep-templates/storageAccounts.bicep b/src/deployment/bicep-templates/storageAccounts.bicep
index 27f2da21d8..6a96cea6a0 100644
--- a/src/deployment/bicep-templates/storageAccounts.bicep
+++ b/src/deployment/bicep-templates/storageAccounts.bicep
@@ -33,7 +33,7 @@ var storageAccountFuncQueuesParams = [
   'update-queue'
   'webhooks'
   'signalr-events'
-  'job-result'
+  'custom-metrics'
 ]
 var fileChangesQueueIndex = 0
 
diff --git a/src/integration-tests/integration-test.py b/src/integration-tests/integration-test.py
index 15ffcfb9fe..057404ceff 100755
--- a/src/integration-tests/integration-test.py
+++ b/src/integration-tests/integration-test.py
@@ -88,7 +88,6 @@ class Integration(BaseModel):
     target_method: Optional[str]
     setup_dir: Optional[str]
     target_env: Optional[Dict[str, str]]
-    pool: PoolName
 
 
 TARGETS: Dict[str, Integration] = {
@@ -98,7 +97,6 @@ class Integration(BaseModel):
         target_exe="fuzz.exe",
         inputs="seeds",
         wait_for_files={ContainerType.unique_reports: 1},
-        pool="linux",
     ),
     "linux-libfuzzer": Integration(
         template=TemplateType.libfuzzer,
@@ -126,7 +124,6 @@ class Integration(BaseModel):
             "--only_asan_failures",
             "--write_test_file={extra_output_dir}/test.txt",
         ],
-        pool="linux",
     ),
     "linux-libfuzzer-with-options": Integration(
         template=TemplateType.libfuzzer,
@@ -140,7 +137,6 @@ class Integration(BaseModel):
         },
         reboot_after_setup=True,
         fuzzing_target_options=["-runs=10000000"],
-        pool="linux",
     ),
     "linux-libfuzzer-dlopen": Integration(
         template=TemplateType.libfuzzer,
@@ -154,7 +150,6 @@ class Integration(BaseModel):
         },
         reboot_after_setup=True,
         use_setup=True,
-        pool="linux",
     ),
     "linux-libfuzzer-linked-library": Integration(
         template=TemplateType.libfuzzer,
@@ -168,7 +163,6 @@ class Integration(BaseModel):
         },
         reboot_after_setup=True,
         use_setup=True,
-        pool="linux",
     ),
     "linux-libfuzzer-dotnet": Integration(
         template=TemplateType.libfuzzer_dotnet,
@@ -186,7 +180,6 @@ class Integration(BaseModel):
             ContainerType.unique_reports: 1,
         },
         test_repro=False,
-        pool="linux",
     ),
     "linux-libfuzzer-aarch64-crosscompile": Integration(
         template=TemplateType.libfuzzer_qemu_user,
@@ -196,7 +189,6 @@ class Integration(BaseModel):
         use_setup=True,
         wait_for_files={ContainerType.inputs: 2, ContainerType.crashes: 1},
         test_repro=False,
-        pool="linux",
     ),
     "linux-libfuzzer-rust": Integration(
         template=TemplateType.libfuzzer,
@@ -204,7 +196,6 @@ class Integration(BaseModel):
         target_exe="fuzz_target_1",
         wait_for_files={ContainerType.unique_reports: 1, ContainerType.coverage: 1},
         fuzzing_target_options=["--test:{extra_setup_dir}"],
-        pool="linux",
     ),
     "linux-trivial-crash": Integration(
         template=TemplateType.radamsa,
@@ -213,7 +204,6 @@ class Integration(BaseModel):
         inputs="seeds",
         wait_for_files={ContainerType.unique_reports: 1},
         inject_fake_regression=True,
-        pool="linux",
     ),
     "linux-trivial-crash-asan": Integration(
         template=TemplateType.radamsa,
@@ -223,28 +213,6 @@ class Integration(BaseModel):
         wait_for_files={ContainerType.unique_reports: 1},
         check_asan_log=True,
         disable_check_debugger=True,
-        pool="linux",
-    ),
-    # TODO: Don't install OMS extension on linux anymore
-    # TODO: Figure out why non mariner work is being scheduled to the mariner pool
-    "mariner-libfuzzer": Integration(
-        template=TemplateType.libfuzzer,
-        os=OS.linux,
-        target_exe="fuzz.exe",
-        inputs="seeds",
-        wait_for_files={
-            ContainerType.unique_reports: 1,
-            ContainerType.coverage: 1,
-            ContainerType.inputs: 2,
-            ContainerType.extra_output: 1,
-        },
-        reboot_after_setup=True,
-        inject_fake_regression=True,
-        fuzzing_target_options=[
-            "--test:{extra_setup_dir}",
-            "--write_test_file={extra_output_dir}/test.txt",
-        ],
-        pool=PoolName("mariner")
     ),
     "windows-libfuzzer": Integration(
         template=TemplateType.libfuzzer,
@@ -266,7 +234,6 @@ class Integration(BaseModel):
             "--only_asan_failures",
             "--write_test_file={extra_output_dir}/test.txt",
         ],
-        pool="windows",
     ),
     "windows-libfuzzer-linked-library": Integration(
         template=TemplateType.libfuzzer,
@@ -279,7 +246,6 @@ class Integration(BaseModel):
             ContainerType.coverage: 1,
         },
         use_setup=True,
-        pool="windows",
     ),
     "windows-libfuzzer-load-library": Integration(
         template=TemplateType.libfuzzer,
@@ -292,7 +258,6 @@ class Integration(BaseModel):
             ContainerType.coverage: 1,
         },
         use_setup=True,
-        pool="windows",
     ),
     "windows-libfuzzer-dotnet": Integration(
         template=TemplateType.libfuzzer_dotnet,
@@ -310,7 +275,6 @@ class Integration(BaseModel):
             ContainerType.unique_reports: 1,
         },
         test_repro=False,
-        pool="windows",
     ),
     "windows-trivial-crash": Integration(
         template=TemplateType.radamsa,
@@ -319,7 +283,6 @@ class Integration(BaseModel):
         inputs="seeds",
         wait_for_files={ContainerType.unique_reports: 1},
         inject_fake_regression=True,
-        pool="windows",
     ),
 }
 
@@ -388,7 +351,7 @@ def try_info_get(data: Any) -> None:
 
         self.inject_log(self.start_log_marker)
         for entry in os_list:
-            name = self.build_pool_name(entry.name)
+            name = PoolName(f"testpool-{entry.name}-{self.test_id}")
             self.logger.info("creating pool: %s:%s", entry.name, name)
             self.of.pools.create(name, entry)
             self.logger.info("creating scaleset for pool: %s", name)
@@ -396,15 +359,6 @@ def try_info_get(data: Any) -> None:
                 name, pool_size, region=region, initial_size=pool_size
             )
 
-        name = self.build_pool_name("mariner")
-        self.logger.info("creating pool: %s:%s", "mariner", name)
-        self.of.pools.create(name, OS.linux)
-        self.logger.info("creating scaleset for pool: %s", name)
-        self.of.scalesets.create(
-            name, pool_size, region=region, initial_size=pool_size, image="MicrosoftCBLMariner:cbl-mariner:cbl-mariner-2-gen2:latest"
-        )
-
-
     class UnmanagedPool:
         def __init__(
             self,
@@ -606,9 +560,12 @@ def launch(
     ) -> List[UUID]:
         """Launch all of the fuzzing templates"""
 
-        pool = None
+        pools: Dict[OS, Pool] = {}
         if unmanaged_pool is not None:
-            pool = unmanaged_pool.pool_name
+            pools[unmanaged_pool.the_os] = self.of.pools.get(unmanaged_pool.pool_name)
+        else:
+            for pool in self.of.pools.list():
+                pools[pool.os] = pool
 
         job_ids = []
 
@@ -619,8 +576,8 @@ def launch(
             if config.os not in os_list:
                 continue
 
-            if pool is None:
-                pool = self.build_pool_name(config.pool)
+            if config.os not in pools.keys():
+                raise Exception(f"No pool for target: {target} ,os: {config.os}")
 
             self.logger.info("launching: %s", target)
 
@@ -644,9 +601,8 @@ def launch(
                 setup = Directory(os.path.join(setup, config.nested_setup_dir))
 
             job: Optional[Job] = None
-                
             job = self.build_job(
-                duration, pool, target, config, setup, target_exe, inputs
+                duration, pools, target, config, setup, target_exe, inputs
             )
 
             if config.inject_fake_regression and job is not None:
@@ -662,7 +618,7 @@ def launch(
     def build_job(
         self,
         duration: int,
-        pool: PoolName,
+        pools: Dict[OS, Pool],
         target: str,
         config: Integration,
         setup: Optional[Directory],
@@ -678,7 +634,7 @@ def build_job(
                 self.project,
                 target,
                 BUILD,
-                pool,
+                pools[config.os].name,
                 target_exe=target_exe,
                 inputs=inputs,
                 setup_dir=setup,
@@ -703,7 +659,7 @@ def build_job(
                 self.project,
                 target,
                 BUILD,
-                pool,
+                pools[config.os].name,
                 target_dll=File(config.target_exe),
                 inputs=inputs,
                 setup_dir=setup,
@@ -719,7 +675,7 @@ def build_job(
                 self.project,
                 target,
                 BUILD,
-                pool,
+                pools[config.os].name,
                 inputs=inputs,
                 target_exe=target_exe,
                 duration=duration,
@@ -732,7 +688,7 @@ def build_job(
                 self.project,
                 target,
                 BUILD,
-                pool_name=pool,
+                pool_name=pools[config.os].name,
                 target_exe=target_exe,
                 inputs=inputs,
                 setup_dir=setup,
@@ -747,7 +703,7 @@ def build_job(
                 self.project,
                 target,
                 BUILD,
-                pool_name=pool,
+                pool_name=pools[config.os].name,
                 target_exe=target_exe,
                 inputs=inputs,
                 setup_dir=setup,
@@ -1277,9 +1233,6 @@ def check_logs_for_errors(self) -> None:
 
         if seen_errors:
             raise Exception("logs included errors")
-        
-    def build_pool_name(self, os_type: str) -> PoolName:
-        return PoolName(f"testpool-{os_type}-{self.test_id}")
 
 
 class Run(Command):
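
    The launch() changes above replace per-target pool names with a single OS-to-pool map discovered from the service, failing fast when a target's OS has no pool. The same selection logic, expressed as a hypothetical Rust sketch (the authoritative code is the Python above; all names here are illustrative):

        use std::collections::HashMap;

        #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
        enum Os { Linux, Windows }

        fn pick_pool<'a>(pools: &'a HashMap<Os, String>, target: &str, os: Os) -> anyhow::Result<&'a str> {
            pools
                .get(&os)
                .map(String::as_str)
                .ok_or_else(|| anyhow::anyhow!("No pool for target: {target}, os: {os:?}"))
        }
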
diff --git a/src/runtime-tools/linux/setup.sh b/src/runtime-tools/linux/setup.sh
old mode 100644
new mode 100755
index 794e827f4d..f6859003b4
--- a/src/runtime-tools/linux/setup.sh
+++ b/src/runtime-tools/linux/setup.sh
@@ -18,14 +18,6 @@ export DOTNET_CLI_HOME="$DOTNET_ROOT"
 export ONEFUZZ_ROOT=/onefuzz
 export LLVM_SYMBOLIZER_PATH=/onefuzz/bin/llvm-symbolizer
 
-# `logger` won't work on mariner unless we install this package first
-if type yum > /dev/null 2> /dev/null; then
-    until yum install -y util-linux sudo; do
-        echo "yum failed.  sleep 10s, then retrying"
-        sleep 10
-    done
-fi
-
 logger "onefuzz: making directories"
 sudo mkdir -p /onefuzz/downloaded
 sudo chown -R $(whoami) /onefuzz
@@ -142,53 +134,31 @@ if type apt > /dev/null 2> /dev/null; then
         sudo ln -f -s $(which llvm-symbolizer-12) $LLVM_SYMBOLIZER_PATH
     fi
 
-    # Needed to install dotnet
+    # Install dotnet
     until sudo apt install -y curl libicu-dev; do
         logger "apt failed, sleeping 10s then retrying"
         sleep 10
     done
-elif type yum > /dev/null 2> /dev/null; then
-    until yum install -y gdb gdb-gdbserver libunwind awk ca-certificates tar yum-utils shadow-utils cronie procps; do
-        echo "yum failed.  sleep 10s, then retrying"
-        sleep 10
-    done
-
-    # Install updated Microsoft Open Management Infrastructure - github.com/microsoft/omi
-    yum-config-manager --add-repo=https://packages.microsoft.com/config/rhel/8/prod.repo 2>&1 | logger -s -i -t 'onefuzz-OMI-add-MS-repo'
-    yum install -y omi 2>&1 | logger -s -i -t 'onefuzz-OMI-install'
 
+    logger "downloading dotnet install"
+    curl --retry 10 -sSL https://dot.net/v1/dotnet-install.sh -o dotnet-install.sh 2>&1 | logger -s -i -t 'onefuzz-curl-dotnet-install'
+    chmod +x dotnet-install.sh
 
-    if ! [ -f ${LLVM_SYMBOLIZER_PATH} ]; then
-        until yum install -y llvm-12.0.1; do
-            echo "yum failed, sleeping 10s then retrying"
-            sleep 10
-        done
-
-        # If specifying symbolizer, exe name must be a "known symbolizer".
-        # Using `llvm-symbolizer` works for clang 8 .. 12.
-        sudo ln -f -s $(which llvm-symbolizer-12) $LLVM_SYMBOLIZER_PATH
-    fi   
+    for version in "${DOTNET_VERSIONS[@]}"; do
+        logger "running dotnet install $version"
+        /bin/bash ./dotnet-install.sh --channel "$version" --install-dir "$DOTNET_ROOT" 2>&1 | logger -s -i -t 'onefuzz-dotnet-setup'
+    done
+    rm dotnet-install.sh
+
+    logger "install dotnet tools"
+    pushd "$DOTNET_ROOT"
+    ls -lah 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools'
+    "$DOTNET_ROOT"/dotnet tool install dotnet-dump --version 6.0.351802 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools'
+    "$DOTNET_ROOT"/dotnet tool install dotnet-coverage --version 17.5 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools'
+    "$DOTNET_ROOT"/dotnet tool install dotnet-sos --version 6.0.351802 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools'
+    popd
 fi
 
-# Install dotnet
-logger "downloading dotnet install"
-curl --retry 10 -sSL https://dot.net/v1/dotnet-install.sh -o dotnet-install.sh 2>&1 | logger -s -i -t 'onefuzz-curl-dotnet-install'
-chmod +x dotnet-install.sh
-
-for version in "${DOTNET_VERSIONS[@]}"; do
-    logger "running dotnet install $version"
-    /bin/bash ./dotnet-install.sh --channel "$version" --install-dir "$DOTNET_ROOT" 2>&1 | logger -s -i -t 'onefuzz-dotnet-setup'
-done
-rm dotnet-install.sh
-
-logger "install dotnet tools"
-pushd "$DOTNET_ROOT"
-ls -lah 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools'
-"$DOTNET_ROOT"/dotnet tool install dotnet-dump --version 6.0.351802 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools'
-"$DOTNET_ROOT"/dotnet tool install dotnet-coverage --version 17.5 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools'
-"$DOTNET_ROOT"/dotnet tool install dotnet-sos --version 6.0.351802 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools'
-popd
-
 if  [ -v DOCKER_BUILD ]; then
     echo "building for docker"
 elif [ -d /etc/systemd/system ]; then

From c8986aaa91838a8d701cf0e1099be6a103b8b736 Mon Sep 17 00:00:00 2001
From: Adam <103067949+AdamL-Microsoft@users.noreply.github.com>
Date: Wed, 30 Aug 2023 13:53:49 -0700
Subject: [PATCH 02/16] Revert "Release 8.7.1 (hotfix) (#3459)" (#3468)

This reverts commit c69deed50e81cc1805f6f82ebb10513a211cbbe2.
---
 .devcontainer/devcontainer.json               |   3 +-
 .github/workflows/ci.yml                      |   2 +
 CHANGELOG.md                                  |   6 -
 CURRENT_VERSION                               |   2 +-
 .../ApiService/Functions/QueueJobResult.cs    |  60 +++++++
 .../ApiService/OneFuzzTypes/Model.cs          |  45 +++++
 src/ApiService/ApiService/Program.cs          |   1 +
 .../ApiService/onefuzzlib/Config.cs           |   1 +
 .../ApiService/onefuzzlib/Extension.cs        |  44 ++---
 .../onefuzzlib/JobResultOperations.cs         | 121 +++++++++++++
 .../ApiService/onefuzzlib/OnefuzzContext.cs   |   2 +
 .../IntegrationTests/Fakes/TestContext.cs     |   3 +
 src/agent/Cargo.lock                          |  16 ++
 src/agent/Cargo.toml                          |   1 +
 src/agent/onefuzz-agent/src/config.rs         |  12 ++
 src/agent/onefuzz-agent/src/log_uploader.rs   |  29 ----
 src/agent/onefuzz-agent/src/work.rs           |   5 +-
 src/agent/onefuzz-result/Cargo.toml           |  18 ++
 src/agent/onefuzz-result/src/job_result.rs    | 129 ++++++++++++++
 src/agent/onefuzz-result/src/lib.rs           |   4 +
 src/agent/onefuzz-task/Cargo.toml             |   1 +
 src/agent/onefuzz-task/src/local/cmd.rs       |  42 +----
 src/agent/onefuzz-task/src/local/common.rs    |  26 +--
 .../example_templates/libfuzzer_basic.yml     |  34 ++--
 .../src/local/generic_analysis.rs             | 137 +--------------
 .../src/local/generic_crash_report.rs         | 138 +--------------
 .../src/local/generic_generator.rs            | 142 +--------------
 src/agent/onefuzz-task/src/local/libfuzzer.rs | 161 +-----------------
 .../src/local/libfuzzer_crash_report.rs       | 128 +-------------
 .../onefuzz-task/src/local/libfuzzer_merge.rs |  84 +--------
 .../src/local/libfuzzer_regression.rs         | 134 +--------------
 .../src/local/libfuzzer_test_input.rs         |  83 ---------
 src/agent/onefuzz-task/src/local/mod.rs       |   1 -
 src/agent/onefuzz-task/src/local/radamsa.rs   |  78 ---------
 src/agent/onefuzz-task/src/local/schema.json  |   8 +-
 src/agent/onefuzz-task/src/local/template.rs  |  13 +-
 .../onefuzz-task/src/local/test_input.rs      |  86 ----------
 .../src/tasks/analysis/generic.rs             |   5 +-
 src/agent/onefuzz-task/src/tasks/config.rs    |  20 +++
 .../src/tasks/coverage/generic.rs             |  19 ++-
 .../onefuzz-task/src/tasks/fuzz/generator.rs  |   7 +-
 .../src/tasks/fuzz/libfuzzer/common.rs        |  49 ++++--
 .../onefuzz-task/src/tasks/fuzz/supervisor.rs |  15 +-
 src/agent/onefuzz-task/src/tasks/heartbeat.rs |   2 +-
 .../onefuzz-task/src/tasks/merge/generic.rs   |   2 +-
 .../src/tasks/merge/libfuzzer_merge.rs        |   2 +-
 .../src/tasks/regression/common.rs            |  15 +-
 .../src/tasks/regression/generic.rs           |   3 +-
 .../src/tasks/regression/libfuzzer.rs         |   3 +-
 .../src/tasks/report/crash_report.rs          |  45 ++++-
 .../src/tasks/report/dotnet/generic.rs        |  22 ++-
 .../onefuzz-task/src/tasks/report/generic.rs  |  14 +-
 .../src/tasks/report/libfuzzer_report.rs      |   5 +
 src/agent/onefuzz/Cargo.toml                  |   1 +
 src/agent/onefuzz/src/blob/url.rs             |  23 ++-
 src/agent/onefuzz/src/syncdir.rs              |  66 ++++++-
 .../bicep-templates/storageAccounts.bicep     |   2 +-
 src/integration-tests/integration-test.py     |  77 +++++++--
 src/runtime-tools/linux/setup.sh              |  64 +++++--
 59 files changed, 872 insertions(+), 1389 deletions(-)
 create mode 100644 src/ApiService/ApiService/Functions/QueueJobResult.cs
 create mode 100644 src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs
 create mode 100644 src/agent/onefuzz-result/Cargo.toml
 create mode 100644 src/agent/onefuzz-result/src/job_result.rs
 create mode 100644 src/agent/onefuzz-result/src/lib.rs
 delete mode 100644 src/agent/onefuzz-task/src/local/radamsa.rs
 mode change 100755 => 100644 src/runtime-tools/linux/setup.sh

diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index 4059b3d7c1..d3fcf050ed 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -13,6 +13,7 @@
 					"**/target/**": true
 				},
 				"lldb.executable": "/usr/bin/lldb",
+				"dotnet.server.useOmnisharp": true,
 				"omnisharp.enableEditorConfigSupport": true,
 				"omnisharp.enableRoslynAnalyzers": true,
 				"python.defaultInterpreterPath": "/workspaces/onefuzz/src/venv/bin/python",
@@ -48,4 +49,4 @@
 	"features": {
 		"ghcr.io/devcontainers/features/azure-cli:1": {}
 	}
-}
+}
\ No newline at end of file
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 12824fd182..2dd85d7c92 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -542,9 +542,11 @@ jobs:
 
           mkdir -p artifacts/linux-libfuzzer
           mkdir -p artifacts/linux-libfuzzer-with-options
+          mkdir -p artifacts/mariner-libfuzzer
           (cd libfuzzer ; make )
           cp -r libfuzzer/fuzz.exe libfuzzer/seeds artifacts/linux-libfuzzer
           cp -r libfuzzer/fuzz.exe libfuzzer/seeds artifacts/linux-libfuzzer-with-options
+          cp -r libfuzzer/fuzz.exe libfuzzer/seeds artifacts/mariner-libfuzzer
 
           mkdir -p artifacts/linux-libfuzzer-regression
           (cd libfuzzer-regression ; make )
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8d46ea2a0e..be4779ad77 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,12 +7,6 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
-## 8.7.1
-
-### Fixed
-
-* Service: Removed deprecated Azure retention policy setting that was causing scaleset deployment errors [#3452](https://github.com/microsoft/onefuzz/pull/3452)
-
 ## 8.7.0
 
 ### Added
diff --git a/CURRENT_VERSION b/CURRENT_VERSION
index efeecbe2c5..c0bcaebe8f 100644
--- a/CURRENT_VERSION
+++ b/CURRENT_VERSION
@@ -1 +1 @@
-8.7.1
\ No newline at end of file
+8.7.0
\ No newline at end of file
diff --git a/src/ApiService/ApiService/Functions/QueueJobResult.cs b/src/ApiService/ApiService/Functions/QueueJobResult.cs
new file mode 100644
index 0000000000..d781a4d1e1
--- /dev/null
+++ b/src/ApiService/ApiService/Functions/QueueJobResult.cs
@@ -0,0 +1,60 @@
+using System.Text.Json;
+using Microsoft.Azure.Functions.Worker;
+using Microsoft.Extensions.Logging;
+using Microsoft.OneFuzz.Service.OneFuzzLib.Orm;
+namespace Microsoft.OneFuzz.Service.Functions;
+
+
+public class QueueJobResult {
+    private readonly ILogger _log;
+    private readonly IOnefuzzContext _context;
+
+    public QueueJobResult(ILogger<QueueJobResult> logTracer, IOnefuzzContext context) {
+        _log = logTracer;
+        _context = context;
+    }
+
+    [Function("QueueJobResult")]
+    public async Async.Task Run([QueueTrigger("job-result", Connection = "AzureWebJobsStorage")] string msg) {
+
+        var _tasks = _context.TaskOperations;
+        var _jobs = _context.JobOperations;
+
+        _log.LogInformation("job result: {msg}", msg);
+        var jr = JsonSerializer.Deserialize<TaskJobResultEntry>(msg, EntityConverter.GetJsonSerializerOptions()).EnsureNotNull($"wrong data {msg}");
+
+        var task = await _tasks.GetByTaskId(jr.TaskId);
+        if (task == null) {
+            _log.LogWarning("invalid {TaskId}", jr.TaskId);
+            return;
+        }
+
+        var job = await _jobs.Get(task.JobId);
+        if (job == null) {
+            _log.LogWarning("invalid {JobId}", task.JobId);
+            return;
+        }
+
+        JobResultData? data = jr.Data;
+        if (data == null) {
+            _log.LogWarning($"job result data is empty, throwing out: {jr}");
+            return;
+        }
+
+        var jobResultType = data.Type;
+        _log.LogInformation($"job result data type: {jobResultType}");
+
+        Dictionary<string, double> value;
+        if (jr.Value.Count > 0) {
+            value = jr.Value;
+        } else {
+            _log.LogWarning($"job result data is empty, throwing out: {jr}");
+            return;
+        }
+
+        var jobResult = await _context.JobResultOperations.CreateOrUpdate(job.JobId, jobResultType, value);
+        if (!jobResult.IsOk) {
+            _log.LogError("failed to create or update with job result {JobId}", job.JobId);
+        }
+    }
+}
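
    QueueJobResult consumes messages shaped like the TaskJobResultEntry record added to Model.cs below. An agent-side mirror of that payload, sketched in Rust (field casing and the serde derives are assumptions; the real producer is the onefuzz-result crate this patch restores):

        use std::collections::HashMap;
        use serde::{Deserialize, Serialize};
        use uuid::Uuid;

        #[derive(Serialize, Deserialize)]
        struct JobResultData {
            #[serde(rename = "type")]
            data_type: String, // a JobResultType variant, e.g. "NewReport"
        }

        #[derive(Serialize, Deserialize)]
        struct TaskJobResultEntry {
            task_id: Uuid,
            job_id: Option<Uuid>,
            machine_id: Uuid,
            data: JobResultData,
            value: HashMap<String, f64>, // e.g. {"count": 1.0}
        }
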
diff --git a/src/ApiService/ApiService/OneFuzzTypes/Model.cs b/src/ApiService/ApiService/OneFuzzTypes/Model.cs
index e430c1448c..b839f52ddc 100644
--- a/src/ApiService/ApiService/OneFuzzTypes/Model.cs
+++ b/src/ApiService/ApiService/OneFuzzTypes/Model.cs
@@ -33,6 +33,19 @@ public enum HeartbeatType {
     TaskAlive,
 }
 
+[SkipRename]
+public enum JobResultType {
+    NewCrashingInput,
+    NoReproCrashingInput,
+    NewReport,
+    NewUniqueReport,
+    NewRegressionReport,
+    NewCoverage,
+    NewCrashDump,
+    CoverageData,
+    RuntimeStats,
+}
+
 public record HeartbeatData(HeartbeatType Type);
 
 public record TaskHeartbeatEntry(
@@ -41,6 +54,16 @@ public record TaskHeartbeatEntry(
     Guid MachineId,
     HeartbeatData[] Data);
 
+public record JobResultData(JobResultType Type);
+
+public record TaskJobResultEntry(
+    Guid TaskId,
+    Guid? JobId,
+    Guid MachineId,
+    JobResultData Data,
+    Dictionary<string, double> Value
+    );
+
 public record NodeHeartbeatEntry(Guid NodeId, HeartbeatData[] Data);
 
 public record NodeCommandStopIfFree();
@@ -892,6 +915,27 @@ public record SecretAddress<T>(Uri Url) : ISecret<T> {
 public record SecretData<T>(ISecret<T> Secret) {
 }
 
+public record JobResult(
+    [PartitionKey][RowKey] Guid JobId,
+    string Project,
+    string Name,
+    double NewCrashingInput = 0,
+    double NoReproCrashingInput = 0,
+    double NewReport = 0,
+    double NewUniqueReport = 0,
+    double NewRegressionReport = 0,
+    double NewCrashDump = 0,
+    double InstructionsCovered = 0,
+    double TotalInstructions = 0,
+    double CoverageRate = 0,
+    double IterationCount = 0
+) : EntityBase() {
+    public JobResult(Guid JobId, string Project, string Name) : this(
+        JobId: JobId,
+        Project: Project,
+        Name: Name, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) { }
+}
+
 public record JobConfig(
     string Project,
     string Name,
@@ -1056,6 +1100,7 @@ public record TaskUnitConfig(
     string? InstanceTelemetryKey,
     string? MicrosoftTelemetryKey,
     Uri HeartbeatQueue,
+    Uri JobResultQueue,
     Dictionary<string, string> Tags
     ) {
     public Uri? inputQueue { get; set; }
diff --git a/src/ApiService/ApiService/Program.cs b/src/ApiService/ApiService/Program.cs
index f425c00809..d5ee30b45e 100644
--- a/src/ApiService/ApiService/Program.cs
+++ b/src/ApiService/ApiService/Program.cs
@@ -118,6 +118,7 @@ public static async Async.Task Main() {
                 .AddScoped<IVmOperations, VmOperations>()
                 .AddScoped<ISecretsOperations, SecretsOperations>()
                 .AddScoped<IJobOperations, JobOperations>()
+                .AddScoped<IJobResultOperations, JobResultOperations>()
                 .AddScoped<INsgOperations, NsgOperations>()
                 .AddScoped<IScheduler, Scheduler>()
                 .AddScoped<IConfig, Config>()
diff --git a/src/ApiService/ApiService/onefuzzlib/Config.cs b/src/ApiService/ApiService/onefuzzlib/Config.cs
index 71af317348..872cedbc01 100644
--- a/src/ApiService/ApiService/onefuzzlib/Config.cs
+++ b/src/ApiService/ApiService/onefuzzlib/Config.cs
@@ -71,6 +71,7 @@ private static BlobContainerSasPermissions ConvertPermissions(ContainerPermissio
             InstanceTelemetryKey: _serviceConfig.ApplicationInsightsInstrumentationKey,
             MicrosoftTelemetryKey: _serviceConfig.OneFuzzTelemetry,
             HeartbeatQueue: await _queue.GetQueueSas("task-heartbeat", StorageType.Config, QueueSasPermissions.Add) ?? throw new Exception("unable to get heartbeat queue sas"),
+            JobResultQueue: await _queue.GetQueueSas("job-result", StorageType.Config, QueueSasPermissions.Add) ?? throw new Exception("unable to get job result queue sas"),
             Tags: task.Config.Tags ?? new Dictionary<string, string>()
         );
 
diff --git a/src/ApiService/ApiService/onefuzzlib/Extension.cs b/src/ApiService/ApiService/onefuzzlib/Extension.cs
index 7995026eca..fbf62dd343 100644
--- a/src/ApiService/ApiService/onefuzzlib/Extension.cs
+++ b/src/ApiService/ApiService/onefuzzlib/Extension.cs
@@ -36,7 +36,9 @@ public async Async.Task<IList<VMExtensionWrapper>> GenericExtensions(AzureLocati
         var extensions = new List<VMExtensionWrapper>();
 
         var instanceConfig = await _context.ConfigOperations.Fetch();
-        extensions.Add(await MonitorExtension(region, vmOs));
+        if (vmOs == Os.Windows) {
+            extensions.Add(await MonitorExtension(region));
+        }
 
         var depenency = DependencyExtension(region, vmOs);
         if (depenency is not null) {
@@ -329,37 +331,21 @@ public async Async.Task<VMExtensionWrapper> AgentConfig(AzureLocation region, Os
         throw new NotSupportedException($"unsupported OS: {vmOs}");
     }
 
-    public async Async.Task<VMExtensionWrapper> MonitorExtension(AzureLocation region, Os vmOs) {
+    public async Async.Task<VMExtensionWrapper> MonitorExtension(AzureLocation region) {
         var settings = await _context.LogAnalytics.GetMonitorSettings();
         var extensionSettings = JsonSerializer.Serialize(new { WorkspaceId = settings.Id }, _extensionSerializerOptions);
         var protectedExtensionSettings = JsonSerializer.Serialize(new { WorkspaceKey = settings.Key }, _extensionSerializerOptions);
-        if (vmOs == Os.Windows) {
-            return new VMExtensionWrapper {
-                Location = region,
-                Name = "OMSExtension",
-                TypePropertiesType = "MicrosoftMonitoringAgent",
-                Publisher = "Microsoft.EnterpriseCloud.Monitoring",
-                TypeHandlerVersion = "1.0",
-                AutoUpgradeMinorVersion = true,
-                Settings = new BinaryData(extensionSettings),
-                ProtectedSettings = new BinaryData(protectedExtensionSettings),
-                EnableAutomaticUpgrade = false
-            };
-        } else if (vmOs == Os.Linux) {
-            return new VMExtensionWrapper {
-                Location = region,
-                Name = "OmsAgentForLinux",
-                TypePropertiesType = "OmsAgentForLinux",
-                Publisher = "Microsoft.EnterpriseCloud.Monitoring",
-                TypeHandlerVersion = "1.0",
-                AutoUpgradeMinorVersion = true,
-                Settings = new BinaryData(extensionSettings),
-                ProtectedSettings = new BinaryData(protectedExtensionSettings),
-                EnableAutomaticUpgrade = false
-            };
-        } else {
-            throw new NotSupportedException($"unsupported os: {vmOs}");
-        }
+        return new VMExtensionWrapper {
+            Location = region,
+            Name = "OMSExtension",
+            TypePropertiesType = "MicrosoftMonitoringAgent",
+            Publisher = "Microsoft.EnterpriseCloud.Monitoring",
+            TypeHandlerVersion = "1.0",
+            AutoUpgradeMinorVersion = true,
+            Settings = new BinaryData(extensionSettings),
+            ProtectedSettings = new BinaryData(protectedExtensionSettings),
+            EnableAutomaticUpgrade = false
+        };
     }
 
 
diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs
new file mode 100644
index 0000000000..1166cf91d4
--- /dev/null
+++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs
@@ -0,0 +1,121 @@
+using ApiService.OneFuzzLib.Orm;
+using Microsoft.Extensions.Logging;
+using Polly;
+namespace Microsoft.OneFuzz.Service;
+
+public interface IJobResultOperations : IOrm<JobResult> {
+
+    Async.Task<JobResult?> GetJobResult(Guid jobId);
+    Async.Task<OneFuzzResultVoid> CreateOrUpdate(Guid jobId, JobResultType resultType, Dictionary<string, double> resultValue);
+
+}
+public class JobResultOperations : Orm<JobResult>, IJobResultOperations {
+
+    public JobResultOperations(ILogger<JobResultOperations> log, IOnefuzzContext context)
+        : base(log, context) {
+    }
+
+    public async Async.Task<JobResult?> GetJobResult(Guid jobId) {
+        return await SearchByPartitionKeys(new[] { jobId.ToString() }).SingleOrDefaultAsync();
+    }
+
+    private JobResult UpdateResult(JobResult result, JobResultType type, Dictionary<string, double> resultValue) {
+
+        var newResult = result;
+        double newValue;
+        switch (type) {
+            case JobResultType.NewCrashingInput:
+                newValue = result.NewCrashingInput + resultValue["count"];
+                newResult = result with { NewCrashingInput = newValue };
+                break;
+            case JobResultType.NewReport:
+                newValue = result.NewReport + resultValue["count"];
+                newResult = result with { NewReport = newValue };
+                break;
+            case JobResultType.NewUniqueReport:
+                newValue = result.NewUniqueReport + resultValue["count"];
+                newResult = result with { NewUniqueReport = newValue };
+                break;
+            case JobResultType.NewRegressionReport:
+                newValue = result.NewRegressionReport + resultValue["count"];
+                newResult = result with { NewRegressionReport = newValue };
+                break;
+            case JobResultType.NewCrashDump:
+                newValue = result.NewCrashDump + resultValue["count"];
+                newResult = result with { NewCrashDump = newValue };
+                break;
+            case JobResultType.CoverageData:
+                double newCovered = resultValue["covered"];
+                double newTotalCovered = resultValue["features"];
+                double newCoverageRate = resultValue["rate"];
+                newResult = result with { InstructionsCovered = newCovered, TotalInstructions = newTotalCovered, CoverageRate = newCoverageRate };
+                break;
+            case JobResultType.RuntimeStats:
+                double newTotalIterations = resultValue["total_count"];
+                newResult = result with { IterationCount = newTotalIterations };
+                break;
+            default:
+                _logTracer.LogWarning($"Invalid Field {type}.");
+                break;
+        }
+        _logTracer.LogInformation($"Attempting to log new result: {newResult}");
+        return newResult;
+    }
+
+    private async Async.Task<bool> TryUpdate(Job job, JobResultType resultType, Dictionary<string, double> resultValue) {
+        var jobId = job.JobId;
+
+        var jobResult = await GetJobResult(jobId);
+
+        if (jobResult == null) {
+            _logTracer.LogInformation("Creating new JobResult for Job {JobId}", jobId);
+
+            var entry = new JobResult(JobId: jobId, Project: job.Config.Project, Name: job.Config.Name);
+
+            jobResult = UpdateResult(entry, resultType, resultValue);
+
+            var r = await Insert(jobResult);
+            if (!r.IsOk) {
+                throw new InvalidOperationException($"failed to insert job result {jobResult.JobId}");
+            }
+            _logTracer.LogInformation("created job result {JobId}", jobResult.JobId);
+        } else {
+            _logTracer.LogInformation("Updating existing JobResult entry for Job {JobId}", jobId);
+
+            jobResult = UpdateResult(jobResult, resultType, resultValue);
+
+            var r = await Update(jobResult);
+            if (!r.IsOk) {
+                throw new InvalidOperationException($"failed to insert job result {jobResult.JobId}");
+            }
+            _logTracer.LogInformation("updated job result {JobId}", jobResult.JobId);
+        }
+
+        return true;
+    }
+
+    public async Async.Task<OneFuzzResultVoid> CreateOrUpdate(Guid jobId, JobResultType resultType, Dictionary<string, double> resultValue) {
+
+        var job = await _context.JobOperations.Get(jobId);
+        if (job == null) {
+            return OneFuzzResultVoid.Error(ErrorCode.INVALID_REQUEST, "invalid job");
+        }
+
+        var success = false;
+        try {
+            _logTracer.LogInformation("attempt to update job result {JobId}", job.JobId);
+            var policy = Policy.Handle<InvalidOperationException>().WaitAndRetryAsync(50, _ => new TimeSpan(0, 0, 5));
+            await policy.ExecuteAsync(async () => {
+                success = await TryUpdate(job, resultType, resultValue);
+                _logTracer.LogInformation("attempt {success}", success);
+            });
+            return OneFuzzResultVoid.Ok;
+        } catch (Exception e) {
+            return OneFuzzResultVoid.Error(ErrorCode.UNABLE_TO_UPDATE, new string[] {
+                    $"Unexpected failure when attempting to update job result for {job.JobId}",
+                    $"Exception: {e}"
+                });
+        }
+    }
+}
+
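
    CreateOrUpdate above drives TryUpdate through a Polly policy: up to 50 attempts with a fixed 5-second delay whenever the table write throws InvalidOperationException (for example, on a concurrent update). A rough Rust analogue of that retry shape, for illustration only:

        use std::time::Duration;

        async fn with_retry<F, Fut>(mut attempt_update: F) -> anyhow::Result<()>
        where
            F: FnMut() -> Fut,
            Fut: std::future::Future<Output = anyhow::Result<()>>,
        {
            const MAX_ATTEMPTS: u32 = 50;
            for attempt in 1..=MAX_ATTEMPTS {
                match attempt_update().await {
                    Ok(()) => return Ok(()),
                    Err(err) if attempt < MAX_ATTEMPTS => {
                        // mirrors WaitAndRetryAsync(50, _ => TimeSpan.FromSeconds(5))
                        log::warn!("job result update attempt {attempt} failed: {err}; retrying");
                        tokio::time::sleep(Duration::from_secs(5)).await;
                    }
                    Err(err) => return Err(err),
                }
            }
            unreachable!("every iteration returns")
        }
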
diff --git a/src/ApiService/ApiService/onefuzzlib/OnefuzzContext.cs b/src/ApiService/ApiService/onefuzzlib/OnefuzzContext.cs
index d877bfddbb..03c6322663 100644
--- a/src/ApiService/ApiService/onefuzzlib/OnefuzzContext.cs
+++ b/src/ApiService/ApiService/onefuzzlib/OnefuzzContext.cs
@@ -19,6 +19,7 @@ public interface IOnefuzzContext {
     IExtensions Extensions { get; }
     IIpOperations IpOperations { get; }
     IJobOperations JobOperations { get; }
+    IJobResultOperations JobResultOperations { get; }
     ILogAnalytics LogAnalytics { get; }
     INodeMessageOperations NodeMessageOperations { get; }
     INodeOperations NodeOperations { get; }
@@ -83,6 +84,7 @@ public OnefuzzContext(IServiceProvider serviceProvider) {
     public IVmOperations VmOperations => _serviceProvider.GetRequiredService<IVmOperations>();
     public ISecretsOperations SecretsOperations => _serviceProvider.GetRequiredService<ISecretsOperations>();
     public IJobOperations JobOperations => _serviceProvider.GetRequiredService<IJobOperations>();
+    public IJobResultOperations JobResultOperations => _serviceProvider.GetRequiredService<IJobResultOperations>();
     public IScheduler Scheduler => _serviceProvider.GetRequiredService<IScheduler>();
     public IConfig Config => _serviceProvider.GetRequiredService<IConfig>();
     public ILogAnalytics LogAnalytics => _serviceProvider.GetRequiredService<ILogAnalytics>();
diff --git a/src/ApiService/IntegrationTests/Fakes/TestContext.cs b/src/ApiService/IntegrationTests/Fakes/TestContext.cs
index c46ff5fce7..66d121e746 100644
--- a/src/ApiService/IntegrationTests/Fakes/TestContext.cs
+++ b/src/ApiService/IntegrationTests/Fakes/TestContext.cs
@@ -32,6 +32,7 @@ public TestContext(IHttpClientFactory httpClientFactory, OneFuzzLoggerProvider p
         TaskOperations = new TaskOperations(provider.CreateLogger<TaskOperations>(), Cache, this);
         NodeOperations = new NodeOperations(provider.CreateLogger<NodeOperations>(), this);
         JobOperations = new JobOperations(provider.CreateLogger<JobOperations>(), this);
+        JobResultOperations = new JobResultOperations(provider.CreateLogger<JobResultOperations>(), this);
         NodeTasksOperations = new NodeTasksOperations(provider.CreateLogger<NodeTasksOperations>(), this);
         TaskEventOperations = new TaskEventOperations(provider.CreateLogger<TaskEventOperations>(), this);
         NodeMessageOperations = new NodeMessageOperations(provider.CreateLogger<NodeMessageOperations>(), this);
@@ -57,6 +58,7 @@ public Async.Task InsertAll(params EntityBase[] objs)
                 Node n => NodeOperations.Insert(n),
                 Pool p => PoolOperations.Insert(p),
                 Job j => JobOperations.Insert(j),
+                JobResult jr => JobResultOperations.Insert(jr),
                 Repro r => ReproOperations.Insert(r),
                 Scaleset ss => ScalesetOperations.Insert(ss),
                 NodeTasks nt => NodeTasksOperations.Insert(nt),
@@ -84,6 +86,7 @@ public Async.Task InsertAll(params EntityBase[] objs)
 
     public ITaskOperations TaskOperations { get; }
     public IJobOperations JobOperations { get; }
+    public IJobResultOperations JobResultOperations { get; }
     public INodeOperations NodeOperations { get; }
     public INodeTasksOperations NodeTasksOperations { get; }
     public ITaskEventOperations TaskEventOperations { get; }
diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock
index a1d86e7d25..254684be97 100644
--- a/src/agent/Cargo.lock
+++ b/src/agent/Cargo.lock
@@ -2123,6 +2123,7 @@ dependencies = [
  "log",
  "nix",
  "notify",
+ "onefuzz-result",
  "onefuzz-telemetry",
  "pete",
  "pretty_assertions",
@@ -2197,6 +2198,20 @@ dependencies = [
  "serde_json",
 ]
 
+[[package]]
+name = "onefuzz-result"
+version = "0.2.0"
+dependencies = [
+ "anyhow",
+ "async-trait",
+ "log",
+ "onefuzz-telemetry",
+ "reqwest",
+ "serde",
+ "storage-queue",
+ "uuid",
+]
+
 [[package]]
 name = "onefuzz-task"
 version = "0.2.0"
@@ -2226,6 +2241,7 @@ dependencies = [
  "num_cpus",
  "onefuzz",
  "onefuzz-file-format",
+ "onefuzz-result",
  "onefuzz-telemetry",
  "path-absolutize",
  "pretty_assertions",
diff --git a/src/agent/Cargo.toml b/src/agent/Cargo.toml
index 2f4cea41a4..ce01ae880c 100644
--- a/src/agent/Cargo.toml
+++ b/src/agent/Cargo.toml
@@ -10,6 +10,7 @@ members = [
     "onefuzz",
     "onefuzz-task",
     "onefuzz-agent",
+    "onefuzz-result",
     "onefuzz-file-format",
     "onefuzz-telemetry",
     "reqwest-retry",
diff --git a/src/agent/onefuzz-agent/src/config.rs b/src/agent/onefuzz-agent/src/config.rs
index 87edfb2c1b..fc623e72af 100644
--- a/src/agent/onefuzz-agent/src/config.rs
+++ b/src/agent/onefuzz-agent/src/config.rs
@@ -34,6 +34,8 @@ pub struct StaticConfig {
 
     pub heartbeat_queue: Option<Url>,
 
+    pub job_result_queue: Option<Url>,
+
     pub instance_id: Uuid,
 
     #[serde(default = "default_as_true")]
@@ -71,6 +73,8 @@ struct RawStaticConfig {
 
     pub heartbeat_queue: Option<Url>,
 
+    pub job_result_queue: Option<Url>,
+
     pub instance_id: Uuid,
 
     #[serde(default = "default_as_true")]
@@ -117,6 +121,7 @@ impl StaticConfig {
             microsoft_telemetry_key: config.microsoft_telemetry_key,
             instance_telemetry_key: config.instance_telemetry_key,
             heartbeat_queue: config.heartbeat_queue,
+            job_result_queue: config.job_result_queue,
             instance_id: config.instance_id,
             managed: config.managed,
             machine_identity,
@@ -152,6 +157,12 @@ impl StaticConfig {
             None
         };
 
+        let job_result_queue = if let Ok(key) = std::env::var("ONEFUZZ_JOB_RESULT") {
+            Some(Url::parse(&key)?)
+        } else {
+            None
+        };
+
         let instance_telemetry_key =
             if let Ok(key) = std::env::var("ONEFUZZ_INSTANCE_TELEMETRY_KEY") {
                 Some(InstanceTelemetryKey::new(Uuid::parse_str(&key)?))
@@ -183,6 +194,7 @@ impl StaticConfig {
             instance_telemetry_key,
             microsoft_telemetry_key,
             heartbeat_queue,
+            job_result_queue,
             instance_id,
             managed: !is_unmanaged,
             machine_identity,
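The ONEFUZZ_JOB_RESULT handling above mirrors the existing heartbeat queue: an unset variable yields None, while a set-but-malformed URL fails config loading. A standalone sketch of that pattern, with an illustrative helper name:

    use url::Url;

    // Unset variable => Ok(None); set but unparseable => hard error,
    // matching the `if let Ok(key) = std::env::var(...)` block above.
    fn queue_url_from_env(var: &str) -> anyhow::Result<Option<Url>> {
        match std::env::var(var) {
            Ok(value) => Ok(Some(Url::parse(&value)?)),
            Err(_) => Ok(None),
        }
    }

    // e.g. let job_result_queue = queue_url_from_env("ONEFUZZ_JOB_RESULT")?;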
diff --git a/src/agent/onefuzz-agent/src/log_uploader.rs b/src/agent/onefuzz-agent/src/log_uploader.rs
index 6bccc0bef2..d424013421 100644
--- a/src/agent/onefuzz-agent/src/log_uploader.rs
+++ b/src/agent/onefuzz-agent/src/log_uploader.rs
@@ -210,32 +210,3 @@ async fn sync_file(
     blob_client.append_block(Body::from(f)).await?;
     Ok(len)
 }
-
-#[cfg(test)]
-mod tests {
-    use std::io::Seek;
-
-    use anyhow::Result;
-    use tokio::io::{AsyncReadExt, AsyncSeekExt};
-
-    #[allow(clippy::unused_io_amount)]
-    #[tokio::test]
-    #[ignore]
-
-    async fn test_seek_behavior() -> Result<()> {
-        let path = "C:\\temp\\test.ps1";
-        let mut std_file = std::fs::File::open(path)?;
-        std_file.seek(std::io::SeekFrom::Start(3))?;
-
-        let mut tokio_file = tokio::fs::File::from_std(std_file);
-
-        let buf = &mut [0u8; 5];
-        tokio_file.read(buf).await?;
-        println!("******** buf {:?}", buf);
-        tokio_file.seek(std::io::SeekFrom::Start(0)).await?;
-        tokio_file.read(buf).await?;
-        println!("******** buf {:?}", buf);
-
-        Ok(())
-    }
-}
diff --git a/src/agent/onefuzz-agent/src/work.rs b/src/agent/onefuzz-agent/src/work.rs
index b55d1d86a1..d0222744a7 100644
--- a/src/agent/onefuzz-agent/src/work.rs
+++ b/src/agent/onefuzz-agent/src/work.rs
@@ -91,7 +91,10 @@ impl WorkSet {
 
     pub fn setup_dir(&self) -> Result<PathBuf> {
         let root = self.get_root_folder()?;
-        self.setup_url.as_path(root)
+        // Put the setup container at the root for backward compatibility.
+        // The path of the setup folder can be used as part of the deduplication logic in the bug filing service.
+        let setup_root = root.parent().ok_or_else(|| anyhow!("Invalid root"))?;
+        self.setup_url.as_path(setup_root)
     }
 
     pub fn extra_setup_dir(&self) -> Result<Option<PathBuf>> {
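The setup_dir change above resolves the setup container one level above the workset root rather than inside it, which keeps the setup path stable for the deduplication logic the comment mentions. A tiny sketch of the path arithmetic, with hypothetical paths and as_path() approximated by a join:

    use std::path::Path;

    fn main() {
        // Hypothetical root; the real value comes from WorkSet::get_root_folder().
        let root = Path::new("/onefuzz/workset");
        let old_setup = root.join("setup"); // previous behavior: under the root
        let new_setup = root.parent().expect("invalid root").join("setup"); // new: beside it
        println!("old: {}, new: {}", old_setup.display(), new_setup.display());
    }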
diff --git a/src/agent/onefuzz-result/Cargo.toml b/src/agent/onefuzz-result/Cargo.toml
new file mode 100644
index 0000000000..7c7de6615c
--- /dev/null
+++ b/src/agent/onefuzz-result/Cargo.toml
@@ -0,0 +1,18 @@
+[package]
+name = "onefuzz-result"
+version = "0.2.0"
+authors = ["fuzzing@microsoft.com"]
+edition = "2021"
+publish = false
+license = "MIT"
+
+[dependencies]
+anyhow = { version = "1.0", features = ["backtrace"] }
+async-trait = "0.1"
+reqwest = "0.11"
+serde = "1.0"
+storage-queue = { path = "../storage-queue" }
+uuid = { version = "1.4", features = ["serde", "v4"] }
+onefuzz-telemetry = { path = "../onefuzz-telemetry" }
+log = "0.4"
+
diff --git a/src/agent/onefuzz-result/src/job_result.rs b/src/agent/onefuzz-result/src/job_result.rs
new file mode 100644
index 0000000000..b305eca2cb
--- /dev/null
+++ b/src/agent/onefuzz-result/src/job_result.rs
@@ -0,0 +1,129 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+
+use anyhow::Result;
+use async_trait::async_trait;
+use onefuzz_telemetry::warn;
+use reqwest::Url;
+use serde::{self, Deserialize, Serialize};
+use std::collections::HashMap;
+use std::sync::Arc;
+use storage_queue::QueueClient;
+use uuid::Uuid;
+
+#[derive(Debug, Deserialize, Serialize, Hash, Eq, PartialEq, Clone)]
+#[serde(tag = "type")]
+pub enum JobResultData {
+    NewCrashingInput,
+    NoReproCrashingInput,
+    NewReport,
+    NewUniqueReport,
+    NewRegressionReport,
+    NewCoverage,
+    NewCrashDump,
+    CoverageData,
+    RuntimeStats,
+}
+
+#[derive(Debug, Deserialize, Serialize, Clone)]
+struct JobResult {
+    task_id: Uuid,
+    job_id: Uuid,
+    machine_id: Uuid,
+    machine_name: String,
+    data: JobResultData,
+    value: HashMap<String, f64>,
+}
+
+#[derive(Clone)]
+pub struct TaskContext {
+    task_id: Uuid,
+    job_id: Uuid,
+    machine_id: Uuid,
+    machine_name: String,
+}
+
+pub struct JobResultContext<TaskContext> {
+    pub state: TaskContext,
+    pub queue_client: QueueClient,
+}
+
+pub struct JobResultClient<TaskContext> {
+    pub context: Arc<JobResultContext<TaskContext>>,
+}
+
+impl<TaskContext> JobResultClient<TaskContext> {
+    pub fn init_job_result(
+        context: TaskContext,
+        queue_url: Url,
+    ) -> Result<JobResultClient<TaskContext>>
+    where
+        TaskContext: Send + Sync + 'static,
+    {
+        let context = Arc::new(JobResultContext {
+            state: context,
+            queue_client: QueueClient::new(queue_url)?,
+        });
+
+        Ok(JobResultClient { context })
+    }
+}
+
+pub type TaskJobResultClient = JobResultClient<TaskContext>;
+
+pub async fn init_job_result(
+    queue_url: Url,
+    task_id: Uuid,
+    job_id: Uuid,
+    machine_id: Uuid,
+    machine_name: String,
+) -> Result<TaskJobResultClient> {
+    let client = JobResultClient::init_job_result(
+        TaskContext {
+            task_id,
+            job_id,
+            machine_id,
+            machine_name,
+        },
+        queue_url,
+    )?;
+    Ok(hb)
+}
+
+#[async_trait]
+pub trait JobResultSender {
+    async fn send_direct(&self, data: JobResultData, value: HashMap<String, f64>);
+}
+
+#[async_trait]
+impl JobResultSender for TaskJobResultClient {
+    async fn send_direct(&self, data: JobResultData, value: HashMap<String, f64>) {
+        let task_id = self.context.state.task_id;
+        let job_id = self.context.state.job_id;
+        let machine_id = self.context.state.machine_id;
+        let machine_name = self.context.state.machine_name.clone();
+
+        let _ = self
+            .context
+            .queue_client
+            .enqueue(JobResult {
+                task_id,
+                job_id,
+                machine_id,
+                machine_name,
+                data,
+                value,
+            })
+            .await;
+    }
+}
+
+#[async_trait]
+impl JobResultSender for Option<TaskJobResultClient> {
+    async fn send_direct(&self, data: JobResultData, value: HashMap<String, f64>) {
+        match self {
+            Some(client) => client.send_direct(data, value).await,
+            None => warn!("No job result queue client configured; job result data not sent."),
+        }
+    }
+}
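A short usage sketch of the API defined above, assuming the crate is linked as onefuzz_result; the queue URL and IDs are placeholders. Because send_direct is also implemented for Option<TaskJobResultClient>, call sites can send unconditionally, and an unconfigured queue just logs a warning:

    use std::collections::HashMap;

    use onefuzz_result::job_result::{init_job_result, JobResultData, JobResultSender};
    use reqwest::Url;
    use uuid::Uuid;

    async fn report_new_crash(queue_url: Url) -> anyhow::Result<()> {
        let client = init_job_result(
            queue_url,
            Uuid::new_v4(),           // task_id (placeholder)
            Uuid::new_v4(),           // job_id (placeholder)
            Uuid::new_v4(),           // machine_id (placeholder)
            "example-machine".into(), // machine_name (placeholder)
        )
        .await?;

        // Values are free-form name/number pairs carried on the queue message.
        let mut value = HashMap::new();
        value.insert("count".to_string(), 1.0);
        client.send_direct(JobResultData::NewCrashingInput, value).await;
        Ok(())
    }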
diff --git a/src/agent/onefuzz-result/src/lib.rs b/src/agent/onefuzz-result/src/lib.rs
new file mode 100644
index 0000000000..dae666ca9a
--- /dev/null
+++ b/src/agent/onefuzz-result/src/lib.rs
@@ -0,0 +1,4 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+
+pub mod job_result;
diff --git a/src/agent/onefuzz-task/Cargo.toml b/src/agent/onefuzz-task/Cargo.toml
index 0ad2f9aa4f..4e0bd381b0 100644
--- a/src/agent/onefuzz-task/Cargo.toml
+++ b/src/agent/onefuzz-task/Cargo.toml
@@ -39,6 +39,7 @@ serde_json = "1.0"
 serde_yaml = "0.9.21"
 onefuzz = { path = "../onefuzz" }
 onefuzz-telemetry = { path = "../onefuzz-telemetry" }
+onefuzz-result = { path = "../onefuzz-result" }
 path-absolutize = "3.1"
 reqwest-retry = { path = "../reqwest-retry" }
 strum = "0.25"
diff --git a/src/agent/onefuzz-task/src/local/cmd.rs b/src/agent/onefuzz-task/src/local/cmd.rs
index 80fd51a96b..eabefb71ee 100644
--- a/src/agent/onefuzz-task/src/local/cmd.rs
+++ b/src/agent/onefuzz-task/src/local/cmd.rs
@@ -3,11 +3,7 @@
 
 #[cfg(any(target_os = "linux", target_os = "windows"))]
 use crate::local::coverage;
-use crate::local::{
-    common::add_common_config, generic_analysis, generic_crash_report, generic_generator,
-    libfuzzer, libfuzzer_crash_report, libfuzzer_fuzz, libfuzzer_merge, libfuzzer_regression,
-    libfuzzer_test_input, radamsa, test_input, tui::TerminalUi,
-};
+use crate::local::{common::add_common_config, libfuzzer_fuzz, tui::TerminalUi};
 use anyhow::{Context, Result};
 use clap::{Arg, ArgAction, Command};
 use std::time::Duration;
@@ -21,19 +17,9 @@ use super::template;
 #[derive(Debug, PartialEq, Eq, EnumString, IntoStaticStr, EnumIter)]
 #[strum(serialize_all = "kebab-case")]
 enum Commands {
-    Radamsa,
     #[cfg(any(target_os = "linux", target_os = "windows"))]
     Coverage,
     LibfuzzerFuzz,
-    LibfuzzerMerge,
-    LibfuzzerCrashReport,
-    LibfuzzerTestInput,
-    LibfuzzerRegression,
-    Libfuzzer,
-    CrashReport,
-    Generator,
-    Analysis,
-    TestInput,
     Template,
 }
 
@@ -68,23 +54,7 @@ pub async fn run(args: clap::ArgMatches) -> Result<()> {
         match command {
             #[cfg(any(target_os = "linux", target_os = "windows"))]
             Commands::Coverage => coverage::run(&sub_args, event_sender).await,
-            Commands::Radamsa => radamsa::run(&sub_args, event_sender).await,
-            Commands::LibfuzzerCrashReport => {
-                libfuzzer_crash_report::run(&sub_args, event_sender).await
-            }
             Commands::LibfuzzerFuzz => libfuzzer_fuzz::run(&sub_args, event_sender).await,
-            Commands::LibfuzzerMerge => libfuzzer_merge::run(&sub_args, event_sender).await,
-            Commands::LibfuzzerTestInput => {
-                libfuzzer_test_input::run(&sub_args, event_sender).await
-            }
-            Commands::LibfuzzerRegression => {
-                libfuzzer_regression::run(&sub_args, event_sender).await
-            }
-            Commands::Libfuzzer => libfuzzer::run(&sub_args, event_sender).await,
-            Commands::CrashReport => generic_crash_report::run(&sub_args, event_sender).await,
-            Commands::Generator => generic_generator::run(&sub_args, event_sender).await,
-            Commands::Analysis => generic_analysis::run(&sub_args, event_sender).await,
-            Commands::TestInput => test_input::run(&sub_args, event_sender).await,
             Commands::Template => {
                 let config = sub_args
                     .get_one::<PathBuf>("config")
@@ -140,17 +110,7 @@ pub fn args(name: &'static str) -> Command {
         let app = match subcommand {
             #[cfg(any(target_os = "linux", target_os = "windows"))]
             Commands::Coverage => coverage::args(subcommand.into()),
-            Commands::Radamsa => radamsa::args(subcommand.into()),
-            Commands::LibfuzzerCrashReport => libfuzzer_crash_report::args(subcommand.into()),
             Commands::LibfuzzerFuzz => libfuzzer_fuzz::args(subcommand.into()),
-            Commands::LibfuzzerMerge => libfuzzer_merge::args(subcommand.into()),
-            Commands::LibfuzzerTestInput => libfuzzer_test_input::args(subcommand.into()),
-            Commands::LibfuzzerRegression => libfuzzer_regression::args(subcommand.into()),
-            Commands::Libfuzzer => libfuzzer::args(subcommand.into()),
-            Commands::CrashReport => generic_crash_report::args(subcommand.into()),
-            Commands::Generator => generic_generator::args(subcommand.into()),
-            Commands::Analysis => generic_analysis::args(subcommand.into()),
-            Commands::TestInput => test_input::args(subcommand.into()),
             Commands::Template => Command::new("template")
                 .about("uses the template to generate a run")
                 .args(vec![Arg::new("config")
diff --git a/src/agent/onefuzz-task/src/local/common.rs b/src/agent/onefuzz-task/src/local/common.rs
index f8d7949e80..17940d799f 100644
--- a/src/agent/onefuzz-task/src/local/common.rs
+++ b/src/agent/onefuzz-task/src/local/common.rs
@@ -26,20 +26,10 @@ pub const INPUTS_DIR: &str = "inputs_dir";
 pub const CRASHES_DIR: &str = "crashes_dir";
 pub const CRASHDUMPS_DIR: &str = "crashdumps_dir";
 pub const TARGET_WORKERS: &str = "target_workers";
-pub const REPORTS_DIR: &str = "reports_dir";
-pub const NO_REPRO_DIR: &str = "no_repro_dir";
 pub const TARGET_TIMEOUT: &str = "target_timeout";
-pub const CHECK_RETRY_COUNT: &str = "check_retry_count";
-pub const DISABLE_CHECK_QUEUE: &str = "disable_check_queue";
-pub const UNIQUE_REPORTS_DIR: &str = "unique_reports_dir";
 pub const COVERAGE_DIR: &str = "coverage_dir";
 pub const READONLY_INPUTS: &str = "readonly_inputs_dir";
-pub const CHECK_ASAN_LOG: &str = "check_asan_log";
-pub const TOOLS_DIR: &str = "tools_dir";
-pub const RENAME_OUTPUT: &str = "rename_output";
 pub const CHECK_FUZZER_HELP: &str = "check_fuzzer_help";
-pub const DISABLE_CHECK_DEBUGGER: &str = "disable_check_debugger";
-pub const REGRESSION_REPORTS_DIR: &str = "regression_reports_dir";
 
 pub const TARGET_EXE: &str = "target_exe";
 pub const TARGET_ENV: &str = "target_env";
@@ -47,17 +37,6 @@ pub const TARGET_OPTIONS: &str = "target_options";
 // pub const SUPERVISOR_EXE: &str = "supervisor_exe";
 // pub const SUPERVISOR_ENV: &str = "supervisor_env";
 // pub const SUPERVISOR_OPTIONS: &str = "supervisor_options";
-pub const GENERATOR_EXE: &str = "generator_exe";
-pub const GENERATOR_ENV: &str = "generator_env";
-pub const GENERATOR_OPTIONS: &str = "generator_options";
-
-pub const ANALYZER_EXE: &str = "analyzer_exe";
-pub const ANALYZER_OPTIONS: &str = "analyzer_options";
-pub const ANALYZER_ENV: &str = "analyzer_env";
-pub const ANALYSIS_DIR: &str = "analysis_dir";
-pub const ANALYSIS_INPUTS: &str = "analysis_inputs";
-pub const ANALYSIS_UNIQUE_INPUTS: &str = "analysis_unique_inputs";
-pub const PRESERVE_EXISTING_OUTPUTS: &str = "preserve_existing_outputs";
 
 pub const CREATE_JOB_DIR: &str = "create_job_dir";
 
@@ -66,7 +45,6 @@ const WAIT_FOR_DIR_DELAY: Duration = Duration::from_secs(1);
 
 pub enum CmdType {
     Target,
-    Generator,
     // Supervisor,
 }
 
@@ -90,7 +68,6 @@ pub fn get_cmd_exe(cmd_type: CmdType, args: &clap::ArgMatches) -> Result<String>
     let name = match cmd_type {
         CmdType::Target => TARGET_EXE,
         // CmdType::Supervisor => SUPERVISOR_EXE,
-        CmdType::Generator => GENERATOR_EXE,
     };
 
     args.get_one::<String>(name)
@@ -102,7 +79,6 @@ pub fn get_cmd_arg(cmd_type: CmdType, args: &clap::ArgMatches) -> Vec<String> {
     let name = match cmd_type {
         CmdType::Target => TARGET_OPTIONS,
         // CmdType::Supervisor => SUPERVISOR_OPTIONS,
-        CmdType::Generator => GENERATOR_OPTIONS,
     };
 
     args.get_many::<String>(name)
@@ -115,7 +91,6 @@ pub fn get_cmd_env(cmd_type: CmdType, args: &clap::ArgMatches) -> Result<HashMap
     let env_name = match cmd_type {
         CmdType::Target => TARGET_ENV,
         // CmdType::Supervisor => SUPERVISOR_ENV,
-        CmdType::Generator => GENERATOR_ENV,
     };
     get_hash_map(args, env_name)
 }
@@ -265,6 +240,7 @@ pub async fn build_local_context(
         },
         instance_telemetry_key: None,
         heartbeat_queue: None,
+        job_result_queue: None,
         microsoft_telemetry_key: None,
         logs: None,
         min_available_memory_mb: 0,
diff --git a/src/agent/onefuzz-task/src/local/example_templates/libfuzzer_basic.yml b/src/agent/onefuzz-task/src/local/example_templates/libfuzzer_basic.yml
index 7210893809..aba02c7991 100644
--- a/src/agent/onefuzz-task/src/local/example_templates/libfuzzer_basic.yml
+++ b/src/agent/onefuzz-task/src/local/example_templates/libfuzzer_basic.yml
@@ -5,28 +5,31 @@
 
 # 2. Install llvm and export LLVM_SYMBOLIZER_PATH like we do in setup.sh
 
+required_args: &required_args
+  target_exe: "REPLACE_ME" # The path to your target
+  inputs: &inputs "REPLACE_ME" # A folder containining your inputs
+  crashes: &crashes "REPLACE_ME" # The folder where you want the crashing inputs to be output
+  crashdumps: "REPLACE_ME" # The folder where you want the crash dumps to be output
+  coverage: "REPLACE_ME" # The folder where you want the code coverage to be output
+  regression_reports: "REPLACE_ME" # The folder where you want the regression reports to be output
+
 target_args: &target_args
+  <<: *required_args
   target_env: {}
-  target_exe: "C:\\temp\\onefuzz\\integration\\windows-libfuzzer\\fuzz.exe"
   target_options: []
 
-inputs: &inputs "C:\\temp\\onefuzz\\integration\\windows-libfuzzer\\seeds"
-
 tasks:
   - type: LibFuzzer
     <<: *target_args
-    inputs: *inputs
-    crashes: &crash "./crashes"
     readonly_inputs: []
     check_fuzzer_help: true
 
-  - type: "Report"
+  - type: LibfuzzerRegression
     <<: *target_args
-    input_queue: *crash
-    crashes: *crash
-    reports: "./reports"
-    unique_reports: "./unique_reports"
-    no_repro: "./no_repro"
+
+  - type: "LibfuzzerCrashReport"
+    <<: *target_args
+    input_queue: *crashes
     check_fuzzer_help: true
 
   - type: "Coverage"
@@ -35,4 +38,11 @@ tasks:
       - "{input}"
     input_queue: *inputs
     readonly_inputs: [*inputs]
-    coverage: "./coverage"
+
+  # The analysis task is optional in the libfuzzer_basic template
+  # - type: Analysis
+  #   <<: *target_args
+  #   analysis: "REPLACE_ME" # The folder where you want the analysis results to be output
+  #   analyzer_exe: "REPLACE_ME"
+  #   analyzer_options: []
+  #   analyzer_env: {}
diff --git a/src/agent/onefuzz-task/src/local/generic_analysis.rs b/src/agent/onefuzz-task/src/local/generic_analysis.rs
index 3d3e2fafc8..429e7b0e3b 100644
--- a/src/agent/onefuzz-task/src/local/generic_analysis.rs
+++ b/src/agent/onefuzz-task/src/local/generic_analysis.rs
@@ -3,139 +3,13 @@
 
 use std::{collections::HashMap, path::PathBuf};
 
-use crate::{
-    local::common::{
-        build_local_context, get_cmd_arg, get_cmd_exe, get_hash_map, get_synced_dir, CmdType,
-        SyncCountDirMonitor, UiEvent, ANALYSIS_DIR, ANALYZER_ENV, ANALYZER_EXE, ANALYZER_OPTIONS,
-        CRASHES_DIR, NO_REPRO_DIR, REPORTS_DIR, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, TOOLS_DIR,
-        UNIQUE_REPORTS_DIR,
-    },
-    tasks::{
-        analysis::generic::{run as run_analysis, Config},
-        config::CommonConfig,
-    },
-};
+use crate::tasks::config::CommonConfig;
 use anyhow::Result;
 use async_trait::async_trait;
-use clap::{Arg, Command};
-use flume::Sender;
 use schemars::JsonSchema;
-use storage_queue::QueueClient;
 
 use super::template::{RunContext, Template};
 
-pub fn build_analysis_config(
-    args: &clap::ArgMatches,
-    input_queue: Option<QueueClient>,
-    common: CommonConfig,
-    event_sender: Option<Sender<UiEvent>>,
-) -> Result<Config> {
-    let target_exe = get_cmd_exe(CmdType::Target, args)?.into();
-    let target_options = get_cmd_arg(CmdType::Target, args);
-
-    let analyzer_exe = args
-        .get_one::<String>(ANALYZER_EXE)
-        .cloned()
-        .ok_or_else(|| format_err!("expected {ANALYZER_EXE}"))?;
-
-    let analyzer_options = args
-        .get_many::<String>(ANALYZER_OPTIONS)
-        .unwrap_or_default()
-        .map(|x| x.to_string())
-        .collect();
-
-    let analyzer_env = get_hash_map(args, ANALYZER_ENV)?;
-    let analysis = get_synced_dir(ANALYSIS_DIR, common.job_id, common.task_id, args)?
-        .monitor_count(&event_sender)?;
-    let tools = get_synced_dir(TOOLS_DIR, common.job_id, common.task_id, args)?;
-    let crashes = if input_queue.is_none() {
-        get_synced_dir(CRASHES_DIR, common.job_id, common.task_id, args)
-            .ok()
-            .monitor_count(&event_sender)?
-    } else {
-        None
-    };
-    let reports = get_synced_dir(REPORTS_DIR, common.job_id, common.task_id, args)
-        .ok()
-        .monitor_count(&event_sender)?;
-    let no_repro = get_synced_dir(NO_REPRO_DIR, common.job_id, common.task_id, args)
-        .ok()
-        .monitor_count(&event_sender)?;
-    let unique_reports = get_synced_dir(UNIQUE_REPORTS_DIR, common.job_id, common.task_id, args)
-        .ok()
-        .monitor_count(&event_sender)?;
-
-    let config = Config {
-        analyzer_exe,
-        analyzer_options,
-        analyzer_env,
-        target_exe,
-        target_options,
-        input_queue,
-        crashes,
-        analysis,
-        tools: Some(tools),
-        reports,
-        unique_reports,
-        no_repro,
-        common,
-    };
-
-    Ok(config)
-}
-
-pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
-    let context = build_local_context(args, true, event_sender.clone()).await?;
-    let config = build_analysis_config(args, None, context.common_config.clone(), event_sender)?;
-    run_analysis(config).await
-}
-
-pub fn build_shared_args(required_task: bool) -> Vec<Arg> {
-    vec![
-        Arg::new(TARGET_EXE).long(TARGET_EXE).required(true),
-        Arg::new(TARGET_ENV)
-            .long(TARGET_ENV)
-            .requires(TARGET_EXE)
-            .num_args(0..),
-        Arg::new(TARGET_OPTIONS)
-            .long(TARGET_OPTIONS)
-            .default_value("{input}")
-            .value_delimiter(' ')
-            .help("Use a quoted string with space separation to denote multiple arguments"),
-        Arg::new(CRASHES_DIR)
-            .long(CRASHES_DIR)
-            .value_parser(value_parser!(PathBuf)),
-        Arg::new(ANALYZER_OPTIONS)
-            .long(ANALYZER_OPTIONS)
-            .requires(ANALYZER_EXE)
-            .value_delimiter(' ')
-            .help("Use a quoted string with space separation to denote multiple arguments"),
-        Arg::new(ANALYZER_ENV)
-            .long(ANALYZER_ENV)
-            .requires(ANALYZER_EXE)
-            .num_args(0..),
-        Arg::new(TOOLS_DIR)
-            .long(TOOLS_DIR)
-            .value_parser(value_parser!(PathBuf)),
-        Arg::new(ANALYZER_EXE)
-            .long(ANALYZER_EXE)
-            .requires(ANALYSIS_DIR)
-            .requires(CRASHES_DIR)
-            .required(required_task),
-        Arg::new(ANALYSIS_DIR)
-            .long(ANALYSIS_DIR)
-            .requires(ANALYZER_EXE)
-            .requires(CRASHES_DIR)
-            .required(required_task),
-    ]
-}
-
-pub fn args(name: &'static str) -> Command {
-    Command::new(name)
-        .about("execute a local-only generic analysis")
-        .args(&build_shared_args(true))
-}
-
 #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]
 pub struct Analysis {
     analyzer_exe: String,
@@ -146,7 +20,7 @@ pub struct Analysis {
     input_queue: Option<PathBuf>,
     crashes: Option<PathBuf>,
     analysis: PathBuf,
-    tools: PathBuf,
+    tools: Option<PathBuf>,
     reports: Option<PathBuf>,
     unique_reports: Option<PathBuf>,
     no_repro: Option<PathBuf>,
@@ -175,9 +49,10 @@ impl Template for Analysis {
                 .and_then(|path| context.to_monitored_sync_dir("crashes", path).ok()),
 
             analysis: context.to_monitored_sync_dir("analysis", self.analysis.clone())?,
-            tools: context
-                .to_monitored_sync_dir("tools", self.tools.clone())
-                .ok(),
+            tools: self
+                .tools
+                .as_ref()
+                .and_then(|path| context.to_monitored_sync_dir("tools", path).ok()),
 
             reports: self
                 .reports
diff --git a/src/agent/onefuzz-task/src/local/generic_crash_report.rs b/src/agent/onefuzz-task/src/local/generic_crash_report.rs
index 6b0e2fccad..347a8cac76 100644
--- a/src/agent/onefuzz-task/src/local/generic_crash_report.rs
+++ b/src/agent/onefuzz-task/src/local/generic_crash_report.rs
@@ -3,150 +3,14 @@
 
 use std::{collections::HashMap, path::PathBuf};
 
-use crate::{
-    local::common::{
-        build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir, CmdType,
-        SyncCountDirMonitor, UiEvent, CHECK_ASAN_LOG, CHECK_RETRY_COUNT, CRASHES_DIR,
-        DISABLE_CHECK_DEBUGGER, DISABLE_CHECK_QUEUE, NO_REPRO_DIR, REPORTS_DIR, TARGET_ENV,
-        TARGET_EXE, TARGET_OPTIONS, TARGET_TIMEOUT, UNIQUE_REPORTS_DIR,
-    },
-    tasks::{
-        config::CommonConfig,
-        report::generic::{Config, ReportTask},
-        utils::default_bool_true,
-    },
-};
+use crate::tasks::{config::CommonConfig, utils::default_bool_true};
 use anyhow::Result;
 use async_trait::async_trait;
-use clap::{Arg, ArgAction, Command};
-use flume::Sender;
 use futures::future::OptionFuture;
 use schemars::JsonSchema;
-use storage_queue::QueueClient;
 
 use super::template::{RunContext, Template};
 
-pub fn build_report_config(
-    args: &clap::ArgMatches,
-    input_queue: Option<QueueClient>,
-    common: CommonConfig,
-    event_sender: Option<Sender<UiEvent>>,
-) -> Result<Config> {
-    let target_exe = get_cmd_exe(CmdType::Target, args)?.into();
-    let target_env = get_cmd_env(CmdType::Target, args)?;
-    let target_options = get_cmd_arg(CmdType::Target, args);
-
-    let crashes = Some(get_synced_dir(
-        CRASHES_DIR,
-        common.job_id,
-        common.task_id,
-        args,
-    )?)
-    .monitor_count(&event_sender)?;
-    let reports = get_synced_dir(REPORTS_DIR, common.job_id, common.task_id, args)
-        .ok()
-        .monitor_count(&event_sender)?;
-    let no_repro = get_synced_dir(NO_REPRO_DIR, common.job_id, common.task_id, args)
-        .ok()
-        .monitor_count(&event_sender)?;
-
-    let unique_reports = Some(get_synced_dir(
-        UNIQUE_REPORTS_DIR,
-        common.job_id,
-        common.task_id,
-        args,
-    )?)
-    .monitor_count(&event_sender)?;
-
-    let target_timeout = args.get_one::<u64>(TARGET_TIMEOUT).copied();
-
-    let check_retry_count = args
-        .get_one::<u64>(CHECK_RETRY_COUNT)
-        .copied()
-        .expect("has a default");
-
-    let check_queue = !args.get_flag(DISABLE_CHECK_QUEUE);
-    let check_asan_log = args.get_flag(CHECK_ASAN_LOG);
-    let check_debugger = !args.get_flag(DISABLE_CHECK_DEBUGGER);
-
-    let config = Config {
-        target_exe,
-        target_env,
-        target_options,
-        target_timeout,
-        check_asan_log,
-        check_debugger,
-        check_retry_count,
-        check_queue,
-        crashes,
-        minimized_stack_depth: None,
-        input_queue,
-        no_repro,
-        reports,
-        unique_reports,
-        common,
-    };
-
-    Ok(config)
-}
-
-pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
-    let context = build_local_context(args, true, event_sender.clone()).await?;
-    let config = build_report_config(args, None, context.common_config.clone(), event_sender)?;
-    ReportTask::new(config).managed_run().await
-}
-
-pub fn build_shared_args() -> Vec<Arg> {
-    vec![
-        Arg::new(TARGET_EXE).long(TARGET_EXE).required(true),
-        Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..),
-        Arg::new(TARGET_OPTIONS)
-            .default_value("{input}")
-            .long(TARGET_OPTIONS)
-            .value_delimiter(' ')
-            .help("Use a quoted string with space separation to denote multiple arguments"),
-        Arg::new(CRASHES_DIR)
-            .long(CRASHES_DIR)
-            .required(true)
-            .value_parser(value_parser!(PathBuf)),
-        Arg::new(REPORTS_DIR)
-            .long(REPORTS_DIR)
-            .required(false)
-            .value_parser(value_parser!(PathBuf)),
-        Arg::new(NO_REPRO_DIR)
-            .long(NO_REPRO_DIR)
-            .required(false)
-            .value_parser(value_parser!(PathBuf)),
-        Arg::new(UNIQUE_REPORTS_DIR)
-            .long(UNIQUE_REPORTS_DIR)
-            .value_parser(value_parser!(PathBuf))
-            .required(true),
-        Arg::new(TARGET_TIMEOUT)
-            .long(TARGET_TIMEOUT)
-            .value_parser(value_parser!(u64))
-            .default_value("30"),
-        Arg::new(CHECK_RETRY_COUNT)
-            .long(CHECK_RETRY_COUNT)
-            .value_parser(value_parser!(u64))
-            .default_value("0"),
-        Arg::new(DISABLE_CHECK_QUEUE)
-            .action(ArgAction::SetTrue)
-            .long(DISABLE_CHECK_QUEUE),
-        Arg::new(CHECK_ASAN_LOG)
-            .action(ArgAction::SetTrue)
-            .long(CHECK_ASAN_LOG),
-        Arg::new(DISABLE_CHECK_DEBUGGER)
-            .action(ArgAction::SetTrue)
-            .long(DISABLE_CHECK_DEBUGGER),
-    ]
-}
-
-pub fn args(name: &'static str) -> Command {
-    Command::new(name)
-        .about("execute a local-only generic crash report")
-        .args(&build_shared_args())
-}
-
 #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]
 pub struct CrashReport {
     target_exe: PathBuf,
diff --git a/src/agent/onefuzz-task/src/local/generic_generator.rs b/src/agent/onefuzz-task/src/local/generic_generator.rs
index 823ba221d6..ae9f6a3cc6 100644
--- a/src/agent/onefuzz-task/src/local/generic_generator.rs
+++ b/src/agent/onefuzz-task/src/local/generic_generator.rs
@@ -3,154 +3,14 @@
 
 use std::{collections::HashMap, path::PathBuf};
 
-use crate::{
-    local::common::{
-        build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir,
-        get_synced_dirs, CmdType, SyncCountDirMonitor, UiEvent, CHECK_ASAN_LOG, CHECK_RETRY_COUNT,
-        CRASHES_DIR, DISABLE_CHECK_DEBUGGER, GENERATOR_ENV, GENERATOR_EXE, GENERATOR_OPTIONS,
-        READONLY_INPUTS, RENAME_OUTPUT, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, TARGET_TIMEOUT,
-        TOOLS_DIR,
-    },
-    tasks::{
-        config::CommonConfig,
-        fuzz::generator::{Config, GeneratorTask},
-        utils::default_bool_true,
-    },
-};
+use crate::tasks::{config::CommonConfig, utils::default_bool_true};
 use anyhow::Result;
 use async_trait::async_trait;
-use clap::{Arg, ArgAction, Command};
-use flume::Sender;
 use onefuzz::syncdir::SyncedDir;
 use schemars::JsonSchema;
 
 use super::template::{RunContext, Template};
 
-pub fn build_fuzz_config(
-    args: &clap::ArgMatches,
-    common: CommonConfig,
-    event_sender: Option<Sender<UiEvent>>,
-) -> Result<Config> {
-    let crashes = get_synced_dir(CRASHES_DIR, common.job_id, common.task_id, args)?
-        .monitor_count(&event_sender)?;
-    let target_exe = get_cmd_exe(CmdType::Target, args)?.into();
-    let target_options = get_cmd_arg(CmdType::Target, args);
-    let target_env = get_cmd_env(CmdType::Target, args)?;
-
-    let generator_exe = get_cmd_exe(CmdType::Generator, args)?;
-    let generator_options = get_cmd_arg(CmdType::Generator, args);
-    let generator_env = get_cmd_env(CmdType::Generator, args)?;
-    let readonly_inputs = get_synced_dirs(READONLY_INPUTS, common.job_id, common.task_id, args)?
-        .into_iter()
-        .map(|sd| sd.monitor_count(&event_sender))
-        .collect::<Result<Vec<_>>>()?;
-
-    let rename_output = args.get_flag(RENAME_OUTPUT);
-    let check_asan_log = args.get_flag(CHECK_ASAN_LOG);
-    let check_debugger = !args.get_flag(DISABLE_CHECK_DEBUGGER);
-
-    let check_retry_count = args
-        .get_one::<u64>(CHECK_RETRY_COUNT)
-        .copied()
-        .expect("has a default");
-
-    let target_timeout = Some(
-        args.get_one::<u64>(TARGET_TIMEOUT)
-            .copied()
-            .expect("has a default"),
-    );
-
-    let tools = get_synced_dir(TOOLS_DIR, common.job_id, common.task_id, args)
-        .ok()
-        .monitor_count(&event_sender)?;
-
-    let ensemble_sync_delay = None;
-
-    let config = Config {
-        generator_exe,
-        generator_env,
-        generator_options,
-        readonly_inputs,
-        crashes,
-        tools,
-        target_exe,
-        target_env,
-        target_options,
-        target_timeout,
-        check_asan_log,
-        check_debugger,
-        check_retry_count,
-        rename_output,
-        ensemble_sync_delay,
-        common,
-    };
-
-    Ok(config)
-}
-
-pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
-    let context = build_local_context(args, true, event_sender.clone()).await?;
-    let config = build_fuzz_config(args, context.common_config.clone(), event_sender)?;
-    GeneratorTask::new(config).run().await
-}
-
-pub fn build_shared_args() -> Vec<Arg> {
-    vec![
-        Arg::new(TARGET_EXE).long(TARGET_EXE).required(true),
-        Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..),
-        Arg::new(TARGET_OPTIONS)
-            .default_value("{input}")
-            .long(TARGET_OPTIONS)
-            .value_delimiter(' ')
-            .help("Use a quoted string with space separation to denote multiple arguments"),
-        Arg::new(GENERATOR_EXE)
-            .long(GENERATOR_EXE)
-            .default_value("radamsa")
-            .required(true),
-        Arg::new(GENERATOR_ENV).long(GENERATOR_ENV).num_args(0..),
-        Arg::new(GENERATOR_OPTIONS)
-            .long(GENERATOR_OPTIONS)
-            .value_delimiter(' ')
-            .default_value("-H sha256 -o {generated_inputs}/input-%h.%s -n 100 -r {input_corpus}")
-            .help("Use a quoted string with space separation to denote multiple arguments"),
-        Arg::new(CRASHES_DIR)
-            .required(true)
-            .long(CRASHES_DIR)
-            .value_parser(value_parser!(PathBuf)),
-        Arg::new(READONLY_INPUTS)
-            .required(true)
-            .num_args(1..)
-            .value_parser(value_parser!(PathBuf))
-            .long(READONLY_INPUTS),
-        Arg::new(TOOLS_DIR)
-            .long(TOOLS_DIR)
-            .value_parser(value_parser!(PathBuf)),
-        Arg::new(CHECK_RETRY_COUNT)
-            .long(CHECK_RETRY_COUNT)
-            .value_parser(value_parser!(u64))
-            .default_value("0"),
-        Arg::new(CHECK_ASAN_LOG)
-            .action(ArgAction::SetTrue)
-            .long(CHECK_ASAN_LOG),
-        Arg::new(RENAME_OUTPUT)
-            .action(ArgAction::SetTrue)
-            .long(RENAME_OUTPUT),
-        Arg::new(TARGET_TIMEOUT)
-            .long(TARGET_TIMEOUT)
-            .value_parser(value_parser!(u64))
-            .default_value("30"),
-        Arg::new(DISABLE_CHECK_DEBUGGER)
-            .action(ArgAction::SetTrue)
-            .long(DISABLE_CHECK_DEBUGGER),
-    ]
-}
-
-pub fn args(name: &'static str) -> Command {
-    Command::new(name)
-        .about("execute a local-only generator fuzzing task")
-        .args(&build_shared_args())
-}
-
 #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]
 pub struct Generator {
     generator_exe: String,
diff --git a/src/agent/onefuzz-task/src/local/libfuzzer.rs b/src/agent/onefuzz-task/src/local/libfuzzer.rs
index 56dff7dbe3..433636be1c 100644
--- a/src/agent/onefuzz-task/src/local/libfuzzer.rs
+++ b/src/agent/onefuzz-task/src/local/libfuzzer.rs
@@ -1,168 +1,19 @@
 // Copyright (c) Microsoft Corporation.
 // Licensed under the MIT License.
 
-#[cfg(any(target_os = "linux", target_os = "windows"))]
-use crate::{
-    local::{common::COVERAGE_DIR, coverage, coverage::build_shared_args as build_coverage_args},
-    tasks::coverage::generic::CoverageTask,
-};
-use crate::{
-    local::{
-        common::{
-            build_local_context, wait_for_dir, DirectoryMonitorQueue, UiEvent, ANALYZER_EXE,
-            REGRESSION_REPORTS_DIR, UNIQUE_REPORTS_DIR,
-        },
-        generic_analysis::{build_analysis_config, build_shared_args as build_analysis_args},
-        libfuzzer_crash_report::{build_report_config, build_shared_args as build_crash_args},
-        libfuzzer_fuzz::{build_fuzz_config, build_shared_args as build_fuzz_args},
-        libfuzzer_regression::{
-            build_regression_config, build_shared_args as build_regression_args,
-        },
-    },
-    tasks::{
-        analysis::generic::run as run_analysis,
-        config::CommonConfig,
-        fuzz::libfuzzer::{common::default_workers, generic::LibFuzzerFuzzTask},
-        regression::libfuzzer::LibFuzzerRegressionTask,
-        report::libfuzzer_report::ReportTask,
-        utils::default_bool_true,
-    },
+use crate::tasks::{
+    config::CommonConfig,
+    fuzz::libfuzzer::{common::default_workers, generic::LibFuzzerFuzzTask},
+    utils::default_bool_true,
 };
 use anyhow::Result;
 use async_trait::async_trait;
-use clap::Command;
-use flume::Sender;
-use onefuzz::{syncdir::SyncedDir, utils::try_wait_all_join_handles};
+use onefuzz::syncdir::SyncedDir;
 use schemars::JsonSchema;
-use std::{
-    collections::{HashMap, HashSet},
-    path::PathBuf,
-};
-use tokio::task::spawn;
-use uuid::Uuid;
+use std::{collections::HashMap, path::PathBuf};
 
 use super::template::{RunContext, Template};
 
-pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
-    let context = build_local_context(args, true, event_sender.clone()).await?;
-    let fuzz_config = build_fuzz_config(args, context.common_config.clone(), event_sender.clone())?;
-    let crash_dir = fuzz_config
-        .crashes
-        .remote_url()?
-        .as_file_path()
-        .expect("invalid crash dir remote location");
-
-    let fuzzer = LibFuzzerFuzzTask::new(fuzz_config)?;
-    let mut task_handles = vec![];
-
-    let fuzz_task = spawn(async move { fuzzer.run().await });
-
-    wait_for_dir(&crash_dir).await?;
-
-    task_handles.push(fuzz_task);
-
-    if args.contains_id(UNIQUE_REPORTS_DIR) {
-        let crash_report_input_monitor =
-            DirectoryMonitorQueue::start_monitoring(crash_dir.clone()).await?;
-
-        let report_config = build_report_config(
-            args,
-            Some(crash_report_input_monitor.queue_client),
-            CommonConfig {
-                task_id: Uuid::new_v4(),
-                ..context.common_config.clone()
-            },
-            event_sender.clone(),
-        )?;
-
-        let mut report = ReportTask::new(report_config);
-        let report_task = spawn(async move { report.managed_run().await });
-
-        task_handles.push(report_task);
-        task_handles.push(crash_report_input_monitor.handle);
-    }
-
-    #[cfg(any(target_os = "linux", target_os = "windows"))]
-    if args.contains_id(COVERAGE_DIR) {
-        let coverage_input_monitor =
-            DirectoryMonitorQueue::start_monitoring(crash_dir.clone()).await?;
-        let coverage_config = coverage::build_coverage_config(
-            args,
-            true,
-            Some(coverage_input_monitor.queue_client),
-            CommonConfig {
-                task_id: Uuid::new_v4(),
-                ..context.common_config.clone()
-            },
-            event_sender.clone(),
-        )?;
-
-        let mut coverage = CoverageTask::new(coverage_config);
-        let coverage_task = spawn(async move { coverage.run().await });
-
-        task_handles.push(coverage_task);
-        task_handles.push(coverage_input_monitor.handle);
-    }
-
-    if args.contains_id(ANALYZER_EXE) {
-        let analysis_input_monitor = DirectoryMonitorQueue::start_monitoring(crash_dir).await?;
-        let analysis_config = build_analysis_config(
-            args,
-            Some(analysis_input_monitor.queue_client),
-            CommonConfig {
-                task_id: Uuid::new_v4(),
-                ..context.common_config.clone()
-            },
-            event_sender.clone(),
-        )?;
-        let analysis_task = spawn(async move { run_analysis(analysis_config).await });
-
-        task_handles.push(analysis_task);
-        task_handles.push(analysis_input_monitor.handle);
-    }
-
-    if args.contains_id(REGRESSION_REPORTS_DIR) {
-        let regression_config = build_regression_config(
-            args,
-            CommonConfig {
-                task_id: Uuid::new_v4(),
-                ..context.common_config.clone()
-            },
-            event_sender,
-        )?;
-        let regression = LibFuzzerRegressionTask::new(regression_config);
-        let regression_task = spawn(async move { regression.run().await });
-        task_handles.push(regression_task);
-    }
-
-    try_wait_all_join_handles(task_handles).await?;
-
-    Ok(())
-}
-
-pub fn args(name: &'static str) -> Command {
-    let mut app = Command::new(name).about("run a local libfuzzer & crash reporting task");
-
-    let mut used = HashSet::new();
-
-    for args in &[
-        build_fuzz_args(),
-        build_crash_args(),
-        build_analysis_args(false),
-        #[cfg(any(target_os = "linux", target_os = "windows"))]
-        build_coverage_args(true),
-        build_regression_args(false),
-    ] {
-        for arg in args {
-            if used.insert(arg.get_id()) {
-                app = app.arg(arg);
-            }
-        }
-    }
-
-    app
-}
-
 #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]
 pub struct LibFuzzer {
     inputs: PathBuf,
diff --git a/src/agent/onefuzz-task/src/local/libfuzzer_crash_report.rs b/src/agent/onefuzz-task/src/local/libfuzzer_crash_report.rs
index c1ab283575..04ba4f9225 100644
--- a/src/agent/onefuzz-task/src/local/libfuzzer_crash_report.rs
+++ b/src/agent/onefuzz-task/src/local/libfuzzer_crash_report.rs
@@ -3,139 +3,13 @@
 
 use std::{collections::HashMap, path::PathBuf};
 
-use crate::{
-    local::common::{
-        build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir, CmdType,
-        SyncCountDirMonitor, UiEvent, CHECK_FUZZER_HELP, CHECK_RETRY_COUNT, CRASHES_DIR,
-        DISABLE_CHECK_QUEUE, NO_REPRO_DIR, REPORTS_DIR, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS,
-        TARGET_TIMEOUT, UNIQUE_REPORTS_DIR,
-    },
-    tasks::{
-        config::CommonConfig,
-        report::libfuzzer_report::{Config, ReportTask},
-        utils::default_bool_true,
-    },
-};
+use crate::tasks::{config::CommonConfig, utils::default_bool_true};
 use anyhow::Result;
 use async_trait::async_trait;
-use clap::{Arg, ArgAction, Command};
-use flume::Sender;
 use futures::future::OptionFuture;
 use schemars::JsonSchema;
-use storage_queue::QueueClient;
 
 use super::template::{RunContext, Template};
-
-pub fn build_report_config(
-    args: &clap::ArgMatches,
-    input_queue: Option<QueueClient>,
-    common: CommonConfig,
-    event_sender: Option<Sender<UiEvent>>,
-) -> Result<Config> {
-    let target_exe = get_cmd_exe(CmdType::Target, args)?.into();
-    let target_env = get_cmd_env(CmdType::Target, args)?;
-    let target_options = get_cmd_arg(CmdType::Target, args);
-
-    let crashes = get_synced_dir(CRASHES_DIR, common.job_id, common.task_id, args)
-        .ok()
-        .monitor_count(&event_sender)?;
-    let reports = get_synced_dir(REPORTS_DIR, common.job_id, common.task_id, args)
-        .ok()
-        .monitor_count(&event_sender)?;
-
-    let no_repro = get_synced_dir(NO_REPRO_DIR, common.job_id, common.task_id, args)
-        .ok()
-        .monitor_count(&event_sender)?;
-
-    let unique_reports = get_synced_dir(UNIQUE_REPORTS_DIR, common.job_id, common.task_id, args)
-        .ok()
-        .monitor_count(&event_sender)?;
-
-    let target_timeout = args.get_one::<u64>(TARGET_TIMEOUT).copied();
-
-    let check_retry_count = args
-        .get_one::<u64>(CHECK_RETRY_COUNT)
-        .copied()
-        .expect("has a default");
-
-    let check_queue = !args.get_flag(DISABLE_CHECK_QUEUE);
-
-    let check_fuzzer_help = args.get_flag(CHECK_FUZZER_HELP);
-
-    let crashes = if input_queue.is_none() { crashes } else { None };
-
-    let config = Config {
-        target_exe,
-        target_env,
-        target_options,
-        target_timeout,
-        check_retry_count,
-        check_fuzzer_help,
-        minimized_stack_depth: None,
-        input_queue,
-        check_queue,
-        crashes,
-        reports,
-        no_repro,
-        unique_reports,
-        common,
-    };
-
-    Ok(config)
-}
-
-pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
-    let context = build_local_context(args, true, event_sender.clone()).await?;
-    let config = build_report_config(args, None, context.common_config.clone(), event_sender)?;
-    ReportTask::new(config).managed_run().await
-}
-
-pub fn build_shared_args() -> Vec<Arg> {
-    vec![
-        Arg::new(TARGET_EXE).long(TARGET_EXE).required(true),
-        Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..),
-        Arg::new(TARGET_OPTIONS)
-            .long(TARGET_OPTIONS)
-            .value_delimiter(' ')
-            .help("Use a quoted string with space separation to denote multiple arguments"),
-        Arg::new(CRASHES_DIR)
-            .long(CRASHES_DIR)
-            .required(true)
-            .value_parser(value_parser!(PathBuf)),
-        Arg::new(REPORTS_DIR)
-            .long(REPORTS_DIR)
-            .required(false)
-            .value_parser(value_parser!(PathBuf)),
-        Arg::new(NO_REPRO_DIR)
-            .long(NO_REPRO_DIR)
-            .required(false)
-            .value_parser(value_parser!(PathBuf)),
-        Arg::new(UNIQUE_REPORTS_DIR)
-            .long(UNIQUE_REPORTS_DIR)
-            .required(true)
-            .value_parser(value_parser!(PathBuf)),
-        Arg::new(TARGET_TIMEOUT)
-            .value_parser(value_parser!(u64))
-            .long(TARGET_TIMEOUT),
-        Arg::new(CHECK_RETRY_COUNT)
-            .long(CHECK_RETRY_COUNT)
-            .value_parser(value_parser!(u64))
-            .default_value("0"),
-        Arg::new(DISABLE_CHECK_QUEUE)
-            .action(ArgAction::SetTrue)
-            .long(DISABLE_CHECK_QUEUE),
-        Arg::new(CHECK_FUZZER_HELP)
-            .action(ArgAction::SetTrue)
-            .long(CHECK_FUZZER_HELP),
-    ]
-}
-
-pub fn args(name: &'static str) -> Command {
-    Command::new(name)
-        .about("execute a local-only libfuzzer crash report task")
-        .args(&build_shared_args())
-}
-
 #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]
 pub struct LibfuzzerCrashReport {
     target_exe: PathBuf,
diff --git a/src/agent/onefuzz-task/src/local/libfuzzer_merge.rs b/src/agent/onefuzz-task/src/local/libfuzzer_merge.rs
index 69c9df820b..4b3e4ce58f 100644
--- a/src/agent/onefuzz-task/src/local/libfuzzer_merge.rs
+++ b/src/agent/onefuzz-task/src/local/libfuzzer_merge.rs
@@ -3,97 +3,15 @@
 
 use std::{collections::HashMap, path::PathBuf};
 
-use crate::{
-    local::common::{
-        build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir,
-        get_synced_dirs, CmdType, SyncCountDirMonitor, UiEvent, ANALYSIS_INPUTS,
-        ANALYSIS_UNIQUE_INPUTS, CHECK_FUZZER_HELP, INPUTS_DIR, PRESERVE_EXISTING_OUTPUTS,
-        TARGET_ENV, TARGET_EXE, TARGET_OPTIONS,
-    },
-    tasks::{
-        config::CommonConfig,
-        merge::libfuzzer_merge::{spawn, Config},
-        utils::default_bool_true,
-    },
-};
+use crate::tasks::{config::CommonConfig, utils::default_bool_true};
 use anyhow::Result;
 use async_trait::async_trait;
-use clap::{Arg, ArgAction, Command};
-use flume::Sender;
 use futures::future::OptionFuture;
 use onefuzz::syncdir::SyncedDir;
 use schemars::JsonSchema;
-use storage_queue::QueueClient;
 
 use super::template::{RunContext, Template};
 
-pub fn build_merge_config(
-    args: &clap::ArgMatches,
-    input_queue: Option<QueueClient>,
-    common: CommonConfig,
-    event_sender: Option<Sender<UiEvent>>,
-) -> Result<Config> {
-    let target_exe = get_cmd_exe(CmdType::Target, args)?.into();
-    let target_env = get_cmd_env(CmdType::Target, args)?;
-    let target_options = get_cmd_arg(CmdType::Target, args);
-    let check_fuzzer_help = args.get_flag(CHECK_FUZZER_HELP);
-    let inputs = get_synced_dirs(ANALYSIS_INPUTS, common.job_id, common.task_id, args)?
-        .into_iter()
-        .map(|sd| sd.monitor_count(&event_sender))
-        .collect::<Result<Vec<_>>>()?;
-    let unique_inputs =
-        get_synced_dir(ANALYSIS_UNIQUE_INPUTS, common.job_id, common.task_id, args)?
-            .monitor_count(&event_sender)?;
-    let preserve_existing_outputs = args
-        .get_one::<bool>(PRESERVE_EXISTING_OUTPUTS)
-        .copied()
-        .unwrap_or_default();
-
-    let config = Config {
-        target_exe,
-        target_env,
-        target_options,
-        input_queue,
-        inputs,
-        unique_inputs,
-        preserve_existing_outputs,
-        check_fuzzer_help,
-        common,
-    };
-
-    Ok(config)
-}
-
-pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
-    let context = build_local_context(args, true, event_sender.clone()).await?;
-    let config = build_merge_config(args, None, context.common_config.clone(), event_sender)?;
-    spawn(config).await
-}
-
-pub fn build_shared_args() -> Vec<Arg> {
-    vec![
-        Arg::new(TARGET_EXE).long(TARGET_EXE).required(true),
-        Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..),
-        Arg::new(TARGET_OPTIONS)
-            .long(TARGET_OPTIONS)
-            .value_delimiter(' ')
-            .help("Use a quoted string with space separation to denote multiple arguments"),
-        Arg::new(CHECK_FUZZER_HELP)
-            .action(ArgAction::SetTrue)
-            .long(CHECK_FUZZER_HELP),
-        Arg::new(INPUTS_DIR)
-            .long(INPUTS_DIR)
-            .value_parser(value_parser!(PathBuf))
-            .num_args(0..),
-    ]
-}
-
-pub fn args(name: &'static str) -> Command {
-    Command::new(name)
-        .about("execute a local-only libfuzzer crash report task")
-        .args(&build_shared_args())
-}
-
 #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]
 pub struct LibfuzzerMerge {
     target_exe: PathBuf,
diff --git a/src/agent/onefuzz-task/src/local/libfuzzer_regression.rs b/src/agent/onefuzz-task/src/local/libfuzzer_regression.rs
index 501d2385e2..3fbb9f0bd6 100644
--- a/src/agent/onefuzz-task/src/local/libfuzzer_regression.rs
+++ b/src/agent/onefuzz-task/src/local/libfuzzer_regression.rs
@@ -3,145 +3,13 @@
 
 use std::{collections::HashMap, path::PathBuf};
 
-use crate::{
-    local::common::{
-        build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir, CmdType,
-        SyncCountDirMonitor, UiEvent, CHECK_FUZZER_HELP, CHECK_RETRY_COUNT, COVERAGE_DIR,
-        CRASHES_DIR, NO_REPRO_DIR, REGRESSION_REPORTS_DIR, REPORTS_DIR, TARGET_ENV, TARGET_EXE,
-        TARGET_OPTIONS, TARGET_TIMEOUT, UNIQUE_REPORTS_DIR,
-    },
-    tasks::{
-        config::CommonConfig,
-        regression::libfuzzer::{Config, LibFuzzerRegressionTask},
-        utils::default_bool_true,
-    },
-};
+use crate::tasks::{config::CommonConfig, utils::default_bool_true};
 use anyhow::Result;
 use async_trait::async_trait;
-use clap::{Arg, ArgAction, Command};
-use flume::Sender;
 use schemars::JsonSchema;
 
 use super::template::{RunContext, Template};
 
-const REPORT_NAMES: &str = "report_names";
-
-pub fn build_regression_config(
-    args: &clap::ArgMatches,
-    common: CommonConfig,
-    event_sender: Option<Sender<UiEvent>>,
-) -> Result<Config> {
-    let target_exe = get_cmd_exe(CmdType::Target, args)?.into();
-    let target_env = get_cmd_env(CmdType::Target, args)?;
-    let target_options = get_cmd_arg(CmdType::Target, args);
-    let target_timeout = args.get_one::<u64>(TARGET_TIMEOUT).copied();
-    let crashes = get_synced_dir(CRASHES_DIR, common.job_id, common.task_id, args)?
-        .monitor_count(&event_sender)?;
-    let regression_reports =
-        get_synced_dir(REGRESSION_REPORTS_DIR, common.job_id, common.task_id, args)?
-            .monitor_count(&event_sender)?;
-    let check_retry_count = args
-        .get_one::<u64>(CHECK_RETRY_COUNT)
-        .copied()
-        .expect("has a default value");
-
-    let reports = get_synced_dir(REPORTS_DIR, common.job_id, common.task_id, args)
-        .ok()
-        .monitor_count(&event_sender)?;
-    let no_repro = get_synced_dir(NO_REPRO_DIR, common.job_id, common.task_id, args)
-        .ok()
-        .monitor_count(&event_sender)?;
-    let unique_reports = get_synced_dir(UNIQUE_REPORTS_DIR, common.job_id, common.task_id, args)
-        .ok()
-        .monitor_count(&event_sender)?;
-
-    let report_list: Option<Vec<String>> = args
-        .get_many::<String>(REPORT_NAMES)
-        .map(|x| x.cloned().collect());
-
-    let check_fuzzer_help = args.get_flag(CHECK_FUZZER_HELP);
-
-    let config = Config {
-        target_exe,
-        target_env,
-        target_options,
-        target_timeout,
-        check_fuzzer_help,
-        check_retry_count,
-        crashes,
-        regression_reports,
-        reports,
-        no_repro,
-        unique_reports,
-        readonly_inputs: None,
-        report_list,
-        minimized_stack_depth: None,
-        common,
-    };
-    Ok(config)
-}
-
-pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
-    let context = build_local_context(args, true, event_sender.clone()).await?;
-    let config = build_regression_config(args, context.common_config.clone(), event_sender)?;
-    LibFuzzerRegressionTask::new(config).run().await
-}
-
-pub fn build_shared_args(local_job: bool) -> Vec<Arg> {
-    let mut args = vec![
-        Arg::new(TARGET_EXE).long(TARGET_EXE).required(true),
-        Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..),
-        Arg::new(TARGET_OPTIONS)
-            .long(TARGET_OPTIONS)
-            .value_delimiter(' ')
-            .help("Use a quoted string with space separation to denote multiple arguments"),
-        Arg::new(COVERAGE_DIR)
-            .required(!local_job)
-            .long(COVERAGE_DIR)
-            .value_parser(value_parser!(PathBuf)),
-        Arg::new(CHECK_FUZZER_HELP)
-            .action(ArgAction::SetTrue)
-            .long(CHECK_FUZZER_HELP),
-        Arg::new(TARGET_TIMEOUT)
-            .long(TARGET_TIMEOUT)
-            .value_parser(value_parser!(u64)),
-        Arg::new(CRASHES_DIR)
-            .long(CRASHES_DIR)
-            .required(true)
-            .value_parser(value_parser!(PathBuf)),
-        Arg::new(REGRESSION_REPORTS_DIR)
-            .long(REGRESSION_REPORTS_DIR)
-            .required(local_job)
-            .value_parser(value_parser!(PathBuf)),
-        Arg::new(REPORTS_DIR)
-            .long(REPORTS_DIR)
-            .required(false)
-            .value_parser(value_parser!(PathBuf)),
-        Arg::new(NO_REPRO_DIR)
-            .long(NO_REPRO_DIR)
-            .required(false)
-            .value_parser(value_parser!(PathBuf)),
-        Arg::new(UNIQUE_REPORTS_DIR)
-            .long(UNIQUE_REPORTS_DIR)
-            .value_parser(value_parser!(PathBuf))
-            .required(true),
-        Arg::new(CHECK_RETRY_COUNT)
-            .long(CHECK_RETRY_COUNT)
-            .value_parser(value_parser!(u64))
-            .default_value("0"),
-    ];
-    if local_job {
-        args.push(Arg::new(REPORT_NAMES).long(REPORT_NAMES).num_args(0..))
-    }
-    args
-}
-
-pub fn args(name: &'static str) -> Command {
-    Command::new(name)
-        .about("execute a local-only libfuzzer regression task")
-        .args(&build_shared_args(true))
-}
-
 #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]
 pub struct LibfuzzerRegression {
     target_exe: PathBuf,
diff --git a/src/agent/onefuzz-task/src/local/libfuzzer_test_input.rs b/src/agent/onefuzz-task/src/local/libfuzzer_test_input.rs
index 9c6f16094e..5bef2347f7 100644
--- a/src/agent/onefuzz-task/src/local/libfuzzer_test_input.rs
+++ b/src/agent/onefuzz-task/src/local/libfuzzer_test_input.rs
@@ -1,97 +1,14 @@
 // Copyright (c) Microsoft Corporation.
 // Licensed under the MIT License.
 
-use crate::{
-    local::common::{
-        build_local_context, get_cmd_arg, get_cmd_env, CmdType, UiEvent, CHECK_RETRY_COUNT,
-        TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, TARGET_TIMEOUT,
-    },
-    tasks::report::libfuzzer_report::{test_input, TestInputArgs},
-};
 use anyhow::Result;
 use async_trait::async_trait;
-use clap::{Arg, Command};
-use flume::Sender;
 use onefuzz::machine_id::MachineIdentity;
 use schemars::JsonSchema;
 use std::{collections::HashMap, path::PathBuf};
 
 use super::template::{RunContext, Template};
 
-pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
-    let context = build_local_context(args, true, event_sender).await?;
-
-    let target_exe = args
-        .get_one::<PathBuf>(TARGET_EXE)
-        .expect("marked as required");
-    let target_env = get_cmd_env(CmdType::Target, args)?;
-    let target_options = get_cmd_arg(CmdType::Target, args);
-    let input = args
-        .get_one::<PathBuf>("input")
-        .expect("marked as required");
-    let target_timeout = args.get_one::<u64>(TARGET_TIMEOUT).copied();
-    let check_retry_count = args
-        .get_one::<u64>(CHECK_RETRY_COUNT)
-        .copied()
-        .expect("has a default value");
-
-    let extra_setup_dir = context.common_config.extra_setup_dir.as_deref();
-    let extra_output_dir = context
-        .common_config
-        .extra_output
-        .as_ref()
-        .map(|x| x.local_path.as_path());
-
-    let config = TestInputArgs {
-        target_exe: target_exe.as_path(),
-        target_env: &target_env,
-        target_options: &target_options,
-        input_url: None,
-        input: input.as_path(),
-        job_id: context.common_config.job_id,
-        task_id: context.common_config.task_id,
-        target_timeout,
-        check_retry_count,
-        setup_dir: &context.common_config.setup_dir,
-        extra_setup_dir,
-        extra_output_dir,
-        minimized_stack_depth: None,
-        machine_identity: context.common_config.machine_identity,
-    };
-
-    let result = test_input(config).await?;
-    println!("{}", serde_json::to_string_pretty(&result)?);
-    Ok(())
-}
-
-pub fn build_shared_args() -> Vec<Arg> {
-    vec![
-        Arg::new(TARGET_EXE).required(true),
-        Arg::new("input")
-            .required(true)
-            .value_parser(value_parser!(PathBuf)),
-        Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..),
-        Arg::new(TARGET_OPTIONS)
-            .default_value("{input}")
-            .long(TARGET_OPTIONS)
-            .value_delimiter(' ')
-            .help("Use a quoted string with space separation to denote multiple arguments"),
-        Arg::new(TARGET_TIMEOUT)
-            .long(TARGET_TIMEOUT)
-            .value_parser(value_parser!(u64)),
-        Arg::new(CHECK_RETRY_COUNT)
-            .long(CHECK_RETRY_COUNT)
-            .value_parser(value_parser!(u64))
-            .default_value("0"),
-    ]
-}
-
-pub fn args(name: &'static str) -> Command {
-    Command::new(name)
-        .about("test a libfuzzer application with a specific input")
-        .args(&build_shared_args())
-}
-
 #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]
 pub struct LibfuzzerTestInput {
     input: PathBuf,
diff --git a/src/agent/onefuzz-task/src/local/mod.rs b/src/agent/onefuzz-task/src/local/mod.rs
index 03d394bcdb..385ff8ffcd 100644
--- a/src/agent/onefuzz-task/src/local/mod.rs
+++ b/src/agent/onefuzz-task/src/local/mod.rs
@@ -14,7 +14,6 @@ pub mod libfuzzer_fuzz;
 pub mod libfuzzer_merge;
 pub mod libfuzzer_regression;
 pub mod libfuzzer_test_input;
-pub mod radamsa;
 pub mod template;
 pub mod test_input;
 pub mod tui;
diff --git a/src/agent/onefuzz-task/src/local/radamsa.rs b/src/agent/onefuzz-task/src/local/radamsa.rs
deleted file mode 100644
index 4d84de027a..0000000000
--- a/src/agent/onefuzz-task/src/local/radamsa.rs
+++ /dev/null
@@ -1,78 +0,0 @@
-// Copyright (c) Microsoft Corporation.
-// Licensed under the MIT License.
-
-use crate::{
-    local::{
-        common::{build_local_context, DirectoryMonitorQueue, UiEvent},
-        generic_crash_report::{build_report_config, build_shared_args as build_crash_args},
-        generic_generator::{build_fuzz_config, build_shared_args as build_fuzz_args},
-    },
-    tasks::{config::CommonConfig, fuzz::generator::GeneratorTask, report::generic::ReportTask},
-};
-use anyhow::{Context, Result};
-use clap::Command;
-use flume::Sender;
-use onefuzz::utils::try_wait_all_join_handles;
-use std::collections::HashSet;
-use tokio::task::spawn;
-use uuid::Uuid;
-
-pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
-    let context = build_local_context(args, true, event_sender.clone()).await?;
-    let fuzz_config = build_fuzz_config(args, context.common_config.clone(), event_sender.clone())?;
-    let crash_dir = fuzz_config
-        .crashes
-        .remote_url()?
-        .as_file_path()
-        .ok_or_else(|| format_err!("invalid crash directory"))?;
-
-    tokio::fs::create_dir_all(&crash_dir)
-        .await
-        .with_context(|| {
-            format!(
-                "unable to create crashes directory: {}",
-                crash_dir.display()
-            )
-        })?;
-
-    let fuzzer = GeneratorTask::new(fuzz_config);
-    let fuzz_task = spawn(async move { fuzzer.run().await });
-
-    let crash_report_input_monitor = DirectoryMonitorQueue::start_monitoring(crash_dir)
-        .await
-        .context("directory monitor failed")?;
-    let report_config = build_report_config(
-        args,
-        Some(crash_report_input_monitor.queue_client),
-        CommonConfig {
-            task_id: Uuid::new_v4(),
-            ..context.common_config.clone()
-        },
-        event_sender,
-    )?;
-    let report_task = spawn(async move { ReportTask::new(report_config).managed_run().await });
-
-    try_wait_all_join_handles(vec![
-        fuzz_task,
-        report_task,
-        crash_report_input_monitor.handle,
-    ])
-    .await?;
-
-    Ok(())
-}
-
-pub fn args(name: &'static str) -> Command {
-    let mut app = Command::new(name).about("run a local generator & crash reporting job");
-
-    let mut used = HashSet::new();
-    for args in &[build_fuzz_args(), build_crash_args()] {
-        for arg in args {
-            if used.insert(arg.get_id()) {
-                app = app.arg(arg);
-            }
-        }
-    }
-
-    app
-}
diff --git a/src/agent/onefuzz-task/src/local/schema.json b/src/agent/onefuzz-task/src/local/schema.json
index 0a1f128e67..e5b00f6e17 100644
--- a/src/agent/onefuzz-task/src/local/schema.json
+++ b/src/agent/onefuzz-task/src/local/schema.json
@@ -126,7 +126,6 @@
             "analyzer_options",
             "target_exe",
             "target_options",
-            "tools",
             "type"
           ],
           "properties": {
@@ -182,7 +181,10 @@
               }
             },
             "tools": {
-              "type": "string"
+              "type": [
+                "string",
+                "null"
+              ]
             },
             "type": {
               "type": "string",
@@ -893,4 +895,4 @@
       ]
     }
   }
-}
+}
\ No newline at end of file
diff --git a/src/agent/onefuzz-task/src/local/template.rs b/src/agent/onefuzz-task/src/local/template.rs
index b2e0c425ff..73ae6e5e48 100644
--- a/src/agent/onefuzz-task/src/local/template.rs
+++ b/src/agent/onefuzz-task/src/local/template.rs
@@ -196,6 +196,7 @@ pub async fn launch(
         job_id: Uuid::new_v4(),
         instance_id: Uuid::new_v4(),
         heartbeat_queue: None,
+        job_result_queue: None,
         instance_telemetry_key: None,
         microsoft_telemetry_key: None,
         logs: None,
@@ -241,12 +242,10 @@ mod test {
             .expect("Couldn't find checked-in schema.json")
             .replace("\r\n", "\n");
 
-        println!("{}", schema_str);
-
-        assert_eq!(
-            schema_str.replace('\n', ""),
-            checked_in_schema.replace('\n', ""),
-            "The checked-in local fuzzing schema did not match the generated schema."
-        );
+        if schema_str.replace('\n', "") != checked_in_schema.replace('\n', "") {
+            std::fs::write("src/local/new.schema.json", schema_str)
+                .expect("The schemas did not match, and writing the new schema to a file also failed.");
+            panic!("The checked-in local fuzzing schema did not match the generated schema. The generated schema can be found at src/local/new.schema.json");
+        }
     }
 }
diff --git a/src/agent/onefuzz-task/src/local/test_input.rs b/src/agent/onefuzz-task/src/local/test_input.rs
index 4077bd08f8..b8027a7f41 100644
--- a/src/agent/onefuzz-task/src/local/test_input.rs
+++ b/src/agent/onefuzz-task/src/local/test_input.rs
@@ -1,18 +1,8 @@
 // Copyright (c) Microsoft Corporation.
 // Licensed under the MIT License.
 
-use crate::{
-    local::common::{
-        build_local_context, get_cmd_arg, get_cmd_env, CmdType, UiEvent, CHECK_ASAN_LOG,
-        CHECK_RETRY_COUNT, DISABLE_CHECK_DEBUGGER, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS,
-        TARGET_TIMEOUT,
-    },
-    tasks::report::generic::{test_input, TestInputArgs},
-};
 use anyhow::Result;
 use async_trait::async_trait;
-use clap::{Arg, ArgAction, Command};
-use flume::Sender;
 use onefuzz::machine_id::MachineIdentity;
 use schemars::JsonSchema;
 use std::{collections::HashMap, path::PathBuf};
@@ -20,82 +10,6 @@ use uuid::Uuid;
 
 use super::template::{RunContext, Template};
 
-pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
-    let context = build_local_context(args, false, event_sender).await?;
-
-    let target_exe = args
-        .get_one::<PathBuf>(TARGET_EXE)
-        .expect("is marked required");
-    let target_env = get_cmd_env(CmdType::Target, args)?;
-    let target_options = get_cmd_arg(CmdType::Target, args);
-    let input = args
-        .get_one::<PathBuf>("input")
-        .expect("is marked required");
-    let target_timeout = args.get_one::<u64>(TARGET_TIMEOUT).copied();
-    let check_retry_count = args
-        .get_one::<u64>(CHECK_RETRY_COUNT)
-        .copied()
-        .expect("has default value");
-    let check_asan_log = args.get_flag(CHECK_ASAN_LOG);
-    let check_debugger = !args.get_flag(DISABLE_CHECK_DEBUGGER);
-
-    let config = TestInputArgs {
-        target_exe: target_exe.as_path(),
-        target_env: &target_env,
-        target_options: &target_options,
-        input_url: None,
-        input: input.as_path(),
-        job_id: context.common_config.job_id,
-        task_id: context.common_config.task_id,
-        target_timeout,
-        check_retry_count,
-        setup_dir: &context.common_config.setup_dir,
-        extra_setup_dir: context.common_config.extra_setup_dir.as_deref(),
-        minimized_stack_depth: None,
-        check_asan_log,
-        check_debugger,
-        machine_identity: context.common_config.machine_identity.clone(),
-    };
-
-    let result = test_input(config).await?;
-    println!("{}", serde_json::to_string_pretty(&result)?);
-    Ok(())
-}
-
-pub fn build_shared_args() -> Vec<Arg> {
-    vec![
-        Arg::new(TARGET_EXE).required(true),
-        Arg::new("input")
-            .required(true)
-            .value_parser(value_parser!(PathBuf)),
-        Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..),
-        Arg::new(TARGET_OPTIONS)
-            .default_value("{input}")
-            .long(TARGET_OPTIONS)
-            .value_delimiter(' ')
-            .help("Use a quoted string with space separation to denote multiple arguments"),
-        Arg::new(TARGET_TIMEOUT)
-            .long(TARGET_TIMEOUT)
-            .value_parser(value_parser!(u64)),
-        Arg::new(CHECK_RETRY_COUNT)
-            .long(CHECK_RETRY_COUNT)
-            .value_parser(value_parser!(u64))
-            .default_value("0"),
-        Arg::new(CHECK_ASAN_LOG)
-            .action(ArgAction::SetTrue)
-            .long(CHECK_ASAN_LOG),
-        Arg::new(DISABLE_CHECK_DEBUGGER)
-            .action(ArgAction::SetTrue)
-            .long("disable_check_debugger"),
-    ]
-}
-
-pub fn args(name: &'static str) -> Command {
-    Command::new(name)
-        .about("test an application with a specific input")
-        .args(&build_shared_args())
-}
-
 #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]
 pub struct TestInput {
     input: PathBuf,
diff --git a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs
index 3ba068a614..05c6c3d169 100644
--- a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs
+++ b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs
@@ -65,6 +65,8 @@ pub async fn run(config: Config) -> Result<()> {
         tools.init_pull().await?;
     }
 
+    let job_result_client = config.common.init_job_result().await?;
+
     // the tempdir is always created, however, the reports_path and
     // reports_monitor_future are only created if we have one of the three
     // report SyncedDir. The idea is that the option for where to write reports
@@ -88,6 +90,7 @@ pub async fn run(config: Config) -> Result<()> {
                 &config.unique_reports,
                 &config.reports,
                 &config.no_repro,
+                &job_result_client,
             );
             (
                 Some(reports_dir.path().to_path_buf()),
@@ -171,7 +174,7 @@ async fn poll_inputs(
                 }
                 message.delete().await?;
             } else {
-                warn!("no new candidate inputs found, sleeping");
+                debug!("no new candidate inputs found, sleeping");
                 delay_with_jitter(EMPTY_QUEUE_DELAY).await;
             }
         }
diff --git a/src/agent/onefuzz-task/src/tasks/config.rs b/src/agent/onefuzz-task/src/tasks/config.rs
index 0848379d73..e29e0fd60d 100644
--- a/src/agent/onefuzz-task/src/tasks/config.rs
+++ b/src/agent/onefuzz-task/src/tasks/config.rs
@@ -14,6 +14,7 @@ use onefuzz::{
     machine_id::MachineIdentity,
     syncdir::{SyncOperation, SyncedDir},
 };
+use onefuzz_result::job_result::{init_job_result, TaskJobResultClient};
 use onefuzz_telemetry::{
     self as telemetry, Event::task_start, EventData, InstanceTelemetryKey, MicrosoftTelemetryKey,
     Role,
@@ -50,6 +51,8 @@ pub struct CommonConfig {
 
     pub heartbeat_queue: Option<Url>,
 
+    pub job_result_queue: Option<Url>,
+
     pub instance_telemetry_key: Option<InstanceTelemetryKey>,
 
     pub microsoft_telemetry_key: Option<MicrosoftTelemetryKey>,
@@ -103,6 +106,23 @@ impl CommonConfig {
             None => Ok(None),
         }
     }
+
+    pub async fn init_job_result(&self) -> Result<Option<TaskJobResultClient>> {
+        match &self.job_result_queue {
+            Some(url) => {
+                let result = init_job_result(
+                    url.clone(),
+                    self.task_id,
+                    self.job_id,
+                    self.machine_identity.machine_id,
+                    self.machine_identity.machine_name.clone(),
+                )
+                .await?;
+                Ok(Some(result))
+            }
+            None => Ok(None),
+        }
+    }
 }
 
 #[derive(Debug, Deserialize)]
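
Note: the new `init_job_result` mirrors `init_heartbeat` above: it returns `Ok(None)` when no `job_result_queue` URL is configured, so runs without a queue keep working. A minimal usage sketch, using only names introduced by this patch (the payload key is illustrative, not prescribed):

    use std::collections::HashMap;
    use onefuzz_result::job_result::{JobResultData, JobResultSender};

    async fn send_one_result(common: &CommonConfig) -> anyhow::Result<()> {
        // None when common.job_result_queue is unset (e.g. local template runs).
        let jr_client = common.init_job_result().await?;
        if let Some(jr_client) = jr_client {
            // Sends are fire-and-forget throughout this patch; errors are dropped.
            let _ = jr_client
                .send_direct(
                    JobResultData::NewCrashingInput,
                    HashMap::from([("count".to_string(), 1.0)]),
                )
                .await;
        }
        Ok(())
    }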
diff --git a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs
index b112cfefbe..4fde9efb31 100644
--- a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs
+++ b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs
@@ -26,6 +26,8 @@ use onefuzz_file_format::coverage::{
     binary::{v1::BinaryCoverageJson as BinaryCoverageJsonV1, BinaryCoverageJson},
     source::{v1::SourceCoverageJson as SourceCoverageJsonV1, SourceCoverageJson},
 };
+use onefuzz_result::job_result::JobResultData;
+use onefuzz_result::job_result::{JobResultSender, TaskJobResultClient};
 use onefuzz_telemetry::{event, warn, Event::coverage_data, Event::coverage_failed, EventData};
 use storage_queue::{Message, QueueClient};
 use tokio::fs;
@@ -114,7 +116,7 @@ impl CoverageTask {
         let allowlist = self.load_target_allowlist().await?;
 
         let heartbeat = self.config.common.init_heartbeat(None).await?;
-
+        let job_result = self.config.common.init_job_result().await?;
         let mut seen_inputs = false;
 
         let target_exe_path =
@@ -129,6 +131,7 @@ impl CoverageTask {
             coverage,
             allowlist,
             heartbeat,
+            job_result,
             target_exe.to_string(),
         )?;
 
@@ -219,6 +222,7 @@ struct TaskContext<'a> {
     module_allowlist: AllowList,
     source_allowlist: Arc<AllowList>,
     heartbeat: Option<TaskHeartbeatClient>,
+    job_result: Option<TaskJobResultClient>,
     cache: Arc<DebugInfoCache>,
 }
 
@@ -228,6 +232,7 @@ impl<'a> TaskContext<'a> {
         coverage: BinaryCoverage,
         allowlist: TargetAllowList,
         heartbeat: Option<TaskHeartbeatClient>,
+        job_result: Option<TaskJobResultClient>,
         target_exe: String,
     ) -> Result<Self> {
         let cache = DebugInfoCache::new(allowlist.source_files.clone());
@@ -247,6 +252,7 @@ impl<'a> TaskContext<'a> {
             module_allowlist: allowlist.modules,
             source_allowlist: Arc::new(allowlist.source_files),
             heartbeat,
+            job_result,
             cache: Arc::new(cache),
         })
     }
@@ -455,7 +461,16 @@ impl<'a> TaskContext<'a> {
         let s = CoverageStats::new(&coverage);
         event!(coverage_data; Covered = s.covered, Features = s.features, Rate = s.rate);
         metric!(coverage_data; 1.0; Covered = s.covered, Features = s.features, Rate = s.rate);
-
+        self.job_result
+            .send_direct(
+                JobResultData::CoverageData,
+                HashMap::from([
+                    ("covered".to_string(), s.covered as f64),
+                    ("features".to_string(), s.features as f64),
+                    ("rate".to_string(), s.rate),
+                ]),
+            )
+            .await;
         Ok(())
     }
 
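Note: unlike the other call sites in this patch, the coverage task invokes `send_direct` on the `Option` itself rather than guarding with `if let`. The guarded equivalent, matching the pattern used in crash_report.rs below, would be:

    if let Some(job_result) = &self.job_result {
        let _ = job_result
            .send_direct(
                JobResultData::CoverageData,
                HashMap::from([
                    ("covered".to_string(), s.covered as f64),
                    ("features".to_string(), s.features as f64),
                    ("rate".to_string(), s.rate),
                ]),
            )
            .await;
    }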
diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs
index d9116a1ed2..bd7511cac2 100644
--- a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs
+++ b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs
@@ -73,6 +73,7 @@ impl GeneratorTask {
         }
 
         let hb_client = self.config.common.init_heartbeat(None).await?;
+        let jr_client = self.config.common.init_job_result().await?;
 
         for dir in &self.config.readonly_inputs {
             dir.init_pull().await?;
@@ -84,7 +85,10 @@ impl GeneratorTask {
             self.config.ensemble_sync_delay,
         );
 
-        let crash_dir_monitor = self.config.crashes.monitor_results(new_result, false);
+        let crash_dir_monitor = self
+            .config
+            .crashes
+            .monitor_results(new_result, false, &jr_client);
 
         let fuzzer = self.fuzzing_loop(hb_client);
 
@@ -298,6 +302,7 @@ mod tests {
                 task_id: Default::default(),
                 instance_id: Default::default(),
                 heartbeat_queue: Default::default(),
+                job_result_queue: Default::default(),
                 instance_telemetry_key: Default::default(),
                 microsoft_telemetry_key: Default::default(),
                 logs: Default::default(),
diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/libfuzzer/common.rs b/src/agent/onefuzz-task/src/tasks/fuzz/libfuzzer/common.rs
index 4f8c67ae8e..bfd9f3f5cc 100644
--- a/src/agent/onefuzz-task/src/tasks/fuzz/libfuzzer/common.rs
+++ b/src/agent/onefuzz-task/src/tasks/fuzz/libfuzzer/common.rs
@@ -1,7 +1,11 @@
 // Copyright (c) Microsoft Corporation.
 // Licensed under the MIT License.
 
-use crate::tasks::{config::CommonConfig, heartbeat::HeartbeatSender, utils::default_bool_true};
+use crate::tasks::{
+    config::CommonConfig,
+    heartbeat::{HeartbeatSender, TaskHeartbeatClient},
+    utils::default_bool_true,
+};
 use anyhow::{Context, Result};
 use arraydeque::{ArrayDeque, Wrapping};
 use async_trait::async_trait;
@@ -12,6 +16,7 @@ use onefuzz::{
     process::ExitStatus,
     syncdir::{continuous_sync, SyncOperation::Pull, SyncedDir},
 };
+use onefuzz_result::job_result::{JobResultData, JobResultSender, TaskJobResultClient};
 use onefuzz_telemetry::{
     Event::{new_coverage, new_crashdump, new_result, runtime_stats},
     EventData,
@@ -126,21 +131,31 @@ where
         self.verify().await?;
 
         let hb_client = self.config.common.init_heartbeat(None).await?;
+        let jr_client = self.config.common.init_job_result().await?;
 
         // To be scheduled.
         let resync = self.continuous_sync_inputs();
-        let new_inputs = self.config.inputs.monitor_results(new_coverage, true);
-        let new_crashes = self.config.crashes.monitor_results(new_result, true);
+
+        let new_inputs = self
+            .config
+            .inputs
+            .monitor_results(new_coverage, true, &jr_client);
+        let new_crashes = self
+            .config
+            .crashes
+            .monitor_results(new_result, true, &jr_client);
         let new_crashdumps = async {
             if let Some(crashdumps) = &self.config.crashdumps {
-                crashdumps.monitor_results(new_crashdump, true).await
+                crashdumps
+                    .monitor_results(new_crashdump, true, &jr_client)
+                    .await
             } else {
                 Ok(())
             }
         };
 
         let (stats_sender, stats_receiver) = mpsc::unbounded_channel();
-        let report_stats = report_runtime_stats(stats_receiver, hb_client);
+        let report_stats = report_runtime_stats(stats_receiver, &hb_client, &jr_client);
         let fuzzers = self.run_fuzzers(Some(&stats_sender));
         futures::try_join!(
             resync,
@@ -183,7 +198,7 @@ where
             .inputs
             .local_path
             .parent()
-            .ok_or_else(|| anyhow!("Invalid input path"))?;
+            .ok_or_else(|| anyhow!("invalid input path"))?;
         let temp_path = task_dir.join(".temp");
         tokio::fs::create_dir_all(&temp_path).await?;
         let temp_dir = tempdir_in(temp_path)?;
@@ -501,7 +516,7 @@ impl TotalStats {
         self.execs_sec = self.worker_stats.values().map(|x| x.execs_sec).sum();
     }
 
-    fn report(&self) {
+    async fn report(&self, jr_client: &Option<TaskJobResultClient>) {
         event!(
             runtime_stats;
             EventData::Count = self.count,
@@ -513,6 +528,17 @@ impl TotalStats {
             EventData::Count = self.count,
             EventData::ExecsSecond = self.execs_sec
         );
+        if let Some(jr_client) = jr_client {
+            let _ = jr_client
+                .send_direct(
+                    JobResultData::RuntimeStats,
+                    HashMap::from([
+                        ("total_count".to_string(), self.count as f64),
+                        ("execs_sec".to_string(), self.execs_sec),
+                    ]),
+                )
+                .await;
+        }
     }
 }
 
@@ -542,7 +568,8 @@ impl Timer {
 // are approximating nearest-neighbor interpolation on the runtime stats time series.
 async fn report_runtime_stats(
     mut stats_channel: mpsc::UnboundedReceiver<RuntimeStats>,
-    heartbeat_client: impl HeartbeatSender,
+    heartbeat_client: &Option<TaskHeartbeatClient>,
+    jr_client: &Option<TaskJobResultClient>,
 ) -> Result<()> {
     // Cache the last-reported stats for a given worker.
     //
@@ -551,7 +578,7 @@ async fn report_runtime_stats(
     let mut total = TotalStats::default();
 
     // report all zeros to start
-    total.report();
+    total.report(jr_client).await;
 
     let timer = Timer::new(RUNTIME_STATS_PERIOD);
 
@@ -560,10 +587,10 @@ async fn report_runtime_stats(
             Some(stats) = stats_channel.recv() => {
                 heartbeat_client.alive();
                 total.update(stats);
-                total.report()
+                total.report(jr_client).await
             }
             _ = timer.wait() => {
-                total.report()
+                total.report(jr_client).await
             }
         }
     }
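
Note: `report_runtime_stats` now borrows both optional clients; `heartbeat_client.alive()` still compiles, which implies `HeartbeatSender` is implemented for `Option<TaskHeartbeatClient>`. For readers tracing the stats path, a condensed sketch of the producer side (types taken from this hunk; the fields of `RuntimeStats` are not shown in this diff):

    use tokio::sync::mpsc;

    // Workers hold the sender half and push per-worker stats; the reporter
    // aggregates them into TotalStats and forwards each update to telemetry
    // plus the optional job-result queue.
    let (stats_sender, stats_receiver) = mpsc::unbounded_channel::<RuntimeStats>();
    let report_stats = report_runtime_stats(stats_receiver, &hb_client, &jr_client);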
diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs
index de1e1106ba..3f00e20b8d 100644
--- a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs
+++ b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs
@@ -79,7 +79,10 @@ pub async fn spawn(config: SupervisorConfig) -> Result<(), Error> {
         remote_path: config.crashes.remote_path.clone(),
     };
     crashes.init().await?;
-    let monitor_crashes = crashes.monitor_results(new_result, false);
+
+    let jr_client = config.common.init_job_result().await?;
+
+    let monitor_crashes = crashes.monitor_results(new_result, false, &jr_client);
 
     // setup crashdumps
     let (crashdump_dir, monitor_crashdumps) = {
@@ -95,9 +98,12 @@ pub async fn spawn(config: SupervisorConfig) -> Result<(), Error> {
         };
 
         let monitor_dir = crashdump_dir.clone();
+        let monitor_jr_client = config.common.init_job_result().await?;
         let monitor_crashdumps = async move {
             if let Some(crashdumps) = monitor_dir {
-                crashdumps.monitor_results(new_crashdump, false).await
+                crashdumps
+                    .monitor_results(new_crashdump, false, &monitor_jr_client)
+                    .await
             } else {
                 Ok(())
             }
@@ -129,11 +135,13 @@ pub async fn spawn(config: SupervisorConfig) -> Result<(), Error> {
     if let Some(no_repro) = &config.no_repro {
         no_repro.init().await?;
     }
+
     let monitor_reports_future = monitor_reports(
         reports_dir.path(),
         &config.unique_reports,
         &config.reports,
         &config.no_repro,
+        &jr_client,
     );
 
     let inputs = SyncedDir {
@@ -156,7 +164,7 @@ pub async fn spawn(config: SupervisorConfig) -> Result<(), Error> {
             delay_with_jitter(delay).await;
         }
     }
-    let monitor_inputs = inputs.monitor_results(new_coverage, false);
+    let monitor_inputs = inputs.monitor_results(new_coverage, false, &jr_client);
     let inputs_sync_cancellation = CancellationToken::new(); // never actually cancelled
     let inputs_sync_task =
         inputs.continuous_sync(Pull, config.ensemble_sync_delay, &inputs_sync_cancellation);
@@ -444,6 +452,7 @@ mod tests {
                 task_id: Default::default(),
                 instance_id: Default::default(),
                 heartbeat_queue: Default::default(),
+                job_result_queue: Default::default(),
                 instance_telemetry_key: Default::default(),
                 microsoft_telemetry_key: Default::default(),
                 logs: Default::default(),
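
Note: a second client (`monitor_jr_client`) is initialized above because the crashdump monitor is an `async move` block that must own its client while `jr_client` stays borrowed by the other monitors. If `TaskJobResultClient` implements `Clone` (not established by this patch), the second queue init could be avoided:

    // Assumption: TaskJobResultClient is Clone; if it is not, keep the second init.
    let monitor_jr_client = jr_client.clone();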
diff --git a/src/agent/onefuzz-task/src/tasks/heartbeat.rs b/src/agent/onefuzz-task/src/tasks/heartbeat.rs
index 515fa39d0c..e13b661909 100644
--- a/src/agent/onefuzz-task/src/tasks/heartbeat.rs
+++ b/src/agent/onefuzz-task/src/tasks/heartbeat.rs
@@ -1,8 +1,8 @@
 // Copyright (c) Microsoft Corporation.
 // Licensed under the MIT License.
 
-use crate::onefuzz::heartbeat::HeartbeatClient;
 use anyhow::Result;
+use onefuzz::heartbeat::HeartbeatClient;
 use reqwest::Url;
 use serde::{self, Deserialize, Serialize};
 use std::time::Duration;
diff --git a/src/agent/onefuzz-task/src/tasks/merge/generic.rs b/src/agent/onefuzz-task/src/tasks/merge/generic.rs
index 4f2e8234a8..3b6a2094d8 100644
--- a/src/agent/onefuzz-task/src/tasks/merge/generic.rs
+++ b/src/agent/onefuzz-task/src/tasks/merge/generic.rs
@@ -83,7 +83,7 @@ pub async fn spawn(config: &Config) -> Result<()> {
                 }
             }
         } else {
-            warn!("no new candidate inputs found, sleeping");
+            debug!("no new candidate inputs found, sleeping");
             delay_with_jitter(EMPTY_QUEUE_DELAY).await;
         };
     }
diff --git a/src/agent/onefuzz-task/src/tasks/merge/libfuzzer_merge.rs b/src/agent/onefuzz-task/src/tasks/merge/libfuzzer_merge.rs
index 1c334b3f18..2d53bc8c07 100644
--- a/src/agent/onefuzz-task/src/tasks/merge/libfuzzer_merge.rs
+++ b/src/agent/onefuzz-task/src/tasks/merge/libfuzzer_merge.rs
@@ -120,7 +120,7 @@ async fn process_message(config: &Config, input_queue: QueueClient) -> Result<()
         }
         Ok(())
     } else {
-        warn!("no new candidate inputs found, sleeping");
+        debug!("no new candidate inputs found, sleeping");
         delay_with_jitter(EMPTY_QUEUE_DELAY).await;
         Ok(())
     }
diff --git a/src/agent/onefuzz-task/src/tasks/regression/common.rs b/src/agent/onefuzz-task/src/tasks/regression/common.rs
index 60023cfa6e..b61a97df4c 100644
--- a/src/agent/onefuzz-task/src/tasks/regression/common.rs
+++ b/src/agent/onefuzz-task/src/tasks/regression/common.rs
@@ -2,12 +2,14 @@
 // Licensed under the MIT License.
 
 use crate::tasks::{
+    config::CommonConfig,
     heartbeat::{HeartbeatSender, TaskHeartbeatClient},
     report::crash_report::{parse_report_file, CrashTestResult, RegressionReport},
 };
 use anyhow::{Context, Result};
 use async_trait::async_trait;
 use onefuzz::syncdir::SyncedDir;
+use onefuzz_result::job_result::TaskJobResultClient;
 use reqwest::Url;
 use std::path::PathBuf;
 
@@ -24,7 +26,7 @@ pub trait RegressionHandler {
 
 /// Runs the regression task
 pub async fn run(
-    heartbeat_client: Option<TaskHeartbeatClient>,
+    common_config: &CommonConfig,
     regression_reports: &SyncedDir,
     crashes: &SyncedDir,
     report_dirs: &[&SyncedDir],
@@ -35,6 +37,9 @@ pub async fn run(
     info!("starting regression task");
     regression_reports.init().await?;
 
+    let heartbeat_client = common_config.init_heartbeat(None).await?;
+    let job_result_client = common_config.init_job_result().await?;
+
     handle_crash_reports(
         handler,
         crashes,
@@ -42,6 +47,7 @@ pub async fn run(
         report_list,
         regression_reports,
         &heartbeat_client,
+        &job_result_client,
     )
     .await
     .context("handling crash reports")?;
@@ -52,6 +58,7 @@ pub async fn run(
             readonly_inputs,
             regression_reports,
             &heartbeat_client,
+            &job_result_client,
         )
         .await
         .context("handling inputs")?;
@@ -71,6 +78,7 @@ pub async fn handle_inputs(
     readonly_inputs: &SyncedDir,
     regression_reports: &SyncedDir,
     heartbeat_client: &Option<TaskHeartbeatClient>,
+    job_result_client: &Option<TaskJobResultClient>,
 ) -> Result<()> {
     readonly_inputs.init_pull().await?;
     let mut input_files = tokio::fs::read_dir(&readonly_inputs.local_path).await?;
@@ -95,7 +103,7 @@ pub async fn handle_inputs(
             crash_test_result,
             original_crash_test_result: None,
         }
-        .save(None, regression_reports)
+        .save(None, regression_reports, job_result_client)
         .await?
     }
 
@@ -109,6 +117,7 @@ pub async fn handle_crash_reports(
     report_list: &Option<Vec<String>>,
     regression_reports: &SyncedDir,
     heartbeat_client: &Option<TaskHeartbeatClient>,
+    job_result_client: &Option<TaskJobResultClient>,
 ) -> Result<()> {
     // without crash report containers, skip this method
     if report_dirs.is_empty() {
@@ -158,7 +167,7 @@ pub async fn handle_crash_reports(
                 crash_test_result,
                 original_crash_test_result: Some(original_crash_test_result),
             }
-            .save(Some(file_name), regression_reports)
+            .save(Some(file_name), regression_reports, job_result_client)
             .await?
         }
     }
diff --git a/src/agent/onefuzz-task/src/tasks/regression/generic.rs b/src/agent/onefuzz-task/src/tasks/regression/generic.rs
index 640e80db9a..8570208d59 100644
--- a/src/agent/onefuzz-task/src/tasks/regression/generic.rs
+++ b/src/agent/onefuzz-task/src/tasks/regression/generic.rs
@@ -89,7 +89,6 @@ impl GenericRegressionTask {
 
     pub async fn run(&self) -> Result<()> {
         info!("Starting generic regression task");
-        let heartbeat_client = self.config.common.init_heartbeat(None).await?;
 
         let mut report_dirs = vec![];
         for dir in vec![
@@ -103,7 +102,7 @@ impl GenericRegressionTask {
             report_dirs.push(dir);
         }
         common::run(
-            heartbeat_client,
+            &self.config.common,
             &self.config.regression_reports,
             &self.config.crashes,
             &report_dirs,
diff --git a/src/agent/onefuzz-task/src/tasks/regression/libfuzzer.rs b/src/agent/onefuzz-task/src/tasks/regression/libfuzzer.rs
index 06dd7c00d9..e65f46bb64 100644
--- a/src/agent/onefuzz-task/src/tasks/regression/libfuzzer.rs
+++ b/src/agent/onefuzz-task/src/tasks/regression/libfuzzer.rs
@@ -103,9 +103,8 @@ impl LibFuzzerRegressionTask {
             report_dirs.push(dir);
         }
 
-        let heartbeat_client = self.config.common.init_heartbeat(None).await?;
         common::run(
-            heartbeat_client,
+            &self.config.common,
             &self.config.regression_reports,
             &self.config.crashes,
             &report_dirs,
diff --git a/src/agent/onefuzz-task/src/tasks/report/crash_report.rs b/src/agent/onefuzz-task/src/tasks/report/crash_report.rs
index 23171bc432..290b98ccde 100644
--- a/src/agent/onefuzz-task/src/tasks/report/crash_report.rs
+++ b/src/agent/onefuzz-task/src/tasks/report/crash_report.rs
@@ -3,6 +3,7 @@
 
 use anyhow::{Context, Result};
 use onefuzz::{blob::BlobUrl, monitor::DirectoryMonitor, syncdir::SyncedDir};
+use onefuzz_result::job_result::{JobResultData, JobResultSender, TaskJobResultClient};
 use onefuzz_telemetry::{
     Event::{
         new_report, new_unable_to_reproduce, new_unique_report, regression_report,
@@ -12,6 +13,7 @@ use onefuzz_telemetry::{
 };
 use serde::{Deserialize, Serialize};
 use stacktrace_parser::CrashLog;
+use std::collections::HashMap;
 use std::path::{Path, PathBuf};
 use uuid::Uuid;
 
@@ -111,6 +113,7 @@ impl RegressionReport {
         self,
         report_name: Option<String>,
         regression_reports: &SyncedDir,
+        jr_client: &Option<TaskJobResultClient>,
     ) -> Result<()> {
         let (event, name) = match &self.crash_test_result {
             CrashTestResult::CrashReport(report) => {
@@ -126,6 +129,15 @@ impl RegressionReport {
         if upload_or_save_local(&self, &name, regression_reports).await? {
             event!(event; EventData::Path = name.clone());
             metric!(event; 1.0; EventData::Path = name.clone());
+
+            if let Some(jr_client) = jr_client {
+                let _ = jr_client
+                    .send_direct(
+                        JobResultData::NewRegressionReport,
+                        HashMap::from([("count".to_string(), 1.0)]),
+                    )
+                    .await;
+            }
         }
         Ok(())
     }
@@ -149,6 +161,7 @@ impl CrashTestResult {
         unique_reports: &Option<SyncedDir>,
         reports: &Option<SyncedDir>,
         no_repro: &Option<SyncedDir>,
+        jr_client: &Option<TaskJobResultClient>,
     ) -> Result<()> {
         match self {
             Self::CrashReport(report) => {
@@ -158,6 +171,15 @@ impl CrashTestResult {
                     if upload_or_save_local(&report, &name, unique_reports).await? {
                         event!(new_unique_report; EventData::Path = report.unique_blob_name());
                         metric!(new_unique_report; 1.0; EventData::Path = report.unique_blob_name());
+
+                        if let Some(jr_client) = jr_client {
+                            let _ = jr_client
+                                .send_direct(
+                                    JobResultData::NewUniqueReport,
+                                    HashMap::from([("count".to_string(), 1.0)]),
+                                )
+                                .await;
+                        }
                     }
                 }
 
@@ -166,6 +188,15 @@ impl CrashTestResult {
                     if upload_or_save_local(&report, &name, reports).await? {
                         event!(new_report; EventData::Path = report.blob_name());
                         metric!(new_report; 1.0; EventData::Path = report.blob_name());
+
+                        if let Some(jr_client) = jr_client {
+                            let _ = jr_client
+                                .send_direct(
+                                    JobResultData::NewReport,
+                                    HashMap::from([("count".to_string(), 1.0)]),
+                                )
+                                .await;
+                        }
                     }
                 }
             }
@@ -176,6 +207,15 @@ impl CrashTestResult {
                     if upload_or_save_local(&report, &name, no_repro).await? {
                         event!(new_unable_to_reproduce; EventData::Path = report.blob_name());
                         metric!(new_unable_to_reproduce; 1.0; EventData::Path = report.blob_name());
+
+                        if let Some(jr_client) = jr_client {
+                            let _ = jr_client
+                                .send_direct(
+                                    JobResultData::NoReproCrashingInput,
+                                    HashMap::from([("count".to_string(), 1.0)]),
+                                )
+                                .await;
+                        }
                     }
                 }
             }
@@ -324,6 +364,7 @@ pub async fn monitor_reports(
     unique_reports: &Option<SyncedDir>,
     reports: &Option<SyncedDir>,
     no_crash: &Option<SyncedDir>,
+    jr_client: &Option<TaskJobResultClient>,
 ) -> Result<()> {
     if unique_reports.is_none() && reports.is_none() && no_crash.is_none() {
         debug!("no report directories configured");
@@ -334,7 +375,9 @@ pub async fn monitor_reports(
 
     while let Some(file) = monitor.next_file().await? {
         let result = parse_report_file(file).await?;
-        result.save(unique_reports, reports, no_crash).await?;
+        result
+            .save(unique_reports, reports, no_crash, jr_client)
+            .await?;
     }
 
     Ok(())
diff --git a/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs b/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs
index 9b626a7d89..b8659845de 100644
--- a/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs
+++ b/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs
@@ -8,25 +8,25 @@ use std::{
     sync::Arc,
 };
 
+use crate::tasks::report::crash_report::*;
+use crate::tasks::report::dotnet::common::collect_exception_info;
+use crate::tasks::{
+    config::CommonConfig,
+    generic::input_poller::*,
+    heartbeat::{HeartbeatSender, TaskHeartbeatClient},
+    utils::{default_bool_true, try_resolve_setup_relative_path},
+};
 use anyhow::{Context, Result};
 use async_trait::async_trait;
 use onefuzz::expand::Expand;
 use onefuzz::fs::set_executable;
 use onefuzz::{blob::BlobUrl, sha256, syncdir::SyncedDir};
+use onefuzz_result::job_result::TaskJobResultClient;
 use reqwest::Url;
 use serde::Deserialize;
 use storage_queue::{Message, QueueClient};
 use tokio::fs;
 
-use crate::tasks::report::crash_report::*;
-use crate::tasks::report::dotnet::common::collect_exception_info;
-use crate::tasks::{
-    config::CommonConfig,
-    generic::input_poller::*,
-    heartbeat::{HeartbeatSender, TaskHeartbeatClient},
-    utils::{default_bool_true, try_resolve_setup_relative_path},
-};
-
 const DOTNET_DUMP_TOOL_NAME: &str = "dotnet-dump";
 
 #[derive(Debug, Deserialize)]
@@ -114,15 +114,18 @@ impl DotnetCrashReportTask {
 pub struct AsanProcessor {
     config: Arc<Config>,
     heartbeat_client: Option<TaskHeartbeatClient>,
+    job_result_client: Option<TaskJobResultClient>,
 }
 
 impl AsanProcessor {
     pub async fn new(config: Arc<Config>) -> Result<Self> {
         let heartbeat_client = config.common.init_heartbeat(None).await?;
+        let job_result_client = config.common.init_job_result().await?;
 
         Ok(Self {
             config,
             heartbeat_client,
+            job_result_client,
         })
     }
 
@@ -260,6 +263,7 @@ impl Processor for AsanProcessor {
                 &self.config.unique_reports,
                 &self.config.reports,
                 &self.config.no_repro,
+                &self.job_result_client,
             )
             .await;
 
diff --git a/src/agent/onefuzz-task/src/tasks/report/generic.rs b/src/agent/onefuzz-task/src/tasks/report/generic.rs
index 9088f98acc..8ad259f0a5 100644
--- a/src/agent/onefuzz-task/src/tasks/report/generic.rs
+++ b/src/agent/onefuzz-task/src/tasks/report/generic.rs
@@ -13,6 +13,7 @@ use async_trait::async_trait;
 use onefuzz::{
     blob::BlobUrl, input_tester::Tester, machine_id::MachineIdentity, sha256, syncdir::SyncedDir,
 };
+use onefuzz_result::job_result::TaskJobResultClient;
 use reqwest::Url;
 use serde::Deserialize;
 use std::{
@@ -73,7 +74,9 @@ impl ReportTask {
     pub async fn managed_run(&mut self) -> Result<()> {
         info!("Starting generic crash report task");
         let heartbeat_client = self.config.common.init_heartbeat(None).await?;
-        let mut processor = GenericReportProcessor::new(&self.config, heartbeat_client);
+        let job_result_client = self.config.common.init_job_result().await?;
+        let mut processor =
+            GenericReportProcessor::new(&self.config, heartbeat_client, job_result_client);
 
         #[allow(clippy::manual_flatten)]
         for entry in [
@@ -183,13 +186,19 @@ pub async fn test_input(args: TestInputArgs<'_>) -> Result<CrashTestResult> {
 pub struct GenericReportProcessor<'a> {
     config: &'a Config,
     heartbeat_client: Option<TaskHeartbeatClient>,
+    job_result_client: Option<TaskJobResultClient>,
 }
 
 impl<'a> GenericReportProcessor<'a> {
-    pub fn new(config: &'a Config, heartbeat_client: Option<TaskHeartbeatClient>) -> Self {
+    pub fn new(
+        config: &'a Config,
+        heartbeat_client: Option<TaskHeartbeatClient>,
+        job_result_client: Option<TaskJobResultClient>,
+    ) -> Self {
         Self {
             config,
             heartbeat_client,
+            job_result_client,
         }
     }
 
@@ -239,6 +248,7 @@ impl<'a> Processor for GenericReportProcessor<'a> {
                 &self.config.unique_reports,
                 &self.config.reports,
                 &self.config.no_repro,
+                &self.job_result_client,
             )
             .await
             .context("saving report failed")
diff --git a/src/agent/onefuzz-task/src/tasks/report/libfuzzer_report.rs b/src/agent/onefuzz-task/src/tasks/report/libfuzzer_report.rs
index f18f638fa3..587ed2e3dc 100644
--- a/src/agent/onefuzz-task/src/tasks/report/libfuzzer_report.rs
+++ b/src/agent/onefuzz-task/src/tasks/report/libfuzzer_report.rs
@@ -13,6 +13,7 @@ use async_trait::async_trait;
 use onefuzz::{
     blob::BlobUrl, libfuzzer::LibFuzzer, machine_id::MachineIdentity, sha256, syncdir::SyncedDir,
 };
+use onefuzz_result::job_result::TaskJobResultClient;
 use reqwest::Url;
 use serde::Deserialize;
 use std::{
@@ -196,15 +197,18 @@ pub async fn test_input(args: TestInputArgs<'_>) -> Result<CrashTestResult> {
 pub struct AsanProcessor {
     config: Arc<Config>,
     heartbeat_client: Option<TaskHeartbeatClient>,
+    job_result_client: Option<TaskJobResultClient>,
 }
 
 impl AsanProcessor {
     pub async fn new(config: Arc<Config>) -> Result<Self> {
         let heartbeat_client = config.common.init_heartbeat(None).await?;
+        let job_result_client = config.common.init_job_result().await?;
 
         Ok(Self {
             config,
             heartbeat_client,
+            job_result_client,
         })
     }
 
@@ -257,6 +261,7 @@ impl Processor for AsanProcessor {
                 &self.config.unique_reports,
                 &self.config.reports,
                 &self.config.no_repro,
+                &self.job_result_client,
             )
             .await
     }
diff --git a/src/agent/onefuzz/Cargo.toml b/src/agent/onefuzz/Cargo.toml
index c096c8ddfc..1f3c27985c 100644
--- a/src/agent/onefuzz/Cargo.toml
+++ b/src/agent/onefuzz/Cargo.toml
@@ -44,6 +44,7 @@ tempfile = "3.7.0"
 process_control = "4.0"
 reqwest-retry = { path = "../reqwest-retry" }
 onefuzz-telemetry = { path = "../onefuzz-telemetry" }
+onefuzz-result = { path = "../onefuzz-result" }
 stacktrace-parser = { path = "../stacktrace-parser" }
 backoff = { version = "0.4", features = ["tokio"] }
 
diff --git a/src/agent/onefuzz/src/blob/url.rs b/src/agent/onefuzz/src/blob/url.rs
index f55ffbb23a..134b59dea0 100644
--- a/src/agent/onefuzz/src/blob/url.rs
+++ b/src/agent/onefuzz/src/blob/url.rs
@@ -192,10 +192,15 @@ impl BlobContainerUrl {
     }
 
     pub fn as_path(&self, prefix: impl AsRef<Path>) -> Result<PathBuf> {
-        let dir = self
-            .account()
-            .ok_or_else(|| anyhow!("Invalid container Url"))?;
-        Ok(prefix.as_ref().join(dir))
+        match (self.account(), self.container()) {
+            (Some(account), Some(container)) => {
+                let mut path = PathBuf::new();
+                path.push(account);
+                path.push(container);
+                Ok(prefix.as_ref().join(path))
+            }
+            _ => bail!("Invalid container Url"),
+        }
     }
 }
 
@@ -526,4 +531,14 @@ mod tests {
             "id:000000,sig:06,src:000000,op:havoc,rep:128"
         );
     }
+
+    #[test]
+    fn test_as_path() -> Result<()> {
+        let root = PathBuf::from(r"/onefuzz");
+        let url = BlobContainerUrl::parse("https://myaccount.blob.core.windows.net/mycontainer")?;
+        let path = url.as_path(root)?;
+        assert_eq!(PathBuf::from(r"/onefuzz/myaccount/mycontainer"), path);
+
+        Ok(())
+    }
 }
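
Note: a hedged companion test for the new error path, assuming `parse` accepts a URL without a container segment (if it rejects one, the `bail!` arm only guards values constructed by other means):

    #[test]
    fn test_as_path_missing_container() -> Result<()> {
        let root = PathBuf::from(r"/onefuzz");
        // Assumption: an account-only URL parses but yields container() == None.
        let url = BlobContainerUrl::parse("https://myaccount.blob.core.windows.net/")?;
        assert!(url.as_path(root).is_err());
        Ok(())
    }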
diff --git a/src/agent/onefuzz/src/syncdir.rs b/src/agent/onefuzz/src/syncdir.rs
index 0252099561..2e73b7a694 100644
--- a/src/agent/onefuzz/src/syncdir.rs
+++ b/src/agent/onefuzz/src/syncdir.rs
@@ -11,10 +11,12 @@ use crate::{
 };
 use anyhow::{Context, Result};
 use dunce::canonicalize;
+use onefuzz_result::job_result::{JobResultData, JobResultSender, TaskJobResultClient};
 use onefuzz_telemetry::{Event, EventData};
 use reqwest::{StatusCode, Url};
 use reqwest_retry::{RetryCheck, SendRetry, DEFAULT_RETRY_PERIOD, MAX_RETRY_ATTEMPTS};
 use serde::{Deserialize, Serialize};
+use std::collections::HashMap;
 use std::{env::current_dir, path::PathBuf, str, time::Duration};
 use tokio::{fs, select};
 use tokio_util::sync::CancellationToken;
@@ -241,6 +243,7 @@ impl SyncedDir {
         url: BlobContainerUrl,
         event: Event,
         ignore_dotfiles: bool,
+        jr_client: &Option<TaskJobResultClient>,
     ) -> Result<()> {
         debug!("monitoring {}", path.display());
 
@@ -265,9 +268,39 @@ impl SyncedDir {
                 if ignore_dotfiles && file_name_event_str.starts_with('.') {
                     continue;
                 }
-
                 event!(event.clone(); EventData::Path = file_name_event_str);
                 metric!(event.clone(); 1.0; EventData::Path = file_name_str_metric_str);
+                if let Some(jr_client) = jr_client {
+                    match event {
+                        Event::new_result => {
+                            jr_client
+                                .send_direct(
+                                    JobResultData::NewCrashingInput,
+                                    HashMap::from([("count".to_string(), 1.0)]),
+                                )
+                                .await;
+                        }
+                        Event::new_coverage => {
+                            jr_client
+                                .send_direct(
+                                    JobResultData::CoverageData,
+                                    HashMap::from([("count".to_string(), 1.0)]),
+                                )
+                                .await;
+                        }
+                        Event::new_crashdump => {
+                            jr_client
+                                .send_direct(
+                                    JobResultData::NewCrashDump,
+                                    HashMap::from([("count".to_string(), 1.0)]),
+                                )
+                                .await;
+                        }
+                        _ => {
+                            warn!("unhandled job result event");
+                        }
+                    }
+                }
                 let destination = path.join(file_name);
                 if let Err(err) = fs::copy(&item, &destination).await {
                     let error_message = format!(
@@ -305,6 +338,29 @@ impl SyncedDir {
 
                 event!(event.clone(); EventData::Path = file_name_event_str);
                 metric!(event.clone(); 1.0; EventData::Path = file_name_str_metric_str);
+                if let Some(jr_client) = jr_client {
+                    match event {
+                        Event::new_result => {
+                            jr_client
+                                .send_direct(
+                                    JobResultData::NewCrashingInput,
+                                    HashMap::from([("count".to_string(), 1.0)]),
+                                )
+                                .await;
+                        }
+                        Event::new_coverage => {
+                            jr_client
+                                .send_direct(
+                                    JobResultData::CoverageData,
+                                    HashMap::from([("count".to_string(), 1.0)]),
+                                )
+                                .await;
+                        }
+                        _ => {
+                            warn!("unhandled job result event");
+                        }
+                    }
+                }
                 if let Err(err) = uploader.upload(item.clone()).await {
                     let error_message = format!(
                         "Couldn't upload file.  path:{} dir:{} err:{:?}",
@@ -336,7 +392,12 @@ impl SyncedDir {
     /// The intent of this is to support use cases where we usually want a directory
     /// to be initialized, but a user-supplied binary, (such as AFL) logically owns
     /// a directory, and may reset it.
-    pub async fn monitor_results(&self, event: Event, ignore_dotfiles: bool) -> Result<()> {
+    pub async fn monitor_results(
+        &self,
+        event: Event,
+        ignore_dotfiles: bool,
+        job_result_client: &Option<TaskJobResultClient>,
+    ) -> Result<()> {
         if let Some(url) = self.remote_path.clone() {
             loop {
                 debug!("waiting to monitor {}", self.local_path.display());
@@ -355,6 +416,7 @@ impl SyncedDir {
                     url.clone(),
                     event.clone(),
                     ignore_dotfiles,
+                    job_result_client,
                 )
                 .await?;
             }
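
Note: call sites that have no job-result queue pass a borrowed `None`. A sketch, where `dir` stands for any `SyncedDir` and `new_result` is the telemetry event used elsewhere in this patch:

    // Monitoring results when no job-result queue is configured.
    let no_client: Option<TaskJobResultClient> = None;
    dir.monitor_results(new_result, false, &no_client).await?;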
diff --git a/src/deployment/bicep-templates/storageAccounts.bicep b/src/deployment/bicep-templates/storageAccounts.bicep
index 6a96cea6a0..27f2da21d8 100644
--- a/src/deployment/bicep-templates/storageAccounts.bicep
+++ b/src/deployment/bicep-templates/storageAccounts.bicep
@@ -33,7 +33,7 @@ var storageAccountFuncQueuesParams = [
   'update-queue'
   'webhooks'
   'signalr-events'
-  'custom-metrics'
+  'job-result'
 ]
 var fileChangesQueueIndex = 0
 
diff --git a/src/integration-tests/integration-test.py b/src/integration-tests/integration-test.py
index 057404ceff..15ffcfb9fe 100755
--- a/src/integration-tests/integration-test.py
+++ b/src/integration-tests/integration-test.py
@@ -88,6 +88,7 @@ class Integration(BaseModel):
     target_method: Optional[str]
     setup_dir: Optional[str]
     target_env: Optional[Dict[str, str]]
+    pool: PoolName
 
 
 TARGETS: Dict[str, Integration] = {
@@ -97,6 +98,7 @@ class Integration(BaseModel):
         target_exe="fuzz.exe",
         inputs="seeds",
         wait_for_files={ContainerType.unique_reports: 1},
+        pool="linux",
     ),
     "linux-libfuzzer": Integration(
         template=TemplateType.libfuzzer,
@@ -124,6 +126,7 @@ class Integration(BaseModel):
             "--only_asan_failures",
             "--write_test_file={extra_output_dir}/test.txt",
         ],
+        pool="linux",
     ),
     "linux-libfuzzer-with-options": Integration(
         template=TemplateType.libfuzzer,
@@ -137,6 +140,7 @@ class Integration(BaseModel):
         },
         reboot_after_setup=True,
         fuzzing_target_options=["-runs=10000000"],
+        pool="linux",
     ),
     "linux-libfuzzer-dlopen": Integration(
         template=TemplateType.libfuzzer,
@@ -150,6 +154,7 @@ class Integration(BaseModel):
         },
         reboot_after_setup=True,
         use_setup=True,
+        pool="linux",
     ),
     "linux-libfuzzer-linked-library": Integration(
         template=TemplateType.libfuzzer,
@@ -163,6 +168,7 @@ class Integration(BaseModel):
         },
         reboot_after_setup=True,
         use_setup=True,
+        pool="linux",
     ),
     "linux-libfuzzer-dotnet": Integration(
         template=TemplateType.libfuzzer_dotnet,
@@ -180,6 +186,7 @@ class Integration(BaseModel):
             ContainerType.unique_reports: 1,
         },
         test_repro=False,
+        pool="linux",
     ),
     "linux-libfuzzer-aarch64-crosscompile": Integration(
         template=TemplateType.libfuzzer_qemu_user,
@@ -189,6 +196,7 @@ class Integration(BaseModel):
         use_setup=True,
         wait_for_files={ContainerType.inputs: 2, ContainerType.crashes: 1},
         test_repro=False,
+        pool="linux",
     ),
     "linux-libfuzzer-rust": Integration(
         template=TemplateType.libfuzzer,
@@ -196,6 +204,7 @@ class Integration(BaseModel):
         target_exe="fuzz_target_1",
         wait_for_files={ContainerType.unique_reports: 1, ContainerType.coverage: 1},
         fuzzing_target_options=["--test:{extra_setup_dir}"],
+        pool="linux",
     ),
     "linux-trivial-crash": Integration(
         template=TemplateType.radamsa,
@@ -204,6 +213,7 @@ class Integration(BaseModel):
         inputs="seeds",
         wait_for_files={ContainerType.unique_reports: 1},
         inject_fake_regression=True,
+        pool="linux",
     ),
     "linux-trivial-crash-asan": Integration(
         template=TemplateType.radamsa,
@@ -213,6 +223,28 @@ class Integration(BaseModel):
         wait_for_files={ContainerType.unique_reports: 1},
         check_asan_log=True,
         disable_check_debugger=True,
+        pool="linux",
+    ),
+    # TODO: Don't install the OMS extension on Linux anymore
+    # TODO: Figure out why non-Mariner work is being scheduled to the Mariner pool
+    "mariner-libfuzzer": Integration(
+        template=TemplateType.libfuzzer,
+        os=OS.linux,
+        target_exe="fuzz.exe",
+        inputs="seeds",
+        wait_for_files={
+            ContainerType.unique_reports: 1,
+            ContainerType.coverage: 1,
+            ContainerType.inputs: 2,
+            ContainerType.extra_output: 1,
+        },
+        reboot_after_setup=True,
+        inject_fake_regression=True,
+        fuzzing_target_options=[
+            "--test:{extra_setup_dir}",
+            "--write_test_file={extra_output_dir}/test.txt",
+        ],
+        pool=PoolName("mariner"),
     ),
     "windows-libfuzzer": Integration(
         template=TemplateType.libfuzzer,
@@ -234,6 +266,7 @@ class Integration(BaseModel):
             "--only_asan_failures",
             "--write_test_file={extra_output_dir}/test.txt",
         ],
+        pool="windows",
     ),
     "windows-libfuzzer-linked-library": Integration(
         template=TemplateType.libfuzzer,
@@ -246,6 +279,7 @@ class Integration(BaseModel):
             ContainerType.coverage: 1,
         },
         use_setup=True,
+        pool="windows",
     ),
     "windows-libfuzzer-load-library": Integration(
         template=TemplateType.libfuzzer,
@@ -258,6 +292,7 @@ class Integration(BaseModel):
             ContainerType.coverage: 1,
         },
         use_setup=True,
+        pool="windows",
     ),
     "windows-libfuzzer-dotnet": Integration(
         template=TemplateType.libfuzzer_dotnet,
@@ -275,6 +310,7 @@ class Integration(BaseModel):
             ContainerType.unique_reports: 1,
         },
         test_repro=False,
+        pool="windows",
     ),
     "windows-trivial-crash": Integration(
         template=TemplateType.radamsa,
@@ -283,6 +319,7 @@ class Integration(BaseModel):
         inputs="seeds",
         wait_for_files={ContainerType.unique_reports: 1},
         inject_fake_regression=True,
+        pool="windows",
     ),
 }
 
@@ -351,7 +388,7 @@ def try_info_get(data: Any) -> None:
 
         self.inject_log(self.start_log_marker)
         for entry in os_list:
-            name = PoolName(f"testpool-{entry.name}-{self.test_id}")
+            name = self.build_pool_name(entry.name)
             self.logger.info("creating pool: %s:%s", entry.name, name)
             self.of.pools.create(name, entry)
             self.logger.info("creating scaleset for pool: %s", name)
@@ -359,6 +396,15 @@ def try_info_get(data: Any) -> None:
                 name, pool_size, region=region, initial_size=pool_size
             )
 
+        name = self.build_pool_name("mariner")
+        self.logger.info("creating pool: %s:%s", "mariner", name)
+        self.of.pools.create(name, OS.linux)
+        self.logger.info("creating scaleset for pool: %s", name)
+        self.of.scalesets.create(
+            name, pool_size, region=region, initial_size=pool_size, image="MicrosoftCBLMariner:cbl-mariner:cbl-mariner-2-gen2:latest"
+        )
+
+
     class UnmanagedPool:
         def __init__(
             self,
@@ -560,12 +606,9 @@ def launch(
     ) -> List[UUID]:
         """Launch all of the fuzzing templates"""
 
-        pools: Dict[OS, Pool] = {}
+        pool: Optional[PoolName] = None
         if unmanaged_pool is not None:
-            pools[unmanaged_pool.the_os] = self.of.pools.get(unmanaged_pool.pool_name)
-        else:
-            for pool in self.of.pools.list():
-                pools[pool.os] = pool
+            pool = unmanaged_pool.pool_name
 
         job_ids = []
 
@@ -576,8 +619,8 @@ def launch(
             if config.os not in os_list:
                 continue
 
-            if config.os not in pools.keys():
-                raise Exception(f"No pool for target: {target} ,os: {config.os}")
+            # use the unmanaged pool when provided, else the target's own pool
+            job_pool = pool or self.build_pool_name(config.pool)
 
             self.logger.info("launching: %s", target)
 
@@ -601,8 +644,9 @@ def launch(
                 setup = Directory(os.path.join(setup, config.nested_setup_dir))
 
             job: Optional[Job] = None
+
             job = self.build_job(
-                duration, pools, target, config, setup, target_exe, inputs
+                duration, job_pool, target, config, setup, target_exe, inputs
             )
 
             if config.inject_fake_regression and job is not None:
@@ -618,7 +662,7 @@ def launch(
     def build_job(
         self,
         duration: int,
-        pools: Dict[OS, Pool],
+        pool: PoolName,
         target: str,
         config: Integration,
         setup: Optional[Directory],
@@ -634,7 +678,7 @@ def build_job(
                 self.project,
                 target,
                 BUILD,
-                pools[config.os].name,
+                pool,
                 target_exe=target_exe,
                 inputs=inputs,
                 setup_dir=setup,
@@ -659,7 +703,7 @@ def build_job(
                 self.project,
                 target,
                 BUILD,
-                pools[config.os].name,
+                pool,
                 target_dll=File(config.target_exe),
                 inputs=inputs,
                 setup_dir=setup,
@@ -675,7 +719,7 @@ def build_job(
                 self.project,
                 target,
                 BUILD,
-                pools[config.os].name,
+                pool,
                 inputs=inputs,
                 target_exe=target_exe,
                 duration=duration,
@@ -688,7 +732,7 @@ def build_job(
                 self.project,
                 target,
                 BUILD,
-                pool_name=pools[config.os].name,
+                pool_name=pool,
                 target_exe=target_exe,
                 inputs=inputs,
                 setup_dir=setup,
@@ -703,7 +747,7 @@ def build_job(
                 self.project,
                 target,
                 BUILD,
-                pool_name=pools[config.os].name,
+                pool_name=pool,
                 target_exe=target_exe,
                 inputs=inputs,
                 setup_dir=setup,
@@ -1233,6 +1277,9 @@ def check_logs_for_errors(self) -> None:
 
         if seen_errors:
             raise Exception("logs included errors")
+
+    def build_pool_name(self, os_type: str) -> PoolName:
+        return PoolName(f"testpool-{os_type}-{self.test_id}")
 
 
 class Run(Command):
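
The per-target pool selection above boils down to: an unmanaged pool, when
supplied, wins; otherwise each Integration's declared `pool` is expanded via
`build_pool_name`. A self-contained Python sketch (illustrative helper, not
part of the file):

# Sketch only: mirrors the selection performed in launch() above.
def resolve_pool(test_id: str, config_pool: str, unmanaged_pool_name=None) -> str:
    if unmanaged_pool_name is not None:
        # an unmanaged pool overrides every target's declared pool
        return unmanaged_pool_name
    # e.g. "mariner" -> "testpool-mariner-<test_id>"
    return f"testpool-{config_pool}-{test_id}"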
diff --git a/src/runtime-tools/linux/setup.sh b/src/runtime-tools/linux/setup.sh
old mode 100755
new mode 100644
index f6859003b4..794e827f4d
--- a/src/runtime-tools/linux/setup.sh
+++ b/src/runtime-tools/linux/setup.sh
@@ -18,6 +18,14 @@ export DOTNET_CLI_HOME="$DOTNET_ROOT"
 export ONEFUZZ_ROOT=/onefuzz
 export LLVM_SYMBOLIZER_PATH=/onefuzz/bin/llvm-symbolizer
 
+# `logger` won't work on Mariner unless we install these packages first
+if type yum > /dev/null 2> /dev/null; then
+    until yum install -y util-linux sudo; do
+        echo "yum failed.  sleep 10s, then retrying"
+        sleep 10
+    done
+fi
+
 logger "onefuzz: making directories"
 sudo mkdir -p /onefuzz/downloaded
 sudo chown -R $(whoami) /onefuzz
@@ -134,31 +142,53 @@ if type apt > /dev/null 2> /dev/null; then
         sudo ln -f -s $(which llvm-symbolizer-12) $LLVM_SYMBOLIZER_PATH
     fi
 
-    # Install dotnet
+    # Needed to install dotnet
     until sudo apt install -y curl libicu-dev; do
         logger "apt failed, sleeping 10s then retrying"
         sleep 10
     done
+elif type yum > /dev/null 2> /dev/null; then
+    until yum install -y gdb gdb-gdbserver libunwind awk ca-certificates tar yum-utils shadow-utils cronie procps; do
+        echo "yum failed.  sleep 10s, then retrying"
+        sleep 10
+    done
+
+    # Install updated Microsoft Open Management Infrastructure - github.com/microsoft/omi
+    yum-config-manager --add-repo=https://packages.microsoft.com/config/rhel/8/prod.repo 2>&1 | logger -s -i -t 'onefuzz-OMI-add-MS-repo'
+    yum install -y omi 2>&1 | logger -s -i -t 'onefuzz-OMI-install'
 
-    logger "downloading dotnet install"
-    curl --retry 10 -sSL https://dot.net/v1/dotnet-install.sh -o dotnet-install.sh 2>&1 | logger -s -i -t 'onefuzz-curl-dotnet-install'
-    chmod +x dotnet-install.sh
 
-    for version in "${DOTNET_VERSIONS[@]}"; do
-        logger "running dotnet install $version"
-        /bin/bash ./dotnet-install.sh --channel "$version" --install-dir "$DOTNET_ROOT" 2>&1 | logger -s -i -t 'onefuzz-dotnet-setup'
-    done
-    rm dotnet-install.sh
-
-    logger "install dotnet tools"
-    pushd "$DOTNET_ROOT"
-    ls -lah 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools'
-    "$DOTNET_ROOT"/dotnet tool install dotnet-dump --version 6.0.351802 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools'
-    "$DOTNET_ROOT"/dotnet tool install dotnet-coverage --version 17.5 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools'
-    "$DOTNET_ROOT"/dotnet tool install dotnet-sos --version 6.0.351802 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools'
-    popd
+    if ! [ -f ${LLVM_SYMBOLIZER_PATH} ]; then
+        until yum install -y llvm-12.0.1; do
+            echo "yum failed, sleeping 10s then retrying"
+            sleep 10
+        done
+
+        # If specifying symbolizer, exe name must be a "known symbolizer".
+        # Using `llvm-symbolizer` works for clang 8 .. 12.
+        sudo ln -f -s $(which llvm-symbolizer-12) $LLVM_SYMBOLIZER_PATH
+    fi
 fi
 
+# Install dotnet
+logger "downloading dotnet install"
+curl --retry 10 -sSL https://dot.net/v1/dotnet-install.sh -o dotnet-install.sh 2>&1 | logger -s -i -t 'onefuzz-curl-dotnet-install'
+chmod +x dotnet-install.sh
+
+for version in "${DOTNET_VERSIONS[@]}"; do
+    logger "running dotnet install $version"
+    /bin/bash ./dotnet-install.sh --channel "$version" --install-dir "$DOTNET_ROOT" 2>&1 | logger -s -i -t 'onefuzz-dotnet-setup'
+done
+rm dotnet-install.sh
+
+logger "install dotnet tools"
+pushd "$DOTNET_ROOT"
+ls -lah 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools'
+"$DOTNET_ROOT"/dotnet tool install dotnet-dump --version 6.0.351802 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools'
+"$DOTNET_ROOT"/dotnet tool install dotnet-coverage --version 17.5 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools'
+"$DOTNET_ROOT"/dotnet tool install dotnet-sos --version 6.0.351802 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools'
+popd
+
 if  [ -v DOCKER_BUILD ]; then
     echo "building for docker"
 elif [ -d /etc/systemd/system ]; then

From 7b404025aa8f5e371721c141b13c2f63a7dfec94 Mon Sep 17 00:00:00 2001
From: Adam <103067949+AdamL-Microsoft@users.noreply.github.com>
Date: Wed, 30 Aug 2023 15:18:51 -0700
Subject: [PATCH 03/16] Redo 8.7.1 (#3469)

* Redo-8.7.1-hotfix

---------

Co-authored-by: Cheick Keita <chkeita@microsoft.com>
---
 CHANGELOG.md    | 6 ++++++
 CURRENT_VERSION | 2 +-
 2 files changed, 7 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index be4779ad77..8d46ea2a0e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,12 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## 8.7.1
+
+### Fixed
+
+* Service: Removed deprecated Azure retention policy setting that was causing scaleset deployment errors [#3452](https://github.com/microsoft/onefuzz/pull/3452)
+
 ## 8.7.0
 
 ### Added
diff --git a/CURRENT_VERSION b/CURRENT_VERSION
index c0bcaebe8f..efeecbe2c5 100644
--- a/CURRENT_VERSION
+++ b/CURRENT_VERSION
@@ -1 +1 @@
-8.7.0
\ No newline at end of file
+8.7.1
\ No newline at end of file

From d99960323de7336e0463f92babb80ebc841fa627 Mon Sep 17 00:00:00 2001
From: Kanan B <32438208+kananb@users.noreply.github.com>
Date: Wed, 30 Aug 2023 16:40:42 -0700
Subject: [PATCH 04/16] Support custom ado fields that mark work items as
 duplicate (#3467)

* Add field to ado config for checking duplicate work items

* Make duplicate fields nullable and add them to the Python models

* Update broken tests

* Update docs to include new ado_duplicate_fields property
---
 .../ado-work-items.json                             |  4 ++++
 docs/notifications/ado.md                           |  7 +++++++
 src/ApiService/ApiService/OneFuzzTypes/Model.cs     |  4 +++-
 .../ApiService/onefuzzlib/notifications/Ado.cs      | 13 +++++++++----
 .../JinjaToScribanMigrationTests.cs                 |  2 ++
 src/ApiService/Tests/OrmModelsTest.cs               |  2 ++
 src/pytypes/onefuzztypes/models.py                  |  1 +
 7 files changed, 28 insertions(+), 5 deletions(-)

diff --git a/contrib/onefuzz-job-azure-devops-pipeline/ado-work-items.json b/contrib/onefuzz-job-azure-devops-pipeline/ado-work-items.json
index eb89fc019d..034d97cf15 100644
--- a/contrib/onefuzz-job-azure-devops-pipeline/ado-work-items.json
+++ b/contrib/onefuzz-job-azure-devops-pipeline/ado-work-items.json
@@ -13,6 +13,10 @@
       "System.AreaPath": "OneFuzz-Ado-Integration",
       "System.Title": "{{report.task_id}}"
     },
+    "ado_duplicate_fields": {
+      "System.Reason": "My custom value that means a work item is a duplicate",
+      "Custom.Work.Item.Field": "My custom value that means a work item is a duplicate"
+    },
     "on_duplicate": {
       "increment": [],
       "comment": "DUP {{report.input_sha256}} <br> Repro Command: <br> <pre> {{ repro_cmd }} </pre> ",
diff --git a/docs/notifications/ado.md b/docs/notifications/ado.md
index 131986afba..09dd5b9072 100644
--- a/docs/notifications/ado.md
+++ b/docs/notifications/ado.md
@@ -51,6 +51,13 @@ clickable, make it a link.
       "System.Title": "{{ report.crash_site }} - {{ report.executable }}",
       "Microsoft.VSTS.TCM.ReproSteps": "This is my call stack: <ul> {{ for item in report.call_stack }} <li> {{ item }} </li> {{ end }} </ul>"
     },
+    "ado_duplicate_fields": {
+      "System.Reason": "My custom value that means a work item is a duplicate",
+      "Custom.Work.Item.Field": "My custom value that means a work item is a duplicate"
+      // note: the fields and values below are checked by default and don't need to be specified
+      // "System.Reason": "Duplicate"
+      // "Microsoft.VSTS.Common.ResolvedReason": "Duplicate"
+    },
     "comment": "This is my comment. {{ report.input_sha256 }} {{ input_url }} <br> <pre>{{ repro_cmd }}</pre>",
     "unique_fields": ["System.Title", "System.AreaPath"],
     "on_duplicate": {
diff --git a/src/ApiService/ApiService/OneFuzzTypes/Model.cs b/src/ApiService/ApiService/OneFuzzTypes/Model.cs
index b839f52ddc..424669899a 100644
--- a/src/ApiService/ApiService/OneFuzzTypes/Model.cs
+++ b/src/ApiService/ApiService/OneFuzzTypes/Model.cs
@@ -689,6 +689,7 @@ public record AdoTemplate(
     List<string> UniqueFields,
     Dictionary<string, string> AdoFields,
     ADODuplicateTemplate OnDuplicate,
+    Dictionary<string, string>? AdoDuplicateFields = null,
     string? Comment = null
     ) : NotificationTemplate {
     public async Task<OneFuzzResultVoid> Validate() {
@@ -704,8 +705,9 @@ public record RenderedAdoTemplate(
     List<string> UniqueFields,
     Dictionary<string, string> AdoFields,
     ADODuplicateTemplate OnDuplicate,
+    Dictionary<string, string>? AdoDuplicateFields = null,
     string? Comment = null
-    ) : AdoTemplate(BaseUrl, AuthToken, Project, Type, UniqueFields, AdoFields, OnDuplicate, Comment);
+    ) : AdoTemplate(BaseUrl, AuthToken, Project, Type, UniqueFields, AdoFields, OnDuplicate, AdoDuplicateFields, Comment);
 
 public record TeamsTemplate(SecretData<string> Url) : NotificationTemplate {
     public Task<OneFuzzResultVoid> Validate() {
diff --git a/src/ApiService/ApiService/onefuzzlib/notifications/Ado.cs b/src/ApiService/ApiService/onefuzzlib/notifications/Ado.cs
index e05bb9bc24..98b857c9bc 100644
--- a/src/ApiService/ApiService/onefuzzlib/notifications/Ado.cs
+++ b/src/ApiService/ApiService/onefuzzlib/notifications/Ado.cs
@@ -239,7 +239,7 @@ private static async Async.Task ProcessNotification(IOnefuzzContext context, Con
 
         var renderedConfig = RenderAdoTemplate(logTracer, renderer, config, instanceUrl);
         var ado = new AdoConnector(renderedConfig, project!, client, instanceUrl, logTracer, await GetValidFields(client, project));
-        await ado.Process(notificationInfo);
+        await ado.Process(notificationInfo, config.AdoDuplicateFields);
     }
 
     public static RenderedAdoTemplate RenderAdoTemplate(ILogger logTracer, Renderer renderer, AdoTemplate original, Uri instanceUrl) {
@@ -291,6 +291,7 @@ public static RenderedAdoTemplate RenderAdoTemplate(ILogger logTracer, Renderer
             original.UniqueFields,
             adoFields,
             onDuplicate,
+            original.AdoDuplicateFields,
             original.Comment != null ? Render(renderer, original.Comment, instanceUrl, logTracer) : null
         );
     }
@@ -525,7 +526,7 @@ private async Async.Task<WorkItem> CreateNew() {
             return (taskType, document);
         }
 
-        public async Async.Task Process(IList<(string, string)> notificationInfo) {
+        public async Async.Task Process(IList<(string, string)> notificationInfo, Dictionary<string, string>? duplicateFields) {
             var updated = false;
             WorkItem? oldestWorkItem = null;
             await foreach (var workItem in ExistingWorkItems(notificationInfo)) {
@@ -535,7 +536,7 @@ public async Async.Task Process(IList<(string, string)> notificationInfo) {
                     _logTracer.AddTags(new List<(string, string)> { ("MatchingWorkItemIds", $"{workItem.Id}") });
                     _logTracer.LogInformation("Found matching work item");
                 }
-                if (IsADODuplicateWorkItem(workItem)) {
+                if (IsADODuplicateWorkItem(workItem, duplicateFields)) {
                     continue;
                 }
 
@@ -575,13 +576,17 @@ public async Async.Task Process(IList<(string, string)> notificationInfo) {
             }
         }
 
-        private static bool IsADODuplicateWorkItem(WorkItem wi) {
+        private static bool IsADODuplicateWorkItem(WorkItem wi, Dictionary<string, string>? duplicateFields) {
             // A work item could have System.State == Resolve && System.Reason == Duplicate
             // OR it could have System.State == Closed && System.Reason == Duplicate
             // I haven't found any other combinations where System.Reason could be duplicate but just to be safe
             // we're explicitly _not_ checking the state of the work item to determine if it's duplicate
             return wi.Fields.ContainsKey("System.Reason") && string.Equals(wi.Fields["System.Reason"].ToString(), "Duplicate", StringComparison.OrdinalIgnoreCase)
             || wi.Fields.ContainsKey("Microsoft.VSTS.Common.ResolvedReason") && string.Equals(wi.Fields["Microsoft.VSTS.Common.ResolvedReason"].ToString(), "Duplicate", StringComparison.OrdinalIgnoreCase)
+            || duplicateFields?.Any(fieldPair => {
+                var (field, value) = fieldPair;
+                return wi.Fields.ContainsKey(field) && string.Equals(wi.Fields[field].ToString(), value, StringComparison.OrdinalIgnoreCase);
+            }) == true
             // Alternatively, the work item can also specify a 'relation' to another work item.
             // This is typically used to create parent/child relationships between work items but can also
             // Be used to mark duplicates so we should check this as well.
diff --git a/src/ApiService/IntegrationTests/JinjaToScribanMigrationTests.cs b/src/ApiService/IntegrationTests/JinjaToScribanMigrationTests.cs
index 0ae3b11cb5..4033a05369 100644
--- a/src/ApiService/IntegrationTests/JinjaToScribanMigrationTests.cs
+++ b/src/ApiService/IntegrationTests/JinjaToScribanMigrationTests.cs
@@ -111,6 +111,7 @@ public async Async.Task OptionalFieldsAreSupported() {
                 },
                 "{{ if org }} blah {{ end }}"
             ),
+            null,
             "{{ if org }} blah {{ end }}"
         );
 
@@ -137,6 +138,7 @@ public async Async.Task All_ADO_Fields_Are_Migrated() {
                 },
                 "{% if org %} comment {% endif %}"
             ),
+            null,
             "{% if org %} comment {% endif %}"
         );
 
diff --git a/src/ApiService/Tests/OrmModelsTest.cs b/src/ApiService/Tests/OrmModelsTest.cs
index 1aa7d2d163..956d0c30c5 100644
--- a/src/ApiService/Tests/OrmModelsTest.cs
+++ b/src/ApiService/Tests/OrmModelsTest.cs
@@ -232,6 +232,7 @@ from authToken in Arb.Generate<SecretData<string>>()
                         from str in Arb.Generate<NonEmptyString>()
                         from fields in Arb.Generate<List<string>>()
                         from adoFields in Arb.Generate<Dictionary<string, string>>()
+                        from adoDuplicateFields in Arb.Generate<Dictionary<string, string>>()
                         from dupeTemplate in Arb.Generate<ADODuplicateTemplate>()
                         select new AdoTemplate(
                             baseUrl,
@@ -241,6 +242,7 @@ from dupeTemplate in Arb.Generate<ADODuplicateTemplate>()
                             fields,
                             adoFields,
                             dupeTemplate,
+                            adoDuplicateFields,
                             str.Get));
 
         public static Arbitrary<TeamsTemplate> ArbTeamsTemplate()
diff --git a/src/pytypes/onefuzztypes/models.py b/src/pytypes/onefuzztypes/models.py
index a5f8139e97..c888621600 100644
--- a/src/pytypes/onefuzztypes/models.py
+++ b/src/pytypes/onefuzztypes/models.py
@@ -273,6 +273,7 @@ class ADOTemplate(BaseModel):
     unique_fields: List[str]
     comment: Optional[str]
     ado_fields: Dict[str, str]
+    ado_duplicate_fields: Optional[Dict[str, str]]
     on_duplicate: ADODuplicateTemplate
 
     # validator needed to convert auth_token to SecretData
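
Concretely, the duplicate check now treats a work item as a duplicate when any
configured field matches its value case-insensitively, in addition to the two
built-in "Duplicate" checks. A hedged Python rendering of that rule (the
relation-based check in the C# code is omitted here; `is_duplicate` is
illustrative only):

def is_duplicate(fields, duplicate_fields=None):
    # built-in checks first, then any user-configured field/value pairs
    checks = [
        ("System.Reason", "Duplicate"),
        ("Microsoft.VSTS.Common.ResolvedReason", "Duplicate"),
        *(duplicate_fields or {}).items(),
    ]
    return any(
        name in fields and str(fields[name]).lower() == value.lower()
        for name, value in checks
    )

# e.g. is_duplicate({"System.Reason": "duplicate"}) -> True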

From b2435b1aea7e653f8e90142904c504a61ffcca1e Mon Sep 17 00:00:00 2001
From: Marc Greisen <mgreisen@microsoft.com>
Date: Thu, 31 Aug 2023 14:12:19 -0700
Subject: [PATCH 05/16] Update readme with archive message (#3408)

Co-authored-by: Adam <103067949+AdamL-Microsoft@users.noreply.github.com>
---
 README.md | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/README.md b/README.md
index 010148dd3a..486dae6c15 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,20 @@
 # <img src="docs/onefuzz_text.svg" height="120" alt="OneFuzz" />
 
+# IMPORTANT NOTICE
+
+**_Since September 2020 when OneFuzz was first open sourced, we’ve been on a journey to create a best-in-class orchestrator for running fuzzers, driving security and quality into our products._**
+
+
+**_Initially launched by a small group in MSR, OneFuzz has now become a significant internal platform within Microsoft. As such, we are regretfully archiving the project to focus our attention on becoming a more deeply integrated service within the company. Unfortunately, we aren’t a large enough team to live in both the open-source world and the internal Microsoft world with its own unique set of requirements._**
+
+**_Our current plan is to archive the project in the next few months. That means we’ll still be making updates for a little while. Of course, even after it’s archived, you’ll still be able to fork it and make the changes you need. Once we’ve decided on a specific date for archiving, we’ll update this readme._**
+
+**_Thanks for taking the journey with us._**
+
+**_The OneFuzz team._**
+
+---
+
 [![Onefuzz build status](https://github.com/microsoft/onefuzz/workflows/Build/badge.svg?branch=main)](https://github.com/microsoft/onefuzz/actions/workflows/ci.yml?query=branch%3Amain)
 
 ## A self-hosted Fuzzing-As-A-Service platform

From b913074d529160a57815b25361c4288270f2f97d Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 31 Aug 2023 22:54:54 +0000
Subject: [PATCH 06/16] Bump tokio from 1.30.0 to 1.32.0 in /src/proxy-manager
 (#3425)

Bumps [tokio](https://github.com/tokio-rs/tokio) from 1.30.0 to 1.32.0.
- [Release notes](https://github.com/tokio-rs/tokio/releases)
- [Commits](https://github.com/tokio-rs/tokio/compare/tokio-1.30.0...tokio-1.32.0)

---
updated-dependencies:
- dependency-name: tokio
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 src/proxy-manager/Cargo.lock | 4 ++--
 src/proxy-manager/Cargo.toml | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/proxy-manager/Cargo.lock b/src/proxy-manager/Cargo.lock
index ca4813995e..848ea32156 100644
--- a/src/proxy-manager/Cargo.lock
+++ b/src/proxy-manager/Cargo.lock
@@ -1531,9 +1531,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
 
 [[package]]
 name = "tokio"
-version = "1.30.0"
+version = "1.32.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2d3ce25f50619af8b0aec2eb23deebe84249e19e2ddd393a6e16e3300a6dadfd"
+checksum = "17ed6077ed6cd6c74735e21f37eb16dc3935f96878b1fe961074089cc80893f9"
 dependencies = [
  "backtrace",
  "bytes",
diff --git a/src/proxy-manager/Cargo.toml b/src/proxy-manager/Cargo.toml
index c783e8d3aa..3a377a5f4d 100644
--- a/src/proxy-manager/Cargo.toml
+++ b/src/proxy-manager/Cargo.toml
@@ -20,7 +20,7 @@ serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 storage-queue = { path = "../agent/storage-queue" }
 thiserror = "1.0"
-tokio = { version = "1.29", features = [
+tokio = { version = "1.32", features = [
     "macros",
     "rt-multi-thread",
     "fs",

From 14ab36ed5ffeb81588a8522c9469f59973eb14ef Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 31 Aug 2023 23:33:16 +0000
Subject: [PATCH 07/16] Bump tokio from 1.30.0 to 1.32.0 in /src/agent (#3424)

Bumps [tokio](https://github.com/tokio-rs/tokio) from 1.30.0 to 1.32.0.
- [Release notes](https://github.com/tokio-rs/tokio/releases)
- [Commits](https://github.com/tokio-rs/tokio/compare/tokio-1.30.0...tokio-1.32.0)

---
updated-dependencies:
- dependency-name: tokio
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 src/agent/Cargo.lock                   | 4 ++--
 src/agent/onefuzz-agent/Cargo.toml     | 2 +-
 src/agent/onefuzz-task/Cargo.toml      | 2 +-
 src/agent/onefuzz-telemetry/Cargo.toml | 2 +-
 src/agent/onefuzz/Cargo.toml           | 2 +-
 src/agent/reqwest-retry/Cargo.toml     | 2 +-
 src/agent/storage-queue/Cargo.toml     | 2 +-
 7 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock
index 254684be97..10cb7a7531 100644
--- a/src/agent/Cargo.lock
+++ b/src/agent/Cargo.lock
@@ -3555,9 +3555,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
 
 [[package]]
 name = "tokio"
-version = "1.30.0"
+version = "1.32.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2d3ce25f50619af8b0aec2eb23deebe84249e19e2ddd393a6e16e3300a6dadfd"
+checksum = "17ed6077ed6cd6c74735e21f37eb16dc3935f96878b1fe961074089cc80893f9"
 dependencies = [
  "backtrace",
  "bytes",
diff --git a/src/agent/onefuzz-agent/Cargo.toml b/src/agent/onefuzz-agent/Cargo.toml
index 5ce8669766..90f44147c1 100644
--- a/src/agent/onefuzz-agent/Cargo.toml
+++ b/src/agent/onefuzz-agent/Cargo.toml
@@ -22,7 +22,7 @@ reqwest = { version = "0.11", features = [
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 storage-queue = { path = "../storage-queue" }
-tokio = { version = "1.29", features = ["full"] }
+tokio = { version = "1.32", features = ["full"] }
 url = { version = "2.4", features = ["serde"] }
 uuid = { version = "1.4", features = ["serde", "v4"] }
 clap = { version = "4", features = ["derive", "cargo"] }
diff --git a/src/agent/onefuzz-task/Cargo.toml b/src/agent/onefuzz-task/Cargo.toml
index 4e0bd381b0..cc64929cc6 100644
--- a/src/agent/onefuzz-task/Cargo.toml
+++ b/src/agent/onefuzz-task/Cargo.toml
@@ -48,7 +48,7 @@ stacktrace-parser = { path = "../stacktrace-parser" }
 storage-queue = { path = "../storage-queue" }
 tempfile = "3.7.0"
 thiserror = "1.0"
-tokio = { version = "1.29", features = ["full"] }
+tokio = { version = "1.32", features = ["full"] }
 tokio-util = { version = "0.7", features = ["full"] }
 tokio-stream = "0.1"
 tui = { package = "ratatui", version = "0.22.0", default-features = false, features = [
diff --git a/src/agent/onefuzz-telemetry/Cargo.toml b/src/agent/onefuzz-telemetry/Cargo.toml
index 23574a013f..8f91478b1f 100644
--- a/src/agent/onefuzz-telemetry/Cargo.toml
+++ b/src/agent/onefuzz-telemetry/Cargo.toml
@@ -15,5 +15,5 @@ chrono = { version = "0.4", default-features = false, features = [
 lazy_static = "1.4"
 log = "0.4"
 serde = { version = "1.0", features = ["derive"] }
-tokio = { version = "1.29", features = ["full"] }
+tokio = { version = "1.32", features = ["full"] }
 uuid = { version = "1.4", features = ["serde", "v4"] }
diff --git a/src/agent/onefuzz/Cargo.toml b/src/agent/onefuzz/Cargo.toml
index 1f3c27985c..440a018591 100644
--- a/src/agent/onefuzz/Cargo.toml
+++ b/src/agent/onefuzz/Cargo.toml
@@ -31,7 +31,7 @@ serde = "1.0"
 serde_json = "1.0"
 rand = "0.8"
 serde_derive = "1.0"
-tokio = { version = "1.29", features = ["full"] }
+tokio = { version = "1.32", features = ["full"] }
 tokio-stream = { version = "0.1", features = ["fs", "time", "tokio-util"] }
 tokio-util = { version = "0.7", features = ["full"] }
 uuid = { version = "1.4", features = ["serde", "v4"] }
diff --git a/src/agent/reqwest-retry/Cargo.toml b/src/agent/reqwest-retry/Cargo.toml
index d7d12ff4e8..5ddfbbe419 100644
--- a/src/agent/reqwest-retry/Cargo.toml
+++ b/src/agent/reqwest-retry/Cargo.toml
@@ -19,5 +19,5 @@ reqwest = { version = "0.11", features = [
 thiserror = "1.0"
 
 [dev-dependencies]
-tokio = { version = "1.29", features = ["macros"] }
+tokio = { version = "1.32", features = ["macros"] }
 wiremock = "0.5"
diff --git a/src/agent/storage-queue/Cargo.toml b/src/agent/storage-queue/Cargo.toml
index d5c1c09d08..381a761c74 100644
--- a/src/agent/storage-queue/Cargo.toml
+++ b/src/agent/storage-queue/Cargo.toml
@@ -26,6 +26,6 @@ serde = { version = "1.0", features = ["derive"] }
 serde_derive = "1.0"
 serde_json = "1.0"
 bincode = "1.3"
-tokio = { version = "1.29", features = ["full"] }
+tokio = { version = "1.32", features = ["full"] }
 queue-file = "1.4"
 uuid = { version = "1.4", features = ["serde", "v4"] }

From f141050bbbf7ddc6f42881cb82ded38e2924fdb3 Mon Sep 17 00:00:00 2001
From: Kanan B <32438208+kananb@users.noreply.github.com>
Date: Fri, 1 Sep 2023 09:32:39 -0700
Subject: [PATCH 08/16] Remove unnecessary method argument (#3473)

---
 src/ApiService/ApiService/onefuzzlib/notifications/Ado.cs | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/ApiService/ApiService/onefuzzlib/notifications/Ado.cs b/src/ApiService/ApiService/onefuzzlib/notifications/Ado.cs
index 98b857c9bc..3780bc1b2b 100644
--- a/src/ApiService/ApiService/onefuzzlib/notifications/Ado.cs
+++ b/src/ApiService/ApiService/onefuzzlib/notifications/Ado.cs
@@ -239,7 +239,7 @@ private static async Async.Task ProcessNotification(IOnefuzzContext context, Con
 
         var renderedConfig = RenderAdoTemplate(logTracer, renderer, config, instanceUrl);
         var ado = new AdoConnector(renderedConfig, project!, client, instanceUrl, logTracer, await GetValidFields(client, project));
-        await ado.Process(notificationInfo, config.AdoDuplicateFields);
+        await ado.Process(notificationInfo);
     }
 
     public static RenderedAdoTemplate RenderAdoTemplate(ILogger logTracer, Renderer renderer, AdoTemplate original, Uri instanceUrl) {
@@ -526,7 +526,7 @@ private async Async.Task<WorkItem> CreateNew() {
             return (taskType, document);
         }
 
-        public async Async.Task Process(IList<(string, string)> notificationInfo, Dictionary<string, string>? duplicateFields) {
+        public async Async.Task Process(IList<(string, string)> notificationInfo) {
             var updated = false;
             WorkItem? oldestWorkItem = null;
             await foreach (var workItem in ExistingWorkItems(notificationInfo)) {
@@ -536,7 +536,7 @@ public async Async.Task Process(IList<(string, string)> notificationInfo, Dictio
                     _logTracer.AddTags(new List<(string, string)> { ("MatchingWorkItemIds", $"{workItem.Id}") });
                     _logTracer.LogInformation("Found matching work item");
                 }
-                if (IsADODuplicateWorkItem(workItem, duplicateFields)) {
+                if (IsADODuplicateWorkItem(workItem, _config.AdoDuplicateFields)) {
                     continue;
                 }
 

From d4319d209f5cf511b28ef627eec7e66263e9bd9b Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 4 Sep 2023 23:55:08 +0000
Subject: [PATCH 09/16] Bump elsa from 1.8.1 to 1.9.0 in /src/agent (#3411)

Bumps [elsa](https://github.com/manishearth/elsa) from 1.8.1 to 1.9.0.
- [Commits](https://github.com/manishearth/elsa/compare/v1.8.1...v1.9.0)

---
updated-dependencies:
- dependency-name: elsa
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 src/agent/Cargo.lock                   | 4 ++--
 src/agent/debuggable-module/Cargo.toml | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock
index 10cb7a7531..5a3d59d060 100644
--- a/src/agent/Cargo.lock
+++ b/src/agent/Cargo.lock
@@ -883,9 +883,9 @@ dependencies = [
 
 [[package]]
 name = "elsa"
-version = "1.8.1"
+version = "1.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b5e0aca8dce8856e420195bd13b6a64de3334235ccc9214e824b86b12bf26283"
+checksum = "714f766f3556b44e7e4776ad133fcc3445a489517c25c704ace411bb14790194"
 dependencies = [
  "stable_deref_trait",
 ]
diff --git a/src/agent/debuggable-module/Cargo.toml b/src/agent/debuggable-module/Cargo.toml
index 1cd11dfd30..ee464961f7 100644
--- a/src/agent/debuggable-module/Cargo.toml
+++ b/src/agent/debuggable-module/Cargo.toml
@@ -6,7 +6,7 @@ license = "MIT"
 
 [dependencies]
 anyhow = "1.0"
-elsa = "1.8.1"
+elsa = "1.9.0"
 gimli = "0.27.2"
 goblin = "0.6"
 iced-x86 = "1.20"

From 93b16ec54858bbf14a08516bbe1f9b6c30fed92f Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 5 Sep 2023 00:16:15 +0000
Subject: [PATCH 10/16] Bump tempfile from 3.7.1 to 3.8.0 in /src/agent (#3437)

Bumps [tempfile](https://github.com/Stebalien/tempfile) from 3.7.1 to 3.8.0.
- [Changelog](https://github.com/Stebalien/tempfile/blob/master/CHANGELOG.md)
- [Commits](https://github.com/Stebalien/tempfile/compare/v3.7.1...v3.8.0)

---
updated-dependencies:
- dependency-name: tempfile
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 src/agent/Cargo.lock              | 4 ++--
 src/agent/coverage/Cargo.toml     | 2 +-
 src/agent/onefuzz-task/Cargo.toml | 2 +-
 src/agent/onefuzz/Cargo.toml      | 2 +-
 src/agent/win-util/Cargo.toml     | 2 +-
 5 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock
index 5a3d59d060..b74d4055a7 100644
--- a/src/agent/Cargo.lock
+++ b/src/agent/Cargo.lock
@@ -3468,9 +3468,9 @@ dependencies = [
 
 [[package]]
 name = "tempfile"
-version = "3.7.1"
+version = "3.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dc02fddf48964c42031a0b3fe0428320ecf3a73c401040fc0096f97794310651"
+checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef"
 dependencies = [
  "cfg-if 1.0.0",
  "fastrand 2.0.0",
diff --git a/src/agent/coverage/Cargo.toml b/src/agent/coverage/Cargo.toml
index 29e67523d9..cca6dc2e4b 100644
--- a/src/agent/coverage/Cargo.toml
+++ b/src/agent/coverage/Cargo.toml
@@ -38,5 +38,5 @@ pretty_assertions = "1.4.0"
 insta = { version = "1.31.0", features = ["glob"] }
 coverage = { path = "../coverage" }
 cc = "1.0"
-tempfile = "3.7.0"
+tempfile = "3.8.0"
 dunce = "1.0"
diff --git a/src/agent/onefuzz-task/Cargo.toml b/src/agent/onefuzz-task/Cargo.toml
index cc64929cc6..4b7b4da730 100644
--- a/src/agent/onefuzz-task/Cargo.toml
+++ b/src/agent/onefuzz-task/Cargo.toml
@@ -46,7 +46,7 @@ strum = "0.25"
 strum_macros = "0.25"
 stacktrace-parser = { path = "../stacktrace-parser" }
 storage-queue = { path = "../storage-queue" }
-tempfile = "3.7.0"
+tempfile = "3.8.0"
 thiserror = "1.0"
 tokio = { version = "1.32", features = ["full"] }
 tokio-util = { version = "0.7", features = ["full"] }
diff --git a/src/agent/onefuzz/Cargo.toml b/src/agent/onefuzz/Cargo.toml
index 440a018591..b5b7837d8e 100644
--- a/src/agent/onefuzz/Cargo.toml
+++ b/src/agent/onefuzz/Cargo.toml
@@ -40,7 +40,7 @@ url-escape = "0.1.0"
 storage-queue = { path = "../storage-queue" }
 strum = "0.25"
 strum_macros = "0.25"
-tempfile = "3.7.0"
+tempfile = "3.8.0"
 process_control = "4.0"
 reqwest-retry = { path = "../reqwest-retry" }
 onefuzz-telemetry = { path = "../onefuzz-telemetry" }
diff --git a/src/agent/win-util/Cargo.toml b/src/agent/win-util/Cargo.toml
index 1edaa3fc58..2c4f1065bf 100644
--- a/src/agent/win-util/Cargo.toml
+++ b/src/agent/win-util/Cargo.toml
@@ -33,4 +33,4 @@ features = [
 ]
 
 [dev-dependencies]
-tempfile = "3.7.0"
+tempfile = "3.8.0"

From 7f7ab370f3b1acd188e23c6e917f3555be0c7464 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 5 Sep 2023 00:34:45 +0000
Subject: [PATCH 11/16] Bump tempfile from 3.7.1 to 3.8.0 in /src/proxy-manager
 (#3436)

Bumps [tempfile](https://github.com/Stebalien/tempfile) from 3.7.1 to 3.8.0.
- [Changelog](https://github.com/Stebalien/tempfile/blob/master/CHANGELOG.md)
- [Commits](https://github.com/Stebalien/tempfile/compare/v3.7.1...v3.8.0)

---
updated-dependencies:
- dependency-name: tempfile
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 src/proxy-manager/Cargo.lock | 4 ++--
 src/proxy-manager/Cargo.toml | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/proxy-manager/Cargo.lock b/src/proxy-manager/Cargo.lock
index 848ea32156..4eae33643a 100644
--- a/src/proxy-manager/Cargo.lock
+++ b/src/proxy-manager/Cargo.lock
@@ -1474,9 +1474,9 @@ dependencies = [
 
 [[package]]
 name = "tempfile"
-version = "3.7.1"
+version = "3.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dc02fddf48964c42031a0b3fe0428320ecf3a73c401040fc0096f97794310651"
+checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef"
 dependencies = [
  "cfg-if",
  "fastrand",
diff --git a/src/proxy-manager/Cargo.toml b/src/proxy-manager/Cargo.toml
index 3a377a5f4d..b2258e994b 100644
--- a/src/proxy-manager/Cargo.toml
+++ b/src/proxy-manager/Cargo.toml
@@ -31,4 +31,4 @@ reqwest-retry = { path = "../agent/reqwest-retry" }
 onefuzz-telemetry = { path = "../agent/onefuzz-telemetry" }
 uuid = { version = "1.4", features = ["serde"] }
 log = "0.4"
-tempfile = "3.7.0"
+tempfile = "3.8.0"

From b2e6a07ac0cd16780c878430d2235bdce0045be8 Mon Sep 17 00:00:00 2001
From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com>
Date: Tue, 5 Sep 2023 11:20:52 -0700
Subject: [PATCH 12/16] Updating requirements.txt to accept >= onefuzztypes.
 (#3477)

* Updating requirements.txt to accept >= onefuzztypes.

* Trying to loosen the version restriction.
---
 src/ci/set-versions.sh   | 2 +-
 src/ci/unset-versions.sh | 2 +-
 src/cli/requirements.txt | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/ci/set-versions.sh b/src/ci/set-versions.sh
index 34c30ea37c..174b77f8ea 100755
--- a/src/ci/set-versions.sh
+++ b/src/ci/set-versions.sh
@@ -14,4 +14,4 @@ SET_VERSIONS="src/pytypes/onefuzztypes/__version__.py src/cli/onefuzz/__version_
 SET_REQS="src/cli/requirements.txt"
 
 sed -i "s/0.0.0/${VERSION}/" ${SET_VERSIONS}
-sed -i "s/onefuzztypes==0.0.0/onefuzztypes==${VERSION}/" ${SET_REQS}
+sed -i "s/onefuzztypes~=0.0.0/onefuzztypes==${VERSION}/" ${SET_REQS}
diff --git a/src/ci/unset-versions.sh b/src/ci/unset-versions.sh
index 95cbfac3d5..9312e6b4d8 100755
--- a/src/ci/unset-versions.sh
+++ b/src/ci/unset-versions.sh
@@ -12,4 +12,4 @@ SET_VERSIONS="src/pytypes/onefuzztypes/__version__.py src/cli/onefuzz/__version_
 SET_REQS="src/cli/requirements.txt"
 
 sed -i 's/__version__ = .*/__version__ = "0.0.0"/' ${SET_VERSIONS}
-sed -i "s/onefuzztypes==.*/onefuzztypes==0.0.0/" ${SET_REQS}
+sed -i "s/onefuzztypes==.*/onefuzztypes~=0.0.0/" ${SET_REQS}
diff --git a/src/cli/requirements.txt b/src/cli/requirements.txt
index d9977fe03e..475f86acc4 100644
--- a/src/cli/requirements.txt
+++ b/src/cli/requirements.txt
@@ -21,4 +21,4 @@ opentelemetry-instrumentation-requests==0.37b0
 # install rsa version >=4.7 to fix CVE-2020-25658
 rsa>=4.7
 # onefuzztypes version is set during build
-onefuzztypes==0.0.0
+onefuzztypes~=0.0.0
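
`~=0.0.0` is a PEP 440 compatible-release pin, equivalent to >=0.0.0, ==0.0.*:
any 0.0.x dev build satisfies it, while the CI build still pins an exact
version via set-versions.sh. A quick check using the third-party `packaging`
library (an assumption for illustration; it is not a dependency of these
scripts):

from packaging.specifiers import SpecifierSet

spec = SpecifierSet("~=0.0.0")
assert "0.0.9" in spec      # compatible patch-level release: accepted
assert "0.1.0" not in spec  # next minor release: rejected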

From aa9c9ea1779eba98087acca2380b2f0763a2d402 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 5 Sep 2023 20:19:44 +0000
Subject: [PATCH 13/16] Bump notify from 6.0.1 to 6.1.1 in /src/agent (#3435)

Bumps [notify](https://github.com/notify-rs/notify) from 6.0.1 to 6.1.1.
- [Release notes](https://github.com/notify-rs/notify/releases)
- [Changelog](https://github.com/notify-rs/notify/blob/main/CHANGELOG.md)
- [Commits](https://github.com/notify-rs/notify/compare/notify-6.0.1...notify-6.1.1)

---
updated-dependencies:
- dependency-name: notify
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 src/agent/Cargo.lock         | 9 +++++----
 src/agent/onefuzz/Cargo.toml | 2 +-
 2 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock
index b74d4055a7..88aef03b88 100644
--- a/src/agent/Cargo.lock
+++ b/src/agent/Cargo.lock
@@ -2035,18 +2035,19 @@ dependencies = [
 
 [[package]]
 name = "notify"
-version = "6.0.1"
+version = "6.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5738a2795d57ea20abec2d6d76c6081186709c0024187cd5977265eda6598b51"
+checksum = "6205bd8bb1e454ad2e27422015fb5e4f2bcc7e08fa8f27058670d208324a4d2d"
 dependencies = [
- "bitflags 1.3.2",
+ "bitflags 2.3.3",
  "filetime",
  "inotify",
  "kqueue",
  "libc",
+ "log",
  "mio 0.8.8",
  "walkdir",
- "windows-sys 0.45.0",
+ "windows-sys 0.48.0",
 ]
 
 [[package]]
diff --git a/src/agent/onefuzz/Cargo.toml b/src/agent/onefuzz/Cargo.toml
index b5b7837d8e..8c09477891 100644
--- a/src/agent/onefuzz/Cargo.toml
+++ b/src/agent/onefuzz/Cargo.toml
@@ -18,7 +18,7 @@ futures-util = "0.3"
 hex = "0.4"
 lazy_static = "1.4"
 log = "0.4"
-notify = { version = "6.0.1", default-features = false }
+notify = { version = "6.1.1", default-features = false }
 regex = "1.9.1"
 reqwest = { version = "0.11", features = [
     "json",

From 74475cc3fa9995f82db0e879c31b1a07b8034cf9 Mon Sep 17 00:00:00 2001
From: George Pollard <gpollard@microsoft.com>
Date: Wed, 6 Sep 2023 08:44:48 +1200
Subject: [PATCH 14/16] Bump azure_* crates (#3478)

---
 src/agent/Cargo.lock               | 30 ++++++++++--------------------
 src/agent/onefuzz-agent/Cargo.toml |  6 +++---
 src/agent/onefuzz-task/Cargo.toml  |  6 +++---
 3 files changed, 16 insertions(+), 26 deletions(-)

diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock
index 88aef03b88..1885f8b76d 100644
--- a/src/agent/Cargo.lock
+++ b/src/agent/Cargo.lock
@@ -200,9 +200,9 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
 
 [[package]]
 name = "azure_core"
-version = "0.13.0"
+version = "0.14.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "86b0f0eea648347e40f5f7f7e6bfea4553bcefad0fbf52044ea339e5ce3aba61"
+checksum = "2331555a3618a32516c6172a63e9fec4af0edb43c6fcfeb5303a0716fc34498b"
 dependencies = [
  "async-trait",
  "base64 0.21.2",
@@ -214,7 +214,7 @@ dependencies = [
  "log",
  "paste",
  "pin-project",
- "quick-xml 0.29.0",
+ "quick-xml",
  "rand 0.8.5",
  "reqwest",
  "rustc_version",
@@ -227,9 +227,9 @@ dependencies = [
 
 [[package]]
 name = "azure_storage"
-version = "0.13.0"
+version = "0.14.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "32d9cfa13ed9acb51cd663e04f343bd550a92b455add96c90de387a9a6bc4dbc"
+checksum = "16565073e533053f4e29e6b139de2af758e984108a1cddbb1a432387e7f4474d"
 dependencies = [
  "RustyXML",
  "async-trait",
@@ -250,9 +250,9 @@ dependencies = [
 
 [[package]]
 name = "azure_storage_blobs"
-version = "0.13.1"
+version = "0.14.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "57cb0fe58af32a3fb49e560613cb1e4937f9f13161a2c1caf1bba0224435f2af"
+checksum = "0900e63940d1ba51039efda3d8cf658157a1c75449081a6e18069d2588809329"
 dependencies = [
  "RustyXML",
  "azure_core",
@@ -510,7 +510,7 @@ name = "cobertura"
 version = "0.1.0"
 dependencies = [
  "anyhow",
- "quick-xml 0.30.0",
+ "quick-xml",
 ]
 
 [[package]]
@@ -2194,7 +2194,7 @@ dependencies = [
  "coverage",
  "debuggable-module",
  "pretty_assertions",
- "quick-xml 0.30.0",
+ "quick-xml",
  "serde",
  "serde_json",
 ]
@@ -2588,16 +2588,6 @@ dependencies = [
  "snafu",
 ]
 
-[[package]]
-name = "quick-xml"
-version = "0.29.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "81b9228215d82c7b61490fec1de287136b5de6f5700f6e58ea9ad61a7964ca51"
-dependencies = [
- "memchr",
- "serde",
-]
-
 [[package]]
 name = "quick-xml"
 version = "0.30.0"
@@ -3279,7 +3269,7 @@ dependencies = [
  "flume",
  "num_cpus",
  "queue-file",
- "quick-xml 0.30.0",
+ "quick-xml",
  "regex",
  "reqwest",
  "reqwest-retry",
diff --git a/src/agent/onefuzz-agent/Cargo.toml b/src/agent/onefuzz-agent/Cargo.toml
index 90f44147c1..bc73d37973 100644
--- a/src/agent/onefuzz-agent/Cargo.toml
+++ b/src/agent/onefuzz-agent/Cargo.toml
@@ -31,13 +31,13 @@ onefuzz-telemetry = { path = "../onefuzz-telemetry" }
 backtrace = "0.3"
 ipc-channel = { git = "https://github.com/servo/ipc-channel", rev = "7f432aa" }
 dynamic-library = { path = "../dynamic-library" }
-azure_core = { version = "0.13", default-features = false, features = [
+azure_core = { version = "0.14", default-features = false, features = [
     "enable_reqwest",
 ] }
-azure_storage = { version = "0.13", default-features = false, features = [
+azure_storage = { version = "0.14", default-features = false, features = [
     "enable_reqwest",
 ] }
-azure_storage_blobs = { version = "0.13", default-features = false, features = [
+azure_storage_blobs = { version = "0.14", default-features = false, features = [
     "enable_reqwest",
 ] }
 
diff --git a/src/agent/onefuzz-task/Cargo.toml b/src/agent/onefuzz-task/Cargo.toml
index 4b7b4da730..def8d8eab2 100644
--- a/src/agent/onefuzz-task/Cargo.toml
+++ b/src/agent/onefuzz-task/Cargo.toml
@@ -62,13 +62,13 @@ chrono = { version = "0.4", default-features = false, features = [
 ] }
 ipc-channel = { git = "https://github.com/servo/ipc-channel", rev = "7f432aa" }
 
-azure_core = { version = "0.13", default-features = false, features = [
+azure_core = { version = "0.14", default-features = false, features = [
     "enable_reqwest",
 ] }
-azure_storage = { version = "0.13", default-features = false, features = [
+azure_storage = { version = "0.14", default-features = false, features = [
     "enable_reqwest",
 ] }
-azure_storage_blobs = { version = "0.13", default-features = false, features = [
+azure_storage_blobs = { version = "0.14", default-features = false, features = [
     "enable_reqwest",
 ] }
 

From 6e4fceb2cbfd3011970e50bafa6aac9c7f744c8a Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Wed, 6 Sep 2023 09:44:38 +0000
Subject: [PATCH 15/16] Bump nix from 0.26.2 to 0.27.1 in /src/agent

Bumps [nix](https://github.com/nix-rust/nix) from 0.26.2 to 0.27.1.
- [Changelog](https://github.com/nix-rust/nix/blob/master/CHANGELOG.md)
- [Commits](https://github.com/nix-rust/nix/compare/v0.26.2...v0.27.1)

---
updated-dependencies:
- dependency-name: nix
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
---
 src/agent/Cargo.lock               | 32 +++++++++++++++++-------------
 src/agent/coverage/Cargo.toml      |  2 +-
 src/agent/onefuzz-agent/Cargo.toml |  2 +-
 src/agent/onefuzz/Cargo.toml       |  2 +-
 4 files changed, 21 insertions(+), 17 deletions(-)

diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock
index 1885f8b76d..d6dcae620c 100644
--- a/src/agent/Cargo.lock
+++ b/src/agent/Cargo.lock
@@ -572,7 +572,7 @@ dependencies = [
  "iced-x86",
  "insta",
  "log",
- "nix",
+ "nix 0.27.1",
  "pete",
  "pretty_assertions",
  "procfs",
@@ -719,7 +719,7 @@ version = "3.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2a011bbe2c35ce9c1f143b7af6f94f29a167beb4cd1d29e6740ce836f723120e"
 dependencies = [
- "nix",
+ "nix 0.26.4",
  "windows-sys 0.48.0",
 ]
 
@@ -1998,16 +1998,26 @@ checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54"
 
 [[package]]
 name = "nix"
-version = "0.26.2"
+version = "0.26.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bfdda3d196821d6af13126e40375cdf7da646a96114af134d5f417a9a1dc8e1a"
+checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b"
 dependencies = [
  "bitflags 1.3.2",
  "cfg-if 1.0.0",
  "libc",
  "memoffset 0.7.1",
  "pin-utils",
- "static_assertions",
+]
+
+[[package]]
+name = "nix"
+version = "0.27.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2eb04e9c688eff1c89d72b407f168cf79bb9e867a9d3323ed6c01519eb9cc053"
+dependencies = [
+ "bitflags 2.3.3",
+ "cfg-if 1.0.0",
+ "libc",
 ]
 
 [[package]]
@@ -2122,7 +2132,7 @@ dependencies = [
  "input-tester",
  "lazy_static",
  "log",
- "nix",
+ "nix 0.27.1",
  "notify",
  "onefuzz-result",
  "onefuzz-telemetry",
@@ -2172,7 +2182,7 @@ dependencies = [
  "futures",
  "ipc-channel",
  "log",
- "nix",
+ "nix 0.27.1",
  "onefuzz",
  "onefuzz-telemetry",
  "reqwest",
@@ -2443,7 +2453,7 @@ checksum = "229eb6b3cb0d3d075727c614687ab08384cac3b75fa100e1e08b30d7bee39d00"
 dependencies = [
  "libc",
  "memoffset 0.8.0",
- "nix",
+ "nix 0.26.4",
  "thiserror",
 ]
 
@@ -3249,12 +3259,6 @@ dependencies = [
  "sha2",
 ]
 
-[[package]]
-name = "static_assertions"
-version = "1.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
-
 [[package]]
 name = "storage-queue"
 version = "0.1.0"
diff --git a/src/agent/coverage/Cargo.toml b/src/agent/coverage/Cargo.toml
index cca6dc2e4b..cff869f7d5 100644
--- a/src/agent/coverage/Cargo.toml
+++ b/src/agent/coverage/Cargo.toml
@@ -25,7 +25,7 @@ thiserror = "1.0"
 debugger = { path = "../debugger" }
 
 [target.'cfg(target_os = "linux")'.dependencies]
-nix = "0.26"
+nix = "0.27"
 pete = "0.10"
 # For procfs, opt out of the `chrono` feature; it pulls in an old version
 # of `time`. We do not use the methods that the `chrono` feature enables.
diff --git a/src/agent/onefuzz-agent/Cargo.toml b/src/agent/onefuzz-agent/Cargo.toml
index bc73d37973..41965c6605 100644
--- a/src/agent/onefuzz-agent/Cargo.toml
+++ b/src/agent/onefuzz-agent/Cargo.toml
@@ -43,7 +43,7 @@ azure_storage_blobs = { version = "0.14", default-features = false, features = [
 
 
 [target.'cfg(target_family = "unix")'.dependencies]
-nix = "0.26"
+nix = "0.27"
 
 [target.'cfg(target_family = "windows")'.dependencies]
 windows = "0.48"
diff --git a/src/agent/onefuzz/Cargo.toml b/src/agent/onefuzz/Cargo.toml
index 8c09477891..8fb92e0b9f 100644
--- a/src/agent/onefuzz/Cargo.toml
+++ b/src/agent/onefuzz/Cargo.toml
@@ -59,7 +59,7 @@ windows = { version = "0.48", features = [
 
 [target.'cfg(target_family = "unix")'.dependencies]
 cpp_demangle = "0.4"
-nix = "0.26"
+nix = "0.27"
 
 [target.'cfg(target_os = "linux")'.dependencies]
 pete = "0.10"

From 8b736486f859802d54a3c80b75f4d8ceada10b38 Mon Sep 17 00:00:00 2001
From: George Pollard <gpollard@microsoft.com>
Date: Wed, 6 Sep 2023 21:53:37 +0000
Subject: [PATCH 16/16] Add signal feature

---
 src/agent/onefuzz/Cargo.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/agent/onefuzz/Cargo.toml b/src/agent/onefuzz/Cargo.toml
index 8fb92e0b9f..ba972afeef 100644
--- a/src/agent/onefuzz/Cargo.toml
+++ b/src/agent/onefuzz/Cargo.toml
@@ -59,7 +59,7 @@ windows = { version = "0.48", features = [
 
 [target.'cfg(target_family = "unix")'.dependencies]
 cpp_demangle = "0.4"
-nix = "0.27"
+nix = { version = "0.27", features = ["signal"] }
 
 [target.'cfg(target_os = "linux")'.dependencies]
 pete = "0.10"