// CardGenerator.cs
using System;
using System.Threading.Tasks;
using System.IO;
using System.Drawing;
using Microsoft.Azure.WebJobs.Host;
using static CoderCardsLibrary.ImageHelpersXPlat;
using Microsoft.Azure.WebJobs;
using Microsoft.ProjectOxford.Emotion;
using Microsoft.ProjectOxford.Common.Contract;
using Microsoft.Azure.WebJobs.Extensions.Http;

namespace CoderCardsLibrary
{
    [StorageAccount("AzureWebJobsStorage")]
    public class CardGenerator
    {
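        // Queue-triggered entry point: loads the source photo from the input
        // container, scores it with the Emotion API, and writes the composed
        // card to the output container under the same blob name.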
        [FunctionName("GenerateCard")]
        public static async Task GenerateCard(
            [QueueTrigger("%input-queue%")] CardInfoMessage cardInfo,
            [Blob("%input-container%/{BlobName}", FileAccess.Read)] byte[] image,
            [Blob("%output-container%/{BlobName}", FileAccess.Write)] Stream outputBlob,
            TraceWriter log, ExecutionContext context)
        {
            Emotion[] faceDataArray = await RecognizeEmotionAsync(image, log);

            if (faceDataArray == null) {
                log.Error("No result from Emotion API");
                return;
            }

            if (faceDataArray.Length == 0) {
                log.Error("No face detected in image");
                return;
            }

            var faceData = faceDataArray[0]; // assume exactly one face

            string cardPath = GetCardImageAndScores(faceData.Scores, out double score, context.FunctionDirectory);
            MergeCardImage(cardPath, image, outputBlob, cardInfo.PersonName, cardInfo.Title, score);
        }
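
        // HTTP-triggered helper: clients POST card metadata here and the
        // [return: Queue] binding forwards it to the queue that GenerateCard
        // listens on. A request body might look like this (hypothetical values):
        //   { "PersonName": "Ada Lovelace", "Title": "Engineer", "BlobName": "ada.jpg" }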
        [FunctionName("RequestImageProcessing")]
        [return: Queue("%input-queue%")]
        public static CardInfoMessage RequestImageProcessing(
            [HttpTrigger(AuthorizationLevel.Anonymous, "POST")] CardInfoMessage input, TraceWriter log)
        {
            return input;
        }
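
        // HTTP-triggered endpoint that exposes deployment settings (stage, site
        // and storage URLs, SAS token, container names) to the client app.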
        [FunctionName("Settings")]
        public static SettingsMessage Settings(
            [HttpTrigger(AuthorizationLevel.Anonymous, "GET")] string input, TraceWriter log)
        {
            return new SettingsMessage {
                Stage = Environment.GetEnvironmentVariable("STAGE") ?? "LOCAL",
                SiteURL = Environment.GetEnvironmentVariable("SITEURL"),
                StorageURL = Environment.GetEnvironmentVariable("STORAGE_URL"),
                ContainerSAS = Environment.GetEnvironmentVariable("CONTAINER_SAS"),
                InputContainerName = Environment.GetEnvironmentVariable("input-container"),
                OutputContainerName = Environment.GetEnvironmentVariable("output-container")
            };
        }
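
        // Chooses the card background that matches the dominant emotion and
        // returns its absolute path; the winning score (boosted for anger and
        // happiness) comes back through the out parameter.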
        static string GetCardImageAndScores(EmotionScores scores, out double score, string functionDirectory)
        {
            NormalizeScores(scores);

            var cardBack = "neutral.png";
            score = scores.Neutral;

            const int angerBoost = 2, happyBoost = 4;

            if (scores.Surprise > 10) {
                cardBack = "surprised.png";
                score = scores.Surprise;
            }
            else if (scores.Anger > 10) {
                cardBack = "angry.png";
                score = scores.Anger * angerBoost;
            }
            else if (scores.Happiness > 50) {
                cardBack = "happy.png";
                score = scores.Happiness * happyBoost;
            }

            // Use ".." rather than "..\\" so the path also resolves on non-Windows hosts
            var path = Path.Combine(functionDirectory, "..", AssetsFolderLocation, cardBack);
            return Path.GetFullPath(path);
        }
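
        // NormalizeScores comes from ImageHelpersXPlat and is not shown in this
        // file. The thresholds above (10, 50) only make sense if it rescales the
        // raw 0..1 Emotion API scores to a 0..100 range; a minimal sketch under
        // that assumption (not the library's actual code) might be:
        //
        //   static void NormalizeScores(EmotionScores scores) {
        //       float sum = scores.Anger + scores.Contempt + scores.Disgust + scores.Fear
        //                 + scores.Happiness + scores.Neutral + scores.Sadness + scores.Surprise;
        //       float factor = 100f / sum;
        //       scores.Anger *= factor;      // ...and likewise for each
        //       scores.Happiness *= factor;  // remaining emotion property
        //   }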

        #region Helpers
        private const string EmotionAPIKeyName = "EmotionAPIKey";
        private const string AssetsFolderLocation = "assets";
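
        // Payload posted by the client and queued for GenerateCard; BlobName must
        // match the uploaded image so the blob bindings can resolve it.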
        public class CardInfoMessage
        {
            public string PersonName { get; set; }
            public string Title { get; set; }
            public string BlobName { get; set; }
        }
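
        // Sends the image bytes to the Emotion API and returns one result per
        // detected face, or null when the call fails (logged, not rethrown).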
        static async Task<Emotion[]> RecognizeEmotionAsync(byte[] image, TraceWriter log)
        {
            try
            {
                var emotionServiceClient = new EmotionServiceClient(Environment.GetEnvironmentVariable(EmotionAPIKeyName));

                using (MemoryStream faceImageStream = new MemoryStream(image))
                {
                    return await emotionServiceClient.RecognizeAsync(faceImageStream);
                }
            }
            catch (Exception e)
            {
                log.Error("Error processing image", e);
                return null;
            }
        }
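
        // Response shape returned by the Settings endpoint above.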
        public class SettingsMessage
        {
            public string Stage { get; set; }
            public string SiteURL { get; set; }
            public string StorageURL { get; set; }
            public string ContainerSAS { get; set; }
            public string InputContainerName { get; set; }
            public string OutputContainerName { get; set; }
        }
        #endregion
    }
}