Add samples
leareai committed May 10, 2024
1 parent 53dc4b6 commit 14c7da9
Showing 35 changed files with 1,064 additions and 0 deletions.
com/azure/ai/vision/face/samples/DetectFaces.java
@@ -0,0 +1,54 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

package com.azure.ai.vision.face.samples;

import com.azure.ai.vision.face.FaceClient;
import com.azure.ai.vision.face.FaceClientBuilder;
import com.azure.ai.vision.face.samples.utils.ConfigurationHelper;
import com.azure.ai.vision.face.samples.utils.Resources;
import com.azure.ai.vision.face.samples.utils.Utils;
import com.azure.ai.vision.face.models.DetectOptions;
import com.azure.ai.vision.face.models.FaceDetectionModel;
import com.azure.ai.vision.face.models.FaceDetectionResult;
import com.azure.ai.vision.face.models.FaceRecognitionModel;
import com.azure.core.credential.AzureKeyCredential;
import com.azure.core.util.BinaryData;

import java.nio.file.FileSystems;
import java.util.Arrays;
import java.util.List;

import static com.azure.ai.vision.face.samples.utils.Utils.log;
import static com.azure.ai.vision.face.models.FaceAttributeType.Detection01;
import static com.azure.ai.vision.face.models.FaceAttributeType.Detection03;
import static com.azure.ai.vision.face.models.FaceAttributeType.Recognition04;

public class DetectFaces {
    public static void main(String[] args) {
        FaceClient client = new FaceClientBuilder()
            .endpoint(ConfigurationHelper.getEndpoint())
            .credential(new AzureKeyCredential(ConfigurationHelper.getAccountKey()))
            .buildClient();

        BinaryData imageBinary = BinaryData.fromFile(FileSystems.getDefault().getPath(Resources.TEST_IMAGE_PATH_DETECT_SAMPLE_IMAGE));
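        // Detect faces with the positional overload. Judging by the Face detect API, the
        // arguments after the two models are: returnFaceId (true), returnFaceAttributes,
        // returnFaceLandmarks (false), returnRecognitionModel (true), and the face ID
        // time-to-live in seconds (120).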
        List<FaceDetectionResult> detectResult = client.detect(
            imageBinary,
            FaceDetectionModel.DETECTION_03,
            FaceRecognitionModel.RECOGNITION_04,
            true,
            Arrays.asList(Detection03.HEAD_POSE, Detection03.MASK, Recognition04.QUALITY_FOR_RECOGNITION),
            false,
            true,
            120);

        detectResult.forEach(face -> log("Detected Face by file:" + Utils.toString(face) + "\n"));

        DetectOptions options = new DetectOptions(FaceDetectionModel.DETECTION_01, FaceRecognitionModel.RECOGNITION_04, false)
            .setReturnFaceAttributes(Arrays.asList(Detection01.ACCESSORIES, Detection01.GLASSES, Detection01.EXPOSURE, Detection01.NOISE))
            .setReturnFaceLandmarks(true);

        detectResult = client.detectFromUrl(Resources.TEST_IMAGE_URL_DETECT_SAMPLE, options);
        detectResult.forEach(face -> log("Detected Face from URL:" + Utils.toString(face) + "\n"));
    }
}
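This sample assumes ConfigurationHelper resolves a valid endpoint and key. When a call fails (bad key, wrong endpoint, throttling), azure-core raises com.azure.core.exception.HttpResponseException; a minimal sketch of guarding the URL-based detection above:

    try {
        List<FaceDetectionResult> faces = client.detectFromUrl(Resources.TEST_IMAGE_URL_DETECT_SAMPLE, options);
        faces.forEach(face -> log("Detected Face from URL:" + Utils.toString(face) + "\n"));
    } catch (com.azure.core.exception.HttpResponseException e) {
        // The service's HTTP status code travels with the exception.
        log("Detect call failed with status " + e.getResponse().getStatusCode() + ": " + e.getMessage());
    }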
com/azure/ai/vision/face/samples/DetectFacesAsync.java
@@ -0,0 +1,63 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

package com.azure.ai.vision.face.samples;

import com.azure.ai.vision.face.FaceAsyncClient;
import com.azure.ai.vision.face.FaceClientBuilder;
import com.azure.ai.vision.face.samples.utils.ConfigurationHelper;
import com.azure.ai.vision.face.samples.utils.Resources;
import com.azure.ai.vision.face.samples.utils.Utils;
import com.azure.ai.vision.face.models.DetectOptions;
import com.azure.ai.vision.face.models.FaceDetectionModel;
import com.azure.ai.vision.face.models.FaceDetectionResult;
import com.azure.ai.vision.face.models.FaceRecognitionModel;
import com.azure.core.credential.AzureKeyCredential;
import com.azure.core.util.BinaryData;
import reactor.core.publisher.Flux;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import static com.azure.ai.vision.face.samples.utils.Utils.log;
import static com.azure.ai.vision.face.models.FaceAttributeType.Detection01;
import static com.azure.ai.vision.face.models.FaceAttributeType.Detection03;
import static com.azure.ai.vision.face.models.FaceAttributeType.Recognition04;

public class DetectFacesAsync {
    public static void main(String[] args) {
        List<Flux<?>> fluxList = new ArrayList<>();
        FaceAsyncClient client = new FaceClientBuilder()
            .endpoint(ConfigurationHelper.getEndpoint())
            .credential(new AzureKeyCredential(ConfigurationHelper.getAccountKey()))
            .buildAsyncClient();

        BinaryData imageBinary = Utils.loadFromFile(Resources.TEST_IMAGE_PATH_DETECT_SAMPLE_IMAGE);
        Flux<FaceDetectionResult> flux = client.detect(
            imageBinary,
            FaceDetectionModel.DETECTION_03,
            FaceRecognitionModel.RECOGNITION_04,
            true,
            Arrays.asList(Detection03.HEAD_POSE, Detection03.MASK, Recognition04.QUALITY_FOR_RECOGNITION),
            false,
            true,
            120)
            .flatMapMany(Flux::fromIterable)
            // Log inside the chain: subscribing here and then calling blockLast() below
            // would subscribe the cold Flux twice and send the request twice.
            .doOnNext(face -> log("Detected Face by file:" + Utils.toString(face) + "\n"));

        fluxList.add(flux);

        DetectOptions options = new DetectOptions(FaceDetectionModel.DETECTION_01, FaceRecognitionModel.RECOGNITION_04, false)
            .setReturnFaceAttributes(Arrays.asList(Detection01.ACCESSORIES, Detection01.GLASSES, Detection01.EXPOSURE, Detection01.NOISE))
            .setReturnFaceLandmarks(true);

        flux = client.detectFromUrl(Resources.TEST_IMAGE_URL_DETECT_SAMPLE, options)
            .flatMapMany(Flux::fromIterable)
            .doOnNext(face -> log("Detected Face from URL:" + Utils.toString(face) + "\n"));

        fluxList.add(flux);

        // Each Flux is subscribed exactly once, when blockLast() drains it.
        fluxList.forEach(Flux::blockLast);
    }
}
}
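Incidentally, the two pipelines could also be drained with a single blocking call by merging them; a small sketch using standard Reactor, with the same types as above:

    // Run both detection pipelines concurrently and block once until both finish.
    Flux.merge(fluxList).blockLast();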
com/azure/ai/vision/face/samples/DetectLiveness.java
@@ -0,0 +1,90 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

package com.azure.ai.vision.face.samples;

import com.azure.ai.vision.face.FaceSessionClient;
import com.azure.ai.vision.face.FaceSessionClientBuilder;
import com.azure.ai.vision.face.models.CreateLivenessSessionContent;
import com.azure.ai.vision.face.models.CreateLivenessSessionResult;
import com.azure.ai.vision.face.models.LivenessOperationMode;
import com.azure.ai.vision.face.models.LivenessSession;
import com.azure.ai.vision.face.models.LivenessSessionAuditEntry;
import com.azure.ai.vision.face.models.LivenessSessionItem;
import com.azure.ai.vision.face.samples.utils.ConfigurationHelper;
import com.azure.ai.vision.face.samples.utils.Utils;
import com.azure.core.credential.AzureKeyCredential;

import java.util.List;
import java.util.UUID;

import static com.azure.ai.vision.face.samples.utils.Utils.log;
import static com.azure.ai.vision.face.samples.utils.Utils.logObject;

public class DetectLiveness {
    public static void main(String[] args) {
        // This sample follows the tutorial at
        // https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/tutorials/liveness
        // and walks through the app-server side of the steps in
        // https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/tutorials/liveness#orchestrate-the-liveness-solution

        // 1. A client device sends us a request to start a liveness check.
        waitingForLivenessRequest();

        // 2. Send a request to the Face API to create a liveness session.
        // Create a FaceSessionClient.
        FaceSessionClient faceSessionClient = new FaceSessionClientBuilder()
            .endpoint(ConfigurationHelper.getEndpoint())
            .credential(new AzureKeyCredential(ConfigurationHelper.getAccountKey()))
            .buildClient();

        // Create a liveness session.
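        // PASSIVE selects the passive liveness flow; the device correlation ID identifies the
        // end-user device; setSendResultsToClient(false) keeps the outcome on the app server
        // rather than releasing it to the client SDK; the auth token expires after 60 seconds.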
        CreateLivenessSessionContent parameters = new CreateLivenessSessionContent(LivenessOperationMode.PASSIVE)
            .setDeviceCorrelationId(UUID.randomUUID().toString())
            .setSendResultsToClient(false)
            .setAuthTokenTimeToLiveInSeconds(60);
        CreateLivenessSessionResult livenessSessionCreationResult = faceSessionClient.createLivenessSession(parameters);
        logObject("Create a liveness session: ", livenessSessionCreationResult);
        String token = livenessSessionCreationResult.getAuthToken();

        // 3. Pass the auth token to the client device.
        // The client device performs steps 4, 5, and 6 of 'Orchestrate the liveness solution'.
        sendTokenToClientDevices(token);

        // 7. Wait for the client device to notify us that the liveness session has completed.
        waitingForLivenessSessionComplete();

        // 8. Once the client device has finished, query the result from the service.
        LivenessSession sessionResult = faceSessionClient.getLivenessSessionResult(livenessSessionCreationResult.getSessionId());
        logObject("Get liveness session result after client device completes the liveness check: ", sessionResult);

        // Get the details of every request/response of the liveness check for this session.
        List<LivenessSessionAuditEntry> auditEntries = faceSessionClient.getLivenessSessionAuditEntries(
            livenessSessionCreationResult.getSessionId());
        logObject("Get audit entries: ", auditEntries);

        // We can also list all the liveness sessions of this Face account.
        List<LivenessSessionItem> sessions = faceSessionClient.getLivenessSessions();
        logObject("List all the liveness sessions: ", sessions);

        // Delete this session.
        faceSessionClient.deleteLivenessSession(livenessSessionCreationResult.getSessionId());
    }

    private static void waitingForLivenessSessionComplete() {
        log("Please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/tutorials/liveness and use the mobile client SDK to perform liveness detection in your mobile application.");
        Utils.pressAnyKeyToContinue("Press any key to continue once you have completed these steps; the sample will then fetch the session results ...");
    }

    private static void sendTokenToClientDevices(String token) {
        // Logic to send the token to client devices
    }

    private static void waitingForLivenessRequest() {
        // Logic to wait for a request from a client device
    }
}
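The two stubs above are intentionally empty because the token transport between app server and client device is application-specific. As one hypothetical illustration (not part of the commit; the class name, endpoint path, and port are invented), the JDK's built-in HttpServer could cover steps 1 and 3 in a single handler: the app server waits for the client's start request, runs step 2 inside the handler, and answers with the auth token.

    // Hypothetical sketch: an HTTP endpoint standing in for waitingForLivenessRequest()
    // and sendTokenToClientDevices(). Step 2 of the sample runs inside the handler.
    import com.sun.net.httpserver.HttpServer;

    import java.net.InetSocketAddress;
    import java.nio.charset.StandardCharsets;

    public class LivenessTokenEndpoint {
        public static void main(String[] args) throws Exception {
            HttpServer server = HttpServer.create(new InetSocketAddress(8080), 0);
            server.createContext("/liveness/start", exchange -> {
                // Step 2 would run here: faceSessionClient.createLivenessSession(parameters)
                String token = "<auth token from createLivenessSession>";
                byte[] body = token.getBytes(StandardCharsets.UTF_8);
                // Step 3: return the auth token to the client device that asked for it.
                exchange.sendResponseHeaders(200, body.length);
                exchange.getResponseBody().write(body);
                exchange.close();
            });
            server.start(); // Step 1: serve until a client device starts a liveness check.
        }
    }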
com/azure/ai/vision/face/samples/DetectLivenessAsync.java
@@ -0,0 +1,95 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

package com.azure.ai.vision.face.samples;

import com.azure.ai.vision.face.FaceSessionAsyncClient;
import com.azure.ai.vision.face.FaceSessionClientBuilder;
import com.azure.ai.vision.face.models.CreateLivenessSessionContent;
import com.azure.ai.vision.face.models.CreateLivenessSessionResult;
import com.azure.ai.vision.face.models.LivenessOperationMode;
import com.azure.ai.vision.face.models.LivenessSession;
import com.azure.ai.vision.face.models.LivenessSessionAuditEntry;
import com.azure.ai.vision.face.models.LivenessSessionItem;
import com.azure.ai.vision.face.samples.utils.ConfigurationHelper;
import com.azure.ai.vision.face.samples.utils.Utils;
import com.azure.core.credential.AzureKeyCredential;

import java.util.List;
import java.util.UUID;

import static com.azure.ai.vision.face.samples.utils.Utils.log;
import static com.azure.ai.vision.face.samples.utils.Utils.logObject;

public class DetectLivenessAsync {
    public static void main(String[] args) {
        // This sample follows the tutorial at
        // https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/tutorials/liveness
        // and walks through the app-server side of the steps in
        // https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/tutorials/liveness#orchestrate-the-liveness-solution

        // 1. A client device sends us a request to start a liveness check.
        waitingForLivenessRequest();

        // 2. Send a request to the Face API to create a liveness session.
        // Create a FaceSessionAsyncClient.
        FaceSessionAsyncClient faceSessionClient = new FaceSessionClientBuilder()
            .endpoint(ConfigurationHelper.getEndpoint())
            .credential(new AzureKeyCredential(ConfigurationHelper.getAccountKey()))
            .buildAsyncClient();

        // Create a liveness session.
        CreateLivenessSessionContent parameters = new CreateLivenessSessionContent(LivenessOperationMode.PASSIVE)
            .setDeviceCorrelationId(UUID.randomUUID().toString())
            .setSendResultsToClient(false)
            .setAuthTokenTimeToLiveInSeconds(60);
        CreateLivenessSessionResult livenessSessionCreationResult = faceSessionClient.createLivenessSession(parameters)
            .block();
        logObject("Create a liveness session: ", livenessSessionCreationResult);
        String token = livenessSessionCreationResult.getAuthToken();

        // 3. Pass the auth token to the client device.
        // The client device performs steps 4, 5, and 6 of 'Orchestrate the liveness solution'.
        sendTokenToClientDevices(token);

        // 7. Wait for the client device to notify us that the liveness session has completed.
        waitingForLivenessSessionComplete();

        // 8. Once the client device has finished, query the result from the service.
        LivenessSession sessionResult = faceSessionClient.getLivenessSessionResult(livenessSessionCreationResult.getSessionId())
            .block();
        logObject("Get liveness session result after client device completes the liveness check: ", sessionResult);

        // Get the details of every request/response of the liveness check for this session.
        List<LivenessSessionAuditEntry> auditEntries = faceSessionClient.getLivenessSessionAuditEntries(
                livenessSessionCreationResult.getSessionId())
            .block();
        logObject("Get audit entries: ", auditEntries);

        // We can also list all the liveness sessions of this Face account.
        List<LivenessSessionItem> sessions = faceSessionClient.getLivenessSessions()
            .block();
        logObject("List all the liveness sessions: ", sessions);

        // Delete this session.
        faceSessionClient.deleteLivenessSession(livenessSessionCreationResult.getSessionId())
            .block();
    }

    private static void waitingForLivenessSessionComplete() {
        log("Please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/tutorials/liveness to download the client SDK, start the session, and run the liveness detection.");
        Utils.pressAnyKeyToContinue("Press any key to continue once you have completed these steps; the sample will then fetch the session results ...");
    }

    private static void sendTokenToClientDevices(String token) {
        // Logic to send the token to client devices
    }

    private static void waitingForLivenessRequest() {
        // Logic to wait for a request from a client device
    }
}
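The async sample deliberately blocks after each call so that it reads like its synchronous counterpart. Purely as a sketch (and ignoring the interactive steps 3 to 7), the same service calls compose without blocking through the usual Reactor operators:

    // Sketch: create a session, then fetch its audit entries, with no block() in the chain.
    faceSessionClient.createLivenessSession(parameters)
        .flatMap(created -> faceSessionClient.getLivenessSessionAuditEntries(created.getSessionId()))
        .subscribe(entries -> logObject("Audit entries: ", entries));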
com/azure/ai/vision/face/samples/DetectLivenessWithVerify.java
@@ -0,0 +1,90 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

package com.azure.ai.vision.face.samples;

import com.azure.ai.vision.face.FaceSessionClient;
import com.azure.ai.vision.face.FaceSessionClientBuilder;
import com.azure.ai.vision.face.models.CreateLivenessSessionContent;
import com.azure.ai.vision.face.models.CreateLivenessWithVerifySessionResult;
import com.azure.ai.vision.face.models.LivenessOperationMode;
import com.azure.ai.vision.face.models.LivenessSessionAuditEntry;
import com.azure.ai.vision.face.models.LivenessSessionItem;
import com.azure.ai.vision.face.models.LivenessWithVerifySession;
import com.azure.ai.vision.face.samples.utils.ConfigurationHelper;
import com.azure.ai.vision.face.samples.utils.Resources;
import com.azure.ai.vision.face.samples.utils.Utils;
import com.azure.core.credential.AzureKeyCredential;
import com.azure.core.util.BinaryData;

import java.util.List;
import java.util.UUID;

import static com.azure.ai.vision.face.samples.utils.Utils.log;
import static com.azure.ai.vision.face.samples.utils.Utils.logObject;

public class DetectLivenessWithVerify {
    public static void main(String[] args) {
        // This sample follows the tutorial at
        // https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/tutorials/liveness
        // and walks through the app-server side of the steps in
        // https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/tutorials/liveness#orchestrate-the-liveness-solution

        // 1. A client device sends us a request to start a liveness check.
        waitingForLivenessRequest();

        // 2. Send a request to the Face API to create a liveness-with-verify session, passing a verify image.
        // Create a FaceSessionClient.
        FaceSessionClient faceSessionClient = new FaceSessionClientBuilder()
            .endpoint(ConfigurationHelper.getEndpoint())
            .credential(new AzureKeyCredential(ConfigurationHelper.getAccountKey()))
            .buildClient();

        CreateLivenessSessionContent parameters = new CreateLivenessSessionContent(LivenessOperationMode.PASSIVE)
            .setDeviceCorrelationId(UUID.randomUUID().toString())
            .setSendResultsToClient(false)
            .setAuthTokenTimeToLiveInSeconds(60);
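        // The verify image supplied at session creation is the reference face: the service
        // matches the face observed during the liveness check against this image.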
        BinaryData data = Utils.loadFromFile(Resources.TEST_IMAGE_PATH_DETECTLIVENESS_VERIFYIMAGE);
        CreateLivenessWithVerifySessionResult livenessSessionCreationResult = faceSessionClient.createLivenessWithVerifySession(parameters, data);
        logObject("Create a liveness-with-verify session: ", livenessSessionCreationResult);
        String token = livenessSessionCreationResult.getAuthToken();

        // 3. Pass the auth token to the client device.
        // The client device performs steps 4, 5, and 6 of 'Orchestrate the liveness solution'.
        sendTokenToClientDevices(token);

        // 7. Wait for the client device to notify us that the liveness session has completed.
        waitingForLivenessSessionComplete();

        // 8. Once the client device has finished, query the result from the service.
        LivenessWithVerifySession sessionResult = faceSessionClient.getLivenessWithVerifySessionResult(livenessSessionCreationResult.getSessionId());
        logObject("Get liveness session result after client device completes the liveness check: ", sessionResult);

        // Get the details of every request/response of the liveness check for this session.
        List<LivenessSessionAuditEntry> auditEntries = faceSessionClient.getLivenessWithVerifySessionAuditEntries(
            livenessSessionCreationResult.getSessionId());
        logObject("Get audit entries: ", auditEntries);

        // We can also list all the liveness-with-verify sessions of this Face account.
        List<LivenessSessionItem> sessions = faceSessionClient.getLivenessWithVerifySessions();
        logObject("List all the liveness sessions: ", sessions);

        // Delete this session.
        faceSessionClient.deleteLivenessWithVerifySession(livenessSessionCreationResult.getSessionId());
    }

    private static void waitingForLivenessSessionComplete() {
        log("Please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/tutorials/liveness to download the client SDK, start the session, and run the liveness detection.");
        Utils.pressAnyKeyToContinue("Press any key to continue once you have completed these steps; the sample will then fetch the session results ...");
    }

    private static void sendTokenToClientDevices(String token) {
        // Logic to send the token to client devices
    }

    private static void waitingForLivenessRequest() {
        // Logic to wait for a request from a client device
    }
}