Skip to content
Merged
Show file tree
Hide file tree
Changes from 23 commits
Commits
Show all changes
32 commits
Select commit Hold shift + click to select a range
026c572
Creation of DTS example and passing of completionToken
RyanLettieri Jan 22, 2025
136a3d0
Addressing review feedback
RyanLettieri Jan 22, 2025
6df1064
Reverting dapr readme
RyanLettieri Jan 22, 2025
f731c0d
Adding accessTokenManager class for refreshing credential token
RyanLettieri Jan 24, 2025
eb98416
Adding comments to the example
RyanLettieri Jan 24, 2025
0de338d
Adding in requirement for azure-identity
RyanLettieri Jan 24, 2025
6050771
Moving dts logic into its own module
RyanLettieri Jan 28, 2025
f4f98ee
Fixing whitespace
RyanLettieri Jan 28, 2025
ea837d0
Updating dts client to refresh token
RyanLettieri Jan 29, 2025
f8d79d3
Cleaning up construction of dts objects and improving examples
RyanLettieri Jan 29, 2025
1e67651
Migrating shared access token logic to new grpc class
RyanLettieri Feb 4, 2025
6b1bfd2
Adding log statements to access_token_manager
RyanLettieri Feb 5, 2025
bd56a35
breaking for loop when setting interceptors
RyanLettieri Feb 5, 2025
efc0146
Removing changes to client.py and adding additional steps to readme.md
RyanLettieri Feb 7, 2025
3fd0b08
Refactoring client and worker to pass around interceptors
RyanLettieri Feb 11, 2025
4260d02
Fixing import for DefaultClientInterceptorImpl
RyanLettieri Feb 11, 2025
ec4617c
Addressing round 1 of feedback
RyanLettieri Feb 11, 2025
ed733ea
Fixing interceptor issue
RyanLettieri Feb 12, 2025
99f62d7
Moving some files around to remove dependencies
RyanLettieri Feb 12, 2025
f9d55ab
Addressing more feedback
RyanLettieri Feb 12, 2025
ba1ac4f
More review feedback
RyanLettieri Feb 12, 2025
2c251ea
Passing token credential as an argument rather than 2 strings
RyanLettieri Feb 13, 2025
9c65176
More review feedback for token passing
RyanLettieri Feb 13, 2025
877dabb
Addressing None comment and using correct metadata
RyanLettieri Feb 13, 2025
b39ffad
Updating unit tests
RyanLettieri Feb 13, 2025
33c8b11
Fixing the type for the unit test
RyanLettieri Feb 13, 2025
1da819e
Fixing grpc calls
RyanLettieri Feb 13, 2025
f690264
Merge branch 'main' into durabletask-scheduler
RyanLettieri Feb 13, 2025
6142220
Fix linter errors and update documentation
cgillum Feb 14, 2025
58f4f93
Specifying version requirement for pyproject.toml
RyanLettieri Feb 18, 2025
d82c1b7
Updating README
RyanLettieri Feb 18, 2025
b3a099e
Adding comment for credential type
RyanLettieri Feb 18, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Empty file.
Empty file.
29 changes: 29 additions & 0 deletions durabletask-azuremanaged/durabletask/azuremanaged/client.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.

from typing import Optional
from durabletask.client import TaskHubGrpcClient, OrchestrationStatus
from durabletask.azuremanaged.internal.access_token_manager import AccessTokenManager
from durabletask.azuremanaged.durabletask_grpc_interceptor import DTSDefaultClientInterceptorImpl
from azure.core.credentials import TokenCredential

# Client class used for Durable Task Scheduler (DTS)
class DurableTaskSchedulerClient(TaskHubGrpcClient):
    """TaskHubGrpcClient specialized for the Azure-managed Durable Task
    Scheduler (DTS).

    Installs a DTS-specific gRPC interceptor that attaches the task hub
    name — and, when a credential is supplied, a refreshed bearer token —
    to every outgoing call.
    """

    def __init__(self, *,
                 host_address: str,
                 taskhub: str,
                 token_credential: Optional[TokenCredential] = None,
                 secure_channel: Optional[bool] = True):
        """Create a client bound to a DTS endpoint and task hub.

        Args:
            host_address: gRPC endpoint address of the scheduler.
            taskhub: Name of the task hub; must be a non-empty string.
            token_credential: Azure credential used to obtain bearer tokens.
                May be ``None`` for unauthenticated/local scenarios.
            secure_channel: Whether to connect over a TLS-secured channel.

        Raises:
            ValueError: If ``taskhub`` is ``None`` or empty.
        """
        # Reject both None and the empty string (the previous `== None`
        # comparison let "" through).
        if not taskhub:
            raise ValueError("Taskhub value cannot be empty. Please provide a value for your taskhub")

        self._interceptors = [DTSDefaultClientInterceptorImpl(token_credential, taskhub)]

        # We pass in None for the metadata so we don't construct an additional
        # interceptor in the parent class; the DTS interceptor above already
        # carries the task hub (and authorization) metadata.
        super().__init__(
            host_address=host_address,
            secure_channel=secure_channel,
            metadata=None,
            interceptors=self._interceptors)
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.

from durabletask.internal.grpc_interceptor import _ClientCallDetails, DefaultClientInterceptorImpl
from durabletask.azuremanaged.internal.access_token_manager import AccessTokenManager
from azure.core.credentials import TokenCredential
import grpc

class DTSDefaultClientInterceptorImpl(DefaultClientInterceptorImpl):
    """The class implements a UnaryUnaryClientInterceptor, UnaryStreamClientInterceptor,
    StreamUnaryClientInterceptor and StreamStreamClientInterceptor from grpc to add an
    interceptor to add additional headers to all calls as needed.

    For DTS this means a ``taskhub`` header on every call and, when a
    credential is supplied, an ``authorization`` header whose bearer token
    is refreshed transparently before each call.
    """

    def __init__(self, token_credential: TokenCredential, taskhub_name: str):
        """Initialize the interceptor with the task hub name and an
        optional credential.

        Args:
            token_credential: Azure credential used to mint bearer tokens,
                or ``None`` to send only the task hub header.
            taskhub_name: Name of the task hub, sent as the "taskhub"
                metadata entry.
        """
        metadata = [("taskhub", taskhub_name)]
        super().__init__(metadata)

        # Always define both attributes so _intercept_call can check them
        # safely even when no credential was supplied (previously
        # _token_manager only existed on the authenticated path).
        self._token_credential = token_credential
        self._token_manager = None
        if token_credential is not None:
            self._token_manager = AccessTokenManager(token_credential=self._token_credential)
            token = self._token_manager.get_access_token()
            self._metadata.append(("authorization", token))

    def _intercept_call(
            self, client_call_details: _ClientCallDetails) -> grpc.ClientCallDetails:
        """Internal intercept_call implementation which adds metadata to grpc metadata in the RPC
        call details."""
        # Refresh the auth token if authentication is configured and the
        # header is present
        if self._token_manager is not None and self._metadata is not None:
            for i, (key, _) in enumerate(self._metadata):
                if key.lower() == "authorization":  # Ensure case-insensitive comparison
                    # Re-fetch; AccessTokenManager returns the cached token
                    # unless it is close to expiry
                    self._metadata[i] = ("authorization", self._token_manager.get_access_token())

        return super()._intercept_call(client_call_details)
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
from azure.identity import DefaultAzureCredential, ManagedIdentityCredential
from datetime import datetime, timedelta, timezone
from typing import Optional
import durabletask.internal.shared as shared
from azure.core.credentials import TokenCredential

# By default, when there's 10 minutes left before the token expires, refresh the token
class AccessTokenManager:
    """Caches an Azure AD bearer token for the DTS scope and refreshes it
    shortly before expiry.

    ``get_access_token`` returns a ready-to-send ``"Bearer <token>"``
    string; callers never see the raw azure-core AccessToken object.
    """

    def __init__(self, refresh_interval_seconds: int = 600,
                 token_credential: Optional[TokenCredential] = None):
        """Initialize the manager.

        Args:
            refresh_interval_seconds: Refresh buffer — a new token is
                fetched once the current one has fewer than this many
                seconds of lifetime left. Defaults to 600 (10 minutes).
            token_credential: Credential used to mint tokens. Must be
                provided before get_access_token() is called.
        """
        self._scope = "https://durabletask.io/.default"
        self._refresh_interval_seconds = refresh_interval_seconds
        self._logger = shared.get_logger("token_manager")

        self._credential = token_credential

        # Fetch the first token lazily in get_access_token(). The previous
        # eager get_token() call stored a raw AccessToken object (not the
        # "Bearer ..." string this class hands out), was always discarded
        # on first use because expiry_time started as None, and raised
        # AttributeError when token_credential was left as None.
        self._token: Optional[str] = None
        self.expiry_time: Optional[datetime] = None

    def get_access_token(self) -> str:
        """Return a "Bearer <token>" string, refreshing it first if it is
        missing or close to expiry."""
        if self._token is None or self.is_token_expired():
            self.refresh_token()
        return self._token

    # Checks if the token is expired, or if it will expire in the next "refresh_interval_seconds" seconds.
    # For example, if the token is created to have a lifespan of 2 hours, and the refresh buffer is set to 30 minutes,
    # we will grab a new token when there are 30 minutes left on the lifespan of the token.
    def is_token_expired(self) -> bool:
        if self.expiry_time is None:
            return True
        return datetime.now(timezone.utc) >= (self.expiry_time - timedelta(seconds=self._refresh_interval_seconds))

    def refresh_token(self):
        """Fetch a new token from the credential and record when it should
        next be refreshed."""
        new_token = self._credential.get_token(self._scope)
        self._token = f"Bearer {new_token.token}"

        # Convert UNIX timestamp to timezone-aware datetime
        self.expiry_time = datetime.fromtimestamp(new_token.expires_on, tz=timezone.utc)
        self._logger.debug(f"Token refreshed. Expires at: {self.expiry_time}")
29 changes: 29 additions & 0 deletions durabletask-azuremanaged/durabletask/azuremanaged/worker.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.

from typing import Optional
from durabletask.worker import TaskHubGrpcWorker
from durabletask.azuremanaged.internal.access_token_manager import AccessTokenManager
from durabletask.azuremanaged.durabletask_grpc_interceptor import DTSDefaultClientInterceptorImpl
from azure.core.credentials import TokenCredential

# Worker class used for Durable Task Scheduler (DTS)
class DurableTaskSchedulerWorker(TaskHubGrpcWorker):
    """TaskHubGrpcWorker specialized for the Azure-managed Durable Task
    Scheduler (DTS).

    Installs a DTS-specific gRPC interceptor that attaches the task hub
    name — and, when a credential is supplied, a refreshed bearer token —
    to every outgoing call.
    """

    def __init__(self, *,
                 host_address: str,
                 taskhub: str,
                 token_credential: Optional[TokenCredential] = None,
                 secure_channel: Optional[bool] = True):
        """Create a worker bound to a DTS endpoint and task hub.

        Args:
            host_address: gRPC endpoint address of the scheduler.
            taskhub: Name of the task hub; must be a non-empty string.
            token_credential: Azure credential used to obtain bearer tokens.
                May be ``None`` for unauthenticated/local scenarios.
            secure_channel: Whether to connect over a TLS-secured channel.

        Raises:
            ValueError: If ``taskhub`` is ``None`` or empty.
        """
        # Reject both None and the empty string (the previous `== None`
        # comparison let "" through).
        if not taskhub:
            raise ValueError("Taskhub value cannot be empty. Please provide a value for your taskhub")

        interceptors = [DTSDefaultClientInterceptorImpl(token_credential, taskhub)]

        # We pass in None for the metadata so we don't construct an additional
        # interceptor in the parent class; the DTS interceptor above already
        # carries the task hub (and authorization) metadata.
        super().__init__(
            host_address=host_address,
            secure_channel=secure_channel,
            metadata=None,
            interceptors=interceptors)
41 changes: 41 additions & 0 deletions durabletask-azuremanaged/pyproject.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.

# For more information on pyproject.toml, see https://peps.python.org/pep-0621/

[build-system]
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "durabletask.azuremanaged"
version = "0.1b1"
description = "Extensions for the Durable Task Python SDK for integrating with the Durable Task Scheduler in Azure"
keywords = [
"durable",
"task",
"workflow",
"azure"
]
classifiers = [
"Development Status :: 3 - Alpha",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
]
requires-python = ">=3.9"
license = {file = "LICENSE"}
readme = "README.md"
dependencies = [
"durabletask",
"azure-identity"
]

[project.urls]
repository = "https://github.com/microsoft/durabletask-python"
changelog = "https://github.com/microsoft/durabletask-python/blob/main/CHANGELOG.md"

[tool.setuptools.packages.find]
include = ["durabletask.azuremanaged", "durabletask.azuremanaged.*"]

[tool.pytest.ini_options]
minversion = "6.0"
23 changes: 21 additions & 2 deletions durabletask/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,8 @@
import durabletask.internal.orchestrator_service_pb2 as pb
import durabletask.internal.orchestrator_service_pb2_grpc as stubs
import durabletask.internal.shared as shared
from durabletask.internal.grpc_interceptor import DefaultClientInterceptorImpl

from durabletask import task

TInput = TypeVar('TInput')
Expand Down Expand Up @@ -96,8 +98,25 @@ def __init__(self, *,
metadata: Optional[list[tuple[str, str]]] = None,
log_handler: Optional[logging.Handler] = None,
log_formatter: Optional[logging.Formatter] = None,
secure_channel: bool = False):
channel = shared.get_grpc_channel(host_address, metadata, secure_channel=secure_channel)
secure_channel: bool = False,
interceptors: Optional[list[Union[grpc.UnaryUnaryClientInterceptor, grpc.UnaryStreamClientInterceptor]]] = None):

# Determine the interceptors to use
if interceptors is not None:
self._interceptors = interceptors
if metadata:
self._interceptors.append(DefaultClientInterceptorImpl(metadata))
elif metadata:
self._interceptors = [DefaultClientInterceptorImpl(metadata)]
else:
self._interceptors = None

channel = shared.get_grpc_channel(
host_address=host_address,
metadata=metadata,
secure_channel=secure_channel,
interceptors=self._interceptors
)
self._stub = stubs.TaskHubSidecarServiceStub(channel)
self._logger = shared.get_logger("client", log_handler, log_formatter)

Expand Down
14 changes: 8 additions & 6 deletions durabletask/internal/shared.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,10 @@
import logging
from types import SimpleNamespace
from typing import Any, Optional
from durabletask.internal.grpc_interceptor import DefaultClientInterceptorImpl

import grpc

from durabletask.internal.grpc_interceptor import DefaultClientInterceptorImpl

# Field name used to indicate that an object was automatically serialized
# and should be deserialized as a SimpleNamespace
AUTO_SERIALIZED = "__durabletask_autoobject__"
Expand All @@ -25,8 +24,10 @@ def get_default_host_address() -> str:

def get_grpc_channel(
host_address: Optional[str],
metadata: Optional[list[tuple[str, str]]],
secure_channel: bool = False) -> grpc.Channel:
metadata: Optional[list[tuple[str, str]]] = None,
secure_channel: bool = False,
interceptors: Optional[list[DefaultClientInterceptorImpl]] = None) -> grpc.Channel:

if host_address is None:
host_address = get_default_host_address()

Expand All @@ -44,13 +45,14 @@ def get_grpc_channel(
host_address = host_address[len(protocol):]
break

# Create the base channel
if secure_channel:
channel = grpc.secure_channel(host_address, grpc.ssl_channel_credentials())
else:
channel = grpc.insecure_channel(host_address)

if metadata is not None and len(metadata) > 0:
interceptors = [DefaultClientInterceptorImpl(metadata)]
# Apply interceptors ONLY if they exist
if interceptors:
channel = grpc.intercept_channel(channel, *interceptors)
return channel

Expand Down
39 changes: 29 additions & 10 deletions durabletask/worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,9 @@
import durabletask.internal.orchestrator_service_pb2 as pb
import durabletask.internal.orchestrator_service_pb2_grpc as stubs
import durabletask.internal.shared as shared

from durabletask import task
from durabletask.internal.grpc_interceptor import DefaultClientInterceptorImpl

TInput = TypeVar('TInput')
TOutput = TypeVar('TOutput')
Expand Down Expand Up @@ -88,7 +90,8 @@ def __init__(self, *,
metadata: Optional[list[tuple[str, str]]] = None,
log_handler=None,
log_formatter: Optional[logging.Formatter] = None,
secure_channel: bool = False):
secure_channel: bool = False,
interceptors: Optional[list[DefaultClientInterceptorImpl]] = None): # Add interceptors
self._registry = _Registry()
self._host_address = host_address if host_address else shared.get_default_host_address()
self._metadata = metadata
Expand All @@ -97,6 +100,17 @@ def __init__(self, *,
self._is_running = False
self._secure_channel = secure_channel

# Determine the interceptors to use
if interceptors is not None:
self._interceptors = interceptors
if metadata:
self._interceptors.append(DefaultClientInterceptorImpl(metadata))
elif self._metadata:
self._interceptors = [DefaultClientInterceptorImpl(self._metadata)]
else:
self._interceptors = None


def __enter__(self):
return self

Expand All @@ -117,7 +131,7 @@ def add_activity(self, fn: task.Activity) -> str:

def start(self):
"""Starts the worker on a background thread and begins listening for work items."""
channel = shared.get_grpc_channel(self._host_address, self._metadata, self._secure_channel)
channel = shared.get_grpc_channel(self._host_address, self._metadata, self._secure_channel, self._interceptors)
stub = stubs.TaskHubSidecarServiceStub(channel)

if self._is_running:
Expand All @@ -143,9 +157,11 @@ def run_loop():
request_type = work_item.WhichOneof('request')
self._logger.debug(f'Received "{request_type}" work item')
if work_item.HasField('orchestratorRequest'):
executor.submit(self._execute_orchestrator, work_item.orchestratorRequest, stub)
executor.submit(self._execute_orchestrator, work_item.orchestratorRequest, stub, work_item.completionToken)
elif work_item.HasField('activityRequest'):
executor.submit(self._execute_activity, work_item.activityRequest, stub)
executor.submit(self._execute_activity, work_item.activityRequest, stub, work_item.completionToken)
elif work_item.HasField('healthPing'):
pass # no-op
else:
self._logger.warning(f'Unexpected work item type: {request_type}')

Expand Down Expand Up @@ -184,39 +200,42 @@ def stop(self):
self._logger.info("Worker shutdown completed")
self._is_running = False

def _execute_orchestrator(self, req: pb.OrchestratorRequest, stub: stubs.TaskHubSidecarServiceStub):
def _execute_orchestrator(self, req: pb.OrchestratorRequest, stub: stubs.TaskHubSidecarServiceStub, completionToken):
try:
executor = _OrchestrationExecutor(self._registry, self._logger)
result = executor.execute(req.instanceId, req.pastEvents, req.newEvents)
res = pb.OrchestratorResponse(
instanceId=req.instanceId,
actions=result.actions,
customStatus=pbh.get_string_value(result.encoded_custom_status))
customStatus=pbh.get_string_value(result.encoded_custom_status),
completionToken=completionToken)
except Exception as ex:
self._logger.exception(f"An error occurred while trying to execute instance '{req.instanceId}': {ex}")
failure_details = pbh.new_failure_details(ex)
actions = [pbh.new_complete_orchestration_action(-1, pb.ORCHESTRATION_STATUS_FAILED, "", failure_details)]
res = pb.OrchestratorResponse(instanceId=req.instanceId, actions=actions)
res = pb.OrchestratorResponse(instanceId=req.instanceId, actions=actions, completionToken=completionToken)

try:
stub.CompleteOrchestratorTask(res)
except Exception as ex:
self._logger.exception(f"Failed to deliver orchestrator response for '{req.instanceId}' to sidecar: {ex}")

def _execute_activity(self, req: pb.ActivityRequest, stub: stubs.TaskHubSidecarServiceStub):
def _execute_activity(self, req: pb.ActivityRequest, stub: stubs.TaskHubSidecarServiceStub, completionToken):
instance_id = req.orchestrationInstance.instanceId
try:
executor = _ActivityExecutor(self._registry, self._logger)
result = executor.execute(instance_id, req.name, req.taskId, req.input.value)
res = pb.ActivityResponse(
instanceId=instance_id,
taskId=req.taskId,
result=pbh.get_string_value(result))
result=pbh.get_string_value(result),
completionToken=completionToken)
except Exception as ex:
res = pb.ActivityResponse(
instanceId=instance_id,
taskId=req.taskId,
failureDetails=pbh.new_failure_details(ex))
failureDetails=pbh.new_failure_details(ex),
completionToken=completionToken)

try:
stub.CompleteActivityTask(res)
Expand Down
2 changes: 1 addition & 1 deletion examples/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ All the examples assume that you have a Durable Task-compatible sidecar running

1. Install the latest version of the [Dapr CLI](https://docs.dapr.io/getting-started/install-dapr-cli/), which contains and exposes an embedded version of the Durable Task engine. The setup process (which requires Docker) will configure the workflow engine to store state in a local Redis container.

1. Clone and run the [Durable Task Sidecar](https://github.com/microsoft/durabletask-go) project locally (requires Go 1.18 or higher). Orchestration state will be stored in a local sqlite database.
2. Clone and run the [Durable Task Sidecar](https://github.com/microsoft/durabletask-go) project locally (requires Go 1.18 or higher). Orchestration state will be stored in a local sqlite database.

## Running the examples

Expand Down
Loading
Loading