36 changes: 36 additions & 0 deletions alembic/versions/e38f2f6f395a_add_mock_manual_approval_required.py
@@ -0,0 +1,36 @@
"""add_mock_manual_approval_required

Revision ID: e38f2f6f395a
Revises: faaed3b71428
Create Date: 2025-10-23 20:06:20.766732

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = 'e38f2f6f395a'
down_revision: Union[str, Sequence[str], None] = 'faaed3b71428'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
"""Upgrade schema."""
# Add mock_manual_approval_required column to adapter_config table
op.add_column('adapter_config', sa.Column('mock_manual_approval_required', sa.Boolean(), nullable=True))

# Backfill ALL existing rows so the NOT NULL constraint below cannot fail on NULLs
op.execute("UPDATE adapter_config SET mock_manual_approval_required = false WHERE mock_manual_approval_required IS NULL")

# Make the column non-nullable after setting defaults
op.alter_column('adapter_config', 'mock_manual_approval_required', nullable=False, server_default=sa.false())


def downgrade() -> None:
"""Downgrade schema."""
# Remove mock_manual_approval_required column
op.drop_column('adapter_config', 'mock_manual_approval_required')
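
This migration follows the standard three-step pattern for adding a NOT NULL boolean to a populated table: add the column as nullable, backfill existing rows, then tighten the constraint. A minimal reusable sketch of that pattern (the add_boolean_flag helper is hypothetical, not part of this PR):

import sqlalchemy as sa
from alembic import op


def add_boolean_flag(table: str, column: str, default: bool = False) -> None:
    """Hypothetical helper showing the add -> backfill -> constrain pattern."""
    # Step 1: add the column as nullable so existing rows are untouched
    op.add_column(table, sa.Column(column, sa.Boolean(), nullable=True))
    # Step 2: backfill every existing row; SET NOT NULL would otherwise
    # fail on any row that is still NULL
    op.execute(
        sa.text(f"UPDATE {table} SET {column} = :val WHERE {column} IS NULL").bindparams(val=default)
    )
    # Step 3: tighten to NOT NULL and give new rows a server-side default
    op.alter_column(table, column, nullable=False, server_default=sa.true() if default else sa.false())

Note that server_default alone does not backfill existing NULLs in PostgreSQL, which is why step 2 must run before step 3.
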
32 changes: 19 additions & 13 deletions run_all_tests.sh
@@ -67,10 +67,15 @@ echo ""
setup_docker_stack() {
echo -e "${BLUE}🐳 Starting complete Docker stack (PostgreSQL + servers)...${NC}"

# Clean up any existing containers/volumes
echo "Cleaning up any existing Docker services and volumes..."
docker-compose down -v 2>/dev/null || true
docker volume prune -f 2>/dev/null || true
# Use unique project name to isolate from local dev environment
# This ensures test containers don't interfere with your running local containers
local TEST_PROJECT_NAME="adcp-test-$$" # $$ = process ID, ensures uniqueness
export COMPOSE_PROJECT_NAME="$TEST_PROJECT_NAME"

# Clean up ONLY this test project's containers/volumes (not your local dev!)
echo "Cleaning up any existing TEST containers (project: $TEST_PROJECT_NAME)..."
docker-compose -p "$TEST_PROJECT_NAME" down -v 2>/dev/null || true
# DO NOT run docker volume prune - that affects ALL Docker volumes!

# If ports are still in use, find new ones
if lsof -i :${POSTGRES_PORT} >/dev/null 2>&1; then
@@ -116,15 +121,15 @@ print(' '.join(map(str, ports)))

# Build and start services
echo "Building Docker images (this may take 2-3 minutes on first run)..."
if ! docker-compose build --progress=plain 2>&1 | grep -E "(Step|#|Building|exporting)" | tail -20; then
if ! docker-compose -p "$TEST_PROJECT_NAME" build --progress=plain 2>&1 | grep -E "(Step|#|Building|exporting)" | tail -20; then
echo -e "${RED}❌ Docker build failed${NC}"
exit 1
fi

echo "Starting Docker services..."
if ! docker-compose up -d; then
if ! docker-compose -p "$TEST_PROJECT_NAME" up -d; then
echo -e "${RED}❌ Docker services failed to start${NC}"
docker-compose logs
docker-compose -p "$TEST_PROJECT_NAME" logs
exit 1
fi

@@ -143,7 +148,7 @@ print(' '.join(map(str, ports)))
fi

# Check PostgreSQL
if docker-compose exec -T postgres pg_isready -U adcp_user >/dev/null 2>&1; then
if docker-compose -p "$TEST_PROJECT_NAME" exec -T postgres pg_isready -U adcp_user >/dev/null 2>&1; then
echo -e "${GREEN}✓ PostgreSQL is ready (${elapsed}s)${NC}"
break
fi
@@ -154,12 +159,12 @@ print(' '.join(map(str, ports)))
# Run migrations
echo "Running database migrations..."
# Use docker-compose exec to run migrations inside the container
if ! docker-compose exec -T postgres psql -U adcp_user -d postgres -c "CREATE DATABASE adcp_test" 2>/dev/null; then
if ! docker-compose -p "$TEST_PROJECT_NAME" exec -T postgres psql -U adcp_user -d postgres -c "CREATE DATABASE adcp_test" 2>/dev/null; then
echo "Database adcp_test already exists, continuing..."
fi

# Export for tests
export DATABASE_URL="postgresql://adcp_user:test_password@localhost:${POSTGRES_PORT}/adcp_test"
# Export for tests - MUST match docker-compose.yml POSTGRES_PASSWORD
export DATABASE_URL="postgresql://adcp_user:secure_password_change_me@localhost:${POSTGRES_PORT}/adcp_test"

echo -e "${GREEN}✓ Docker stack is ready${NC}"
echo " PostgreSQL: localhost:${POSTGRES_PORT}"
@@ -170,8 +175,9 @@ print(' '.join(map(str, ports)))

# Docker teardown function
teardown_docker_stack() {
echo -e "${BLUE}🐳 Stopping Docker stack...${NC}"
docker-compose down -v 2>/dev/null || true
echo -e "${BLUE}🐳 Stopping TEST Docker stack (project: $COMPOSE_PROJECT_NAME)...${NC}"
docker-compose -p "$COMPOSE_PROJECT_NAME" down -v 2>/dev/null || true
echo -e "${GREEN}✓ Test containers cleaned up (your local dev containers are untouched)${NC}"
}

# Trap to ensure cleanup on exit
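
The hunk headers show the script already embeds a small Python helper that prints replacement ports when the defaults are in use (the `print(' '.join(map(str, ports)))` context line). A sketch of what such a helper can look like; this exact implementation is an assumption, not code from the PR:

import socket


def find_free_ports(count: int) -> list[int]:
    """Ask the OS for `count` free TCP ports by binding to port 0."""
    sockets, ports = [], []
    for _ in range(count):
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.bind(("127.0.0.1", 0))  # port 0 = kernel picks a free port
        sockets.append(s)
        ports.append(s.getsockname()[1])
    # Hold all sockets open while collecting so no port is handed out twice,
    # then release them for docker-compose to claim
    for s in sockets:
        s.close()
    return ports


if __name__ == "__main__":
    ports = find_free_ports(3)
    print(" ".join(map(str, ports)))  # matches the output format the script consumes
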
32 changes: 29 additions & 3 deletions src/a2a_server/adcp_a2a_server.py
@@ -2215,6 +2215,18 @@ def main():
extended_agent_card_url="/agent.json",
)

# Add CORS middleware for browser compatibility (must be added early to wrap all responses)
from starlette.middleware.cors import CORSMiddleware

app.add_middleware(
CORSMiddleware,
allow_origins=["*"], # Allow all origins for A2A protocol
allow_credentials=True,
allow_methods=["*"], # Allow all HTTP methods
allow_headers=["*"], # Allow all headers
)
logger.info("CORS middleware enabled for browser compatibility")

# Override the agent card endpoints to support tenant-specific URLs
def create_dynamic_agent_card(request) -> AgentCard:
"""Create agent card with tenant-specific URL from request headers."""
@@ -2265,12 +2277,26 @@ def get_protocol(hostname: str) -> str:

async def dynamic_agent_discovery(request):
"""Override for /.well-known/agent.json with tenant-specific URL."""
from starlette.responses import Response

# Handle OPTIONS preflight requests (CORS middleware will add headers)
if request.method == "OPTIONS":
return Response(status_code=204)

dynamic_card = create_dynamic_agent_card(request)
# CORS middleware automatically adds CORS headers
return JSONResponse(dynamic_card.model_dump())

async def dynamic_agent_card_endpoint(request):
"""Override for /agent.json with tenant-specific URL."""
from starlette.responses import Response

# Handle OPTIONS preflight requests (CORS middleware will add headers)
if request.method == "OPTIONS":
return Response(status_code=204)

dynamic_card = create_dynamic_agent_card(request)
# CORS middleware automatically adds CORS headers
return JSONResponse(dynamic_card.model_dump())

# Find and replace the existing routes to ensure proper A2A specification compliance
@@ -2279,15 +2305,15 @@ async def dynamic_agent_card_endpoint(request):
if hasattr(route, "path"):
if route.path == "/.well-known/agent.json":
# Replace with our dynamic endpoint (legacy compatibility)
new_routes.append(Route("/.well-known/agent.json", dynamic_agent_discovery, methods=["GET"]))
new_routes.append(Route("/.well-known/agent.json", dynamic_agent_discovery, methods=["GET", "OPTIONS"]))
logger.info("Replaced /.well-known/agent.json with dynamic version")
elif route.path == "/.well-known/agent-card.json":
# Replace with our dynamic endpoint (primary A2A discovery)
new_routes.append(Route("/.well-known/agent-card.json", dynamic_agent_discovery, methods=["GET"]))
new_routes.append(Route("/.well-known/agent-card.json", dynamic_agent_discovery, methods=["GET", "OPTIONS"]))
logger.info("Replaced /.well-known/agent-card.json with dynamic version")
elif route.path == "/agent.json":
# Replace with our dynamic endpoint
new_routes.append(Route("/agent.json", dynamic_agent_card_endpoint, methods=["GET"]))
new_routes.append(Route("/agent.json", dynamic_agent_card_endpoint, methods=["GET", "OPTIONS"]))
logger.info("Replaced /agent.json with dynamic version")
else:
new_routes.append(route)
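
Taken together, this change wires CORS in two layers: the middleware decorates responses with CORS headers, while each discovery route also accepts OPTIONS so bare probes get an empty 204. A self-contained sketch of the same shape, assuming a standalone Starlette app; the route body is illustrative, not the server's actual card-building logic:

from starlette.applications import Starlette
from starlette.middleware.cors import CORSMiddleware
from starlette.responses import JSONResponse, Response
from starlette.routing import Route


async def agent_discovery(request):
    # CORSMiddleware answers real preflights (OPTIONS + Origin headers) before
    # they reach this handler; this branch covers bare OPTIONS probes.
    if request.method == "OPTIONS":
        return Response(status_code=204)
    return JSONResponse({"name": "example-agent"})  # stand-in for the dynamic card


app = Starlette(routes=[
    Route("/.well-known/agent-card.json", agent_discovery, methods=["GET", "OPTIONS"]),
])
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"],
)
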
41 changes: 41 additions & 0 deletions src/adapters/mock_ad_server.py
@@ -706,10 +706,23 @@ def _create_media_buy_immediate(
else:
self.log(f"Would return: Campaign ID '{media_buy_id}' with status 'pending_creative'")

# Build packages response with buyer_ref from original request
response_packages = []
for idx, pkg in enumerate(packages):
pkg_dict = pkg.model_dump()
self.log(f"[DEBUG] MockAdapter: Package {idx} model_dump() = {pkg_dict}")
self.log(f"[DEBUG] MockAdapter: Package {idx} has package_id = {pkg_dict.get('package_id')}")
# Add buyer_ref from original request package if available
if request.packages and idx < len(request.packages):
pkg_dict["buyer_ref"] = request.packages[idx].buyer_ref
response_packages.append(pkg_dict)

self.log(f"[DEBUG] MockAdapter: Returning {len(response_packages)} packages in response")
return CreateMediaBuyResponse(
buyer_ref=request.buyer_ref, # Required field per AdCP spec
media_buy_id=media_buy_id,
creative_deadline=datetime.now(UTC) + timedelta(days=2),
packages=response_packages, # Include packages with buyer_ref
errors=None, # No errors for successful mock response
)

@@ -1114,6 +1127,34 @@ def update_media_buy(
budget: int | None,
today: datetime,
) -> UpdateMediaBuyResponse:
"""Update media buy in database (Mock adapter implementation)."""
import logging
from sqlalchemy import select
from sqlalchemy.orm import attributes

from src.core.database.database_session import get_db_session
from src.core.database.models import MediaBuy, MediaPackage

logger = logging.getLogger(__name__)

with get_db_session() as session:
if action == "update_package_budget" and package_id and budget is not None:
# Update package budget in MediaPackage.package_config JSON
stmt = select(MediaPackage).where(
MediaPackage.package_id == package_id,
MediaPackage.media_buy_id == media_buy_id
)
media_package = session.scalars(stmt).first()
if media_package:
# Update budget in package_config JSON
media_package.package_config["budget"] = float(budget)
# Flag the JSON field as modified so SQLAlchemy persists it
attributes.flag_modified(media_package, "package_config")
session.commit()
logger.info(f"[MockAdapter] Updated package {package_id} budget to {budget} in database")
else:
logger.warning(f"[MockAdapter] Package {package_id} not found for media buy {media_buy_id}")

return UpdateMediaBuyResponse(media_buy_id=media_buy_id, buyer_ref=buyer_ref)

def get_config_ui_endpoint(self) -> str | None:
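
The flag_modified call is the key detail in update_media_buy: SQLAlchemy does not detect in-place mutation of a plain JSON column, so without it the budget write would silently never reach the database. A minimal standalone illustration of the pitfall and the fix, using a hypothetical Package model rather than this repo's schema:

from sqlalchemy import JSON, Integer, create_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column
from sqlalchemy.orm.attributes import flag_modified


class Base(DeclarativeBase):
    pass


class Package(Base):  # hypothetical model for illustration
    __tablename__ = "package"
    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    config: Mapped[dict] = mapped_column(JSON, default=dict)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Package(id=1, config={"budget": 100.0}))
    session.commit()

with Session(engine) as session:
    pkg = session.get(Package, 1)
    pkg.config["budget"] = 250.0  # in-place mutation: not tracked by the ORM
    flag_modified(pkg, "config")  # without this, commit() writes nothing
    session.commit()

with Session(engine) as session:
    assert session.get(Package, 1).config["budget"] == 250.0

An alternative design is declaring the column as MutableDict.as_mutable(JSON), which tracks mutations automatically at the cost of wrapping every loaded value.
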
70 changes: 37 additions & 33 deletions src/admin/blueprints/creatives.py
@@ -330,30 +330,32 @@ def approve_creative(tenant_id, creative_id, **kwargs):

db_session.commit()

# Find webhook_url from workflow step if it exists
from src.core.database.models import ObjectWorkflowMapping, WorkflowStep

stmt = select(ObjectWorkflowMapping).filter_by(object_type="creative", object_id=creative_id)
mapping = db_session.scalars(stmt).first()

webhook_url = None
if mapping:
stmt = select(WorkflowStep).filter_by(step_id=mapping.step_id)
workflow_step = db_session.scalars(stmt).first()
if workflow_step and workflow_step.request_data:
webhook_url = workflow_step.request_data.get("webhook_url")

# Call webhook if configured
if webhook_url:
creative_data = {
# Send webhook notification to principal
from src.core.database.models import PushNotificationConfig

stmt_webhook = select(PushNotificationConfig).filter_by(
tenant_id=tenant_id,
principal_id=creative.principal_id,
is_active=True
).order_by(PushNotificationConfig.created_at.desc())
webhook_config = db_session.scalars(stmt_webhook).first()

if webhook_config:
import requests
webhook_payload = {
"event": "creative_approved",
"creative_id": creative.creative_id,
"name": creative.name,
"format": creative.format,
"status": "approved",
"approved_by": approved_by,
"approved_at": creative.approved_at.isoformat(),
}
_call_webhook_for_creative_status(webhook_url, creative_id, "approved", creative_data, tenant_id)
try:
requests.post(webhook_config.url, json=webhook_payload, timeout=10)
logger.info(f"Sent webhook notification for approved creative {creative_id}")
except Exception as webhook_err:
logger.warning(f"Failed to send creative approval webhook: {webhook_err}")

# Send Slack notification if configured
stmt_tenant = select(Tenant).filter_by(tenant_id=tenant_id)
@@ -475,22 +477,20 @@ def reject_creative(tenant_id, creative_id, **kwargs):

db_session.commit()

# Find webhook_url from workflow step if it exists
from src.core.database.models import ObjectWorkflowMapping, WorkflowStep

stmt = select(ObjectWorkflowMapping).filter_by(object_type="creative", object_id=creative_id)
mapping = db_session.scalars(stmt).first()
# Send webhook notification to principal
from src.core.database.models import PushNotificationConfig

webhook_url = None
if mapping:
stmt = select(WorkflowStep).filter_by(step_id=mapping.step_id)
workflow_step = db_session.scalars(stmt).first()
if workflow_step and workflow_step.request_data:
webhook_url = workflow_step.request_data.get("webhook_url")

# Call webhook if configured
if webhook_url:
creative_data = {
stmt_webhook = select(PushNotificationConfig).filter_by(
tenant_id=tenant_id,
principal_id=creative.principal_id,
is_active=True
).order_by(PushNotificationConfig.created_at.desc())
webhook_config = db_session.scalars(stmt_webhook).first()

if webhook_config:
import requests
webhook_payload = {
"event": "creative_rejected",
"creative_id": creative.creative_id,
"name": creative.name,
"format": creative.format,
@@ -499,7 +499,11 @@
"rejection_reason": rejection_reason,
"rejected_at": creative.data["rejected_at"],
}
_call_webhook_for_creative_status(webhook_url, creative_id, "rejected", creative_data, tenant_id)
try:
requests.post(webhook_config.url, json=webhook_payload, timeout=10)
logger.info(f"Sent webhook notification for rejected creative {creative_id}")
except Exception as webhook_err:
logger.warning(f"Failed to send creative rejection webhook: {webhook_err}")

# Send Slack notification if configured
stmt_tenant = select(Tenant).filter_by(tenant_id=tenant_id)
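
Both handlers now share the same lookup-then-POST shape: fetch the principal's newest active PushNotificationConfig and fire the event payload at its URL, treating delivery failure as non-fatal since the approval or rejection is already committed. A condensed sketch of that shape (the notify_principal helper name is hypothetical; the model and fields are the ones imported in the diff):

import logging

import requests
from sqlalchemy import select

logger = logging.getLogger(__name__)


def notify_principal(db_session, tenant_id: str, principal_id: str, payload: dict) -> None:
    """POST an event to the principal's most recent active webhook, if any."""
    from src.core.database.models import PushNotificationConfig

    stmt = (
        select(PushNotificationConfig)
        .filter_by(tenant_id=tenant_id, principal_id=principal_id, is_active=True)
        .order_by(PushNotificationConfig.created_at.desc())
    )
    config = db_session.scalars(stmt).first()
    if config is None:
        return  # principal has no webhook registered; nothing to do
    try:
        requests.post(config.url, json=payload, timeout=10)
        logger.info(f"Sent {payload.get('event')} webhook for principal {principal_id}")
    except Exception as err:
        # Webhook delivery is best-effort: the state change is already committed
        logger.warning(f"Failed to send webhook: {err}")
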