203 changes: 203 additions & 0 deletions schemas/v1/_schemas_v1_core_webhook-payload_json.json
@@ -0,0 +1,203 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"$id": "/schemas/v1/core/webhook-payload.json",
"title": "Webhook Payload",
"description": "Payload structure sent to webhook endpoints when async task status changes. Combines protocol-level task metadata with domain-specific response data. This schema represents what your webhook handler will receive when a task transitions from 'submitted' to a terminal or intermediate state.",
"type": "object",
"properties": {
"task_id": {
"type": "string",
"description": "Unique identifier for this task. Use this to correlate webhook notifications with the original task submission."
},
"task_type": {
"$ref": "/schemas/v1/enums/task-type.json",
"description": "Type of AdCP operation that triggered this webhook. Enables webhook handlers to route to appropriate processing logic."
},
"domain": {
"type": "string",
"description": "AdCP domain this task belongs to. Helps classify the operation type at a high level.",
"enum": [
"media-buy",
"signals"
]
},
"status": {
"$ref": "/schemas/v1/enums/task-status.json",
"description": "Current task status. Webhooks are only triggered for status changes after initial submission (e.g., submitted \u2192 input-required, submitted \u2192 completed, submitted \u2192 failed)."
},
"created_at": {
"type": "string",
"format": "date-time",
"description": "ISO 8601 timestamp when the task was initially created. Useful for tracking operation duration."
},
"updated_at": {
"type": "string",
"format": "date-time",
"description": "ISO 8601 timestamp when the task status was last updated. This matches the timestamp when the webhook was triggered."
},
"completed_at": {
"type": "string",
"format": "date-time",
"description": "ISO 8601 timestamp when the task reached a terminal state (completed, failed, or canceled). Only present for terminal states."
},
"message": {
"type": "string",
"description": "Human-readable summary of the current task state. Provides context about what happened and what action may be needed."
},
"context_id": {
"type": "string",
"description": "Session/conversation identifier. Use this to continue the conversation if input-required status needs clarification or additional parameters."
},
"progress": {
"type": "object",
"description": "Progress information for tasks still in 'working' state. Rarely seen in webhooks since 'working' tasks typically complete synchronously, but may appear if a task transitions from 'submitted' to 'working'.",
"properties": {
"percentage": {
"type": "number",
"minimum": 0,
"maximum": 100,
"description": "Completion percentage (0-100)"
},
"current_step": {
"type": "string",
"description": "Current step or phase of the operation"
},
"total_steps": {
"type": "integer",
"minimum": 1,
"description": "Total number of steps in the operation"
},
"step_number": {
"type": "integer",
"minimum": 1,
"description": "Current step number"
}
},
"additionalProperties": false
},
"error": {
"type": "object",
"description": "Error details for failed tasks. Only present when status is 'failed'.",
"properties": {
"code": {
"type": "string",
"description": "Error code for programmatic handling"
},
"message": {
"type": "string",
"description": "Detailed error message"
},
"details": {
"type": "object",
"description": "Additional error context",
"properties": {
"domain": {
"type": "string",
"description": "AdCP domain where error occurred",
"enum": [
"media-buy",
"signals"
]
},
"operation": {
"type": "string",
"description": "Specific operation that failed"
},
"specific_context": {
"type": "object",
"description": "Domain-specific error context",
"additionalProperties": true
}
},
"additionalProperties": true
}
},
"required": [
"code",
"message"
],
"additionalProperties": false
}
},
"required": [
"task_id",
"task_type",
"domain",
"status",
"created_at",
"updated_at"
],
"additionalProperties": true,
"notes": [
"Webhooks are ONLY triggered when the initial response status is 'submitted' (long-running operations)",
"Webhook payloads include protocol-level fields (task_id, task_type, domain, status, timestamps) PLUS the full task-specific response data",
"The task-specific response data is merged at the top level of the webhook payload (not nested in a 'payload' field)",
"For example, a create_media_buy webhook will include task_id, task_type, domain, status, AND media_buy_id, packages, creative_deadline, etc.",
"Your webhook handler receives the complete information needed to process the result without making additional API calls"
],
"examples": [
{
"description": "Webhook for input-required status (human approval needed)",
"data": {
"task_id": "task_456",
"task_type": "create_media_buy",
"domain": "media-buy",
"status": "input-required",
"created_at": "2025-01-22T10:00:00Z",
"updated_at": "2025-01-22T10:15:00Z",
"context_id": "ctx_abc123",
"message": "Campaign budget $150K requires VP approval to proceed",
"buyer_ref": "nike_q1_campaign_2024"
}
},
{
"description": "Webhook for completed create_media_buy",
"data": {
"task_id": "task_456",
"task_type": "create_media_buy",
"domain": "media-buy",
"status": "completed",
"created_at": "2025-01-22T10:00:00Z",
"updated_at": "2025-01-22T10:30:00Z",
"completed_at": "2025-01-22T10:30:00Z",
"message": "Media buy created successfully with 2 packages ready for creative assignment",
"media_buy_id": "mb_12345",
"buyer_ref": "nike_q1_campaign_2024",
"creative_deadline": "2024-01-30T23:59:59Z",
"packages": [
{
"package_id": "pkg_12345_001",
"buyer_ref": "nike_ctv_package"
}
]
}
},
{
"description": "Webhook for failed sync_creatives",
"data": {
"task_id": "task_789",
"task_type": "sync_creatives",
"domain": "media-buy",
"status": "failed",
"created_at": "2025-01-22T10:45:00Z",
"updated_at": "2025-01-22T10:46:00Z",
"completed_at": "2025-01-22T10:46:00Z",
"message": "Creative sync failed due to invalid asset URLs",
"error": {
"code": "invalid_assets",
"message": "One or more creative assets could not be accessed",
"details": {
"domain": "media-buy",
"operation": "sync_creatives",
"specific_context": {
"failed_creatives": [
"creative_001",
"creative_003"
]
}
}
}
}
}
]
}
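Note on consuming this payload: the notes above state that task-specific response data is merged at the top level of the webhook payload, so a handler can route on the protocol fields and read domain fields directly. A minimal handler sketch in Python (illustrative only; the Flask framework, route path, and logger name are assumptions, not part of this PR):

import logging

from flask import Flask, request

app = Flask(__name__)
log = logging.getLogger("adcp.webhook")


@app.route("/adcp/webhook", methods=["POST"])  # route path is an assumption
def handle_webhook():
    payload = request.get_json()

    # Protocol-level fields are always present per the schema's "required" list.
    task_id = payload["task_id"]
    status = payload["status"]

    if status == "failed":
        # "error" is only present when status is "failed".
        err = payload.get("error", {})
        log.error("task %s failed: %s (%s)", task_id, err.get("message"), err.get("code"))
    elif status == "input-required":
        # context_id lets the caller continue the conversation with more input.
        log.info("task %s needs input (context %s): %s",
                 task_id, payload.get("context_id"), payload.get("message"))
    elif status == "completed":
        # Task-specific fields (e.g. media_buy_id, packages) arrive merged at the
        # top level, so no follow-up API call is needed to read the result.
        log.info("task %s (%s) completed: %s",
                 task_id, payload["task_type"], payload.get("message"))

    return "", 204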
6 changes: 6 additions & 0 deletions schemas/v1/_schemas_v1_core_webhook-payload_json.json.meta
@@ -0,0 +1,6 @@
{
"etag": "W/\"68ffaa97-1ce0\"",
"last-modified": "Mon, 27 Oct 2025 17:23:35 GMT",
"downloaded_at": "2025-10-27T20:50:42.491975",
"schema_ref": "/schemas/v1/core/webhook-payload.json"
}
27 changes: 27 additions & 0 deletions schemas/v1/_schemas_v1_enums_task-type_json.json
@@ -0,0 +1,27 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"$id": "/schemas/v1/enums/task-type.json",
"title": "Task Type",
"description": "Valid AdCP task types across all domains. These represent the complete set of operations that can be tracked via the task management system.",
"type": "string",
"enum": [
"create_media_buy",
"update_media_buy",
"sync_creatives",
"activate_signal",
"get_signals"
],
"enumDescriptions": {
"create_media_buy": "Media-buy domain: Create a new advertising campaign with one or more packages",
"update_media_buy": "Media-buy domain: Update campaign settings, package configuration, or delivery parameters",
"sync_creatives": "Media-buy domain: Sync creative assets to publisher's library with upsert semantics",
"activate_signal": "Signals domain: Activate an audience signal on a specific platform or account",
"get_signals": "Signals domain: Discover available audience signals based on natural language description"
},
"notes": [
"Task types map to specific AdCP task operations",
"Each task type belongs to either the 'media-buy' or 'signals' domain",
"This enum is used in task management APIs (tasks/list, tasks/get) and webhook payloads",
"New task types require a minor version bump per semantic versioning"
]
}
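As a quick illustration of using this enum at a validation boundary (a sketch assuming the third-party jsonschema package; the local file path and the invalid example value are hypothetical):

import json

from jsonschema import ValidationError, validate

# Path is an assumption about where the downloaded schema lives.
with open("schemas/v1/_schemas_v1_enums_task-type_json.json") as f:
    task_type_schema = json.load(f)

for candidate in ("sync_creatives", "delete_media_buy"):
    try:
        validate(instance=candidate, schema=task_type_schema)
        print(f"{candidate}: valid task type")
    except ValidationError:
        print(f"{candidate}: not a known AdCP task type")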
6 changes: 6 additions & 0 deletions schemas/v1/_schemas_v1_enums_task-type_json.json.meta
@@ -0,0 +1,6 @@
{
"etag": "W/\"68ffaa97-531\"",
"last-modified": "Mon, 27 Oct 2025 17:23:35 GMT",
"downloaded_at": "2025-10-27T20:50:43.126772",
"schema_ref": "/schemas/v1/enums/task-type.json"
}
67 changes: 48 additions & 19 deletions scripts/generate_schemas.py
@@ -144,19 +144,19 @@ def resolve_refs_in_schema(schema: dict, schema_dir: Path, visited: set | None =
 
 def add_etag_metadata_to_generated_files(output_dir: Path, schema_dir: Path):
     """
-    Add source schema ETag metadata to generated Python files.
+    Add source schema content hash to generated Python files.
 
-    This replaces the generation timestamp (which changes on every run)
-    with the source schema's ETag (which only changes when schema changes).
+    Uses content-based hashing instead of server ETags to avoid noise from
+    server restarts that regenerate schemas with new ETags but identical content.
     """
     generated_files = list(output_dir.glob("_schemas_*.py"))
     updated_count = 0
+    skipped_count = 0
 
     for py_file in generated_files:
-        # Find corresponding .meta file
+        # Find corresponding JSON schema file
         # Python file: _schemas_v1_core_creative_asset_json.py (underscores)
         # JSON file: _schemas_v1_core_creative-asset_json.json (hyphens)
-        # Meta file: _schemas_v1_core_creative-asset_json.json.meta
         #
         # datamodel-codegen converts hyphens to underscores in filenames,
         # so we need to convert back to find the original JSON file
@@ -168,52 +168,66 @@ def add_etag_metadata_to_generated_files(output_dir: Path, schema_dir: Path):
 
         # Look for: # filename: _schemas_v1_core_creative-asset_json.json
         original_json_filename = None
+        existing_content_hash = None
         for line in header_lines:
             if line.strip().startswith("# filename:"):
                 original_json_filename = line.split(":", 1)[1].strip()
-                break
+            elif line.strip().startswith("# schema_hash:"):
+                existing_content_hash = line.split(":", 1)[1].strip()
 
         if not original_json_filename:
             continue
 
-        meta_file = schema_dir / f"{original_json_filename}.meta"
+        schema_file = schema_dir / original_json_filename
 
-        if not meta_file.exists():
+        if not schema_file.exists():
             continue
 
-        # Load ETag from .meta file
+        # Compute hash of actual schema content (not ETag)
         try:
-            with open(meta_file) as f:
-                metadata = json.load(f)
-                etag = metadata.get("etag", "unknown")
-                last_modified = metadata.get("last-modified", "unknown")
+            with open(schema_file) as f:
+                schema_data = json.load(f)
+            # Normalize JSON to consistent format for hashing
+            normalized = json.dumps(schema_data, sort_keys=True)
+            content_hash = hashlib.md5(normalized.encode()).hexdigest()[:12]
         except (json.JSONDecodeError, OSError):
             continue
 
+        # Skip update if schema content hasn't changed
+        if existing_content_hash == content_hash:
+            skipped_count += 1
+            continue
+
         # Read generated file
        with open(py_file) as f:
             content = f.read()
 
-        # Add ETag comment after the datamodel-codegen header
+        # Add schema hash comment after the datamodel-codegen header
         # Look for the pattern:
         #   # generated by datamodel-codegen:
         #   #   filename: ...
         #
         # And insert after it:
-        #   # source_etag: W/"..."
-        #   # source_last_modified: ...
+        #   # schema_hash: abc123...
 
         lines = content.split("\n")
         new_lines = []
         inserted = False
 
         for line in lines:
+            # Skip existing schema_hash/source_etag/source_last_modified lines
+            if (
+                line.strip().startswith("# schema_hash:")
+                or line.strip().startswith("# source_etag:")
+                or line.strip().startswith("# source_last_modified:")
+            ):
+                continue
+
             new_lines.append(line)
 
             # Insert after the filename line
             if not inserted and line.startswith("# filename:"):
-                new_lines.append(f"# source_etag: {etag}")
-                new_lines.append(f"# source_last_modified: {last_modified}")
+                new_lines.append(f"# schema_hash: {content_hash}")
                 inserted = True
 
         if inserted:
Expand All @@ -222,7 +236,8 @@ def add_etag_metadata_to_generated_files(output_dir: Path, schema_dir: Path):
f.write("\n".join(new_lines))
updated_count += 1

print(f"✅ Added ETag metadata to {updated_count} generated files")
print(f"✅ Updated {updated_count} generated files with new schema hashes")
print(f"⏭️ Skipped {skipped_count} files (schema content unchanged)")


def compute_schema_hash(schema_dir: Path) -> str:
@@ -253,6 +268,20 @@ def generate_schemas_from_json(schema_dir: Path, output_file: Path):
     """
     print(f"📂 Processing schemas from: {schema_dir}")
 
+    # Check if schema content has changed since last generation
+    current_schema_hash = compute_schema_hash(schema_dir)
+    init_file = output_file / "__init__.py"
+
+    if init_file.exists():
+        with open(init_file) as f:
+            first_line = f.readline()
+        if first_line.startswith("# SCHEMA_HASH:"):
+            existing_hash = first_line.split(":", 1)[1].strip()
+            if existing_hash == current_schema_hash:
+                print(f"⏭️ Schema content unchanged (hash: {current_schema_hash[:8]}...)")
+                print("⏭️ Skipping generation - generated files are up to date")
+                return
+
     # Create temporary directory for resolved schemas
     temp_dir = Path("temp_resolved_schemas")
     temp_dir.mkdir(exist_ok=True)
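For context, here is roughly how the header of a generated file changes under the new scheme (a reconstruction; the ETag values are taken from the .meta file above, while the hash value is made up for illustration):

# Before (ETag-based metadata, removed by this PR):
#
#   # generated by datamodel-codegen:
#   #   filename: _schemas_v1_core_webhook-payload_json.json
#   # source_etag: W/"68ffaa97-1ce0"
#   # source_last_modified: Mon, 27 Oct 2025 17:23:35 GMT
#
# After (a single stable content hash; the value below is illustrative):
#
#   # generated by datamodel-codegen:
#   #   filename: _schemas_v1_core_webhook-payload_json.json
#   # schema_hash: 3f2a9c81d04e
#
# Separately, generate_schemas_from_json() now reads the first line of the
# package __init__.py, expected to look like:
#
#   # SCHEMA_HASH: <full hash from compute_schema_hash()>
#
# and skips regeneration entirely when it matches the current schema directory.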