27 commits
3ac6e7a  merge into altk (boazdavid, Dec 2, 2025)
ec34c8b  continue merge (boazdavid, Dec 3, 2025)
7e77578  toolguard readme (boazdavid, Dec 3, 2025)
86d252f  readme (boazdavid, Dec 3, 2025)
91936c9  DCO Remediation Commit for DAVID BOAZ <DAVIDBO@il.ibm.com> I, DAVID … (boazdavid, Dec 3, 2025)
640ea37  DCO Remediation Commit for DAVID BOAZ <DAVIDBO@il.ibm.com> (boazdavid, Dec 3, 2025)
6b5c9dc  tg config. Signed-off-by: DAVID BOAZ <DAVIDBO@il.ibm.com> (boazdavid, Dec 4, 2025)
f94d016  tg fix. Signed-off-by: DAVID BOAZ <DAVIDBO@il.ibm.com> (boazdavid, Dec 4, 2025)
d2cd389  tg fix (boazdavid, Dec 4, 2025)
f043eb6  tg fix (boazdavid, Dec 4, 2025)
a4df630  DCO Remediation Commit for DAVID BOAZ <DAVIDBO@il.ibm.com> (boazdavid, Dec 7, 2025)
deaa9a0  Merge branch 'main' into main (boazdavid, Dec 7, 2025)
ec005bc  toolguard oss (boazdavid, Dec 9, 2025)
1e663e1  Merge branch 'main' of https://github.com/boazdavid/agent-lifecycle-t… (boazdavid, Dec 9, 2025)
ce0bd8a  DCO Remediation Commit for DAVID BOAZ <DAVIDBO@il.ibm.com> (boazdavid, Dec 9, 2025)
62ec802  fix PR comments (boazdavid, Dec 14, 2025)
c5864af  Merge remote-tracking branch 'upstream/main' (boazdavid, Dec 21, 2025)
10b325f  fix merge (boazdavid, Dec 21, 2025)
1a6d9bc  fix merge (boazdavid, Dec 21, 2025)
b9c6571  DCO Remediation Commit for DAVID BOAZ <DAVIDBO@il.ibm.com> (boazdavid, Dec 21, 2025)
f4ef5c0  fix test (boazdavid, Dec 25, 2025)
6738fcb  toolguard extra (boazdavid, Dec 25, 2025)
f4dc759  Merge remote-tracking branch 'upstream/main' (boazdavid, Dec 25, 2025)
c54078d  closed models only (boazdavid, Dec 25, 2025)
c1f6a2e  toolguard dev (boazdavid, Dec 29, 2025)
ef00372  readme (boazdavid, Dec 30, 2025)
3f8f9fb  toolguard (boazdavid, Dec 30, 2025)
6 changes: 6 additions & 0 deletions altk/core/llm/base.py
@@ -175,6 +175,12 @@ def provider_class(cls) -> Type[Any]:
Underlying SDK client class, e.g. openai.OpenAI or litellm.LiteLLM.
"""

@abstractmethod
def get_model_id(self) -> str | None:
    """
    Return the model identifier, e.g. "gpt-5.1",
    "meta-llama/llama-4-maverick-17b-128e-instruct-fp8", or "claude-4-sonnet".
    """

@abstractmethod
def _register_methods(self) -> None:
"""
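The hunk above introduces get_model_id() on the abstract LLM client base class; the provider files below each implement it. As a minimal, self-contained sketch of how downstream code might consume the new accessor (the SupportsModelId protocol and describe_model helper here are illustrative, not part of this PR):

from typing import Optional, Protocol


class SupportsModelId(Protocol):
    # Structural type mirroring the accessor added in this PR.
    def get_model_id(self) -> Optional[str]: ...


def describe_model(client: SupportsModelId) -> str:
    """Return a short label for logging; tolerates clients that report no model id."""
    model_id = client.get_model_id()  # may be None, e.g. before a provider is chosen
    return model_id if model_id is not None else "unknown-model"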
4 changes: 4 additions & 0 deletions altk/core/llm/providers/auto_from_env/auto_from_env.py
@@ -46,6 +46,10 @@ def __init__(self) -> None:
def provider_class(cls) -> Type[Any]:
raise NotImplementedError

def get_model_id(self) -> str | None:
    if self._chosen_provider:
        return self._chosen_provider.get_model_id()
    return None

def _register_methods(self) -> None:
if self._chosen_provider:
self._chosen_provider._register_methods()
Expand Down
6 changes: 6 additions & 0 deletions altk/core/llm/providers/ibm_watsonx_ai/ibm_watsonx_ai.py
@@ -127,6 +127,9 @@ def provider_class(cls) -> Type[Any]:
"""
return ModelInference # type: ignore

def get_model_id(self) -> str:
return self.model_name # type: ignore

def _register_methods(self) -> None:
"""
Register how to call watsonx methods:
@@ -471,6 +474,9 @@ def provider_class(cls) -> Type[Any]:
"""
return ModelInference # type: ignore

def get_model_id(self) -> str:
return self.model_name # type: ignore

def _register_methods(self) -> None:
"""
Register how to call watsonx methods for validation:
6 changes: 6 additions & 0 deletions altk/core/llm/providers/litellm/litellm.py
@@ -35,6 +35,9 @@ def __init__(
@classmethod
def provider_class(cls) -> type:
return litellm # type: ignore

def get_model_id(self) -> str:
return self.model_path

def _register_methods(self) -> None:
"""Register LiteLLM methods - only chat and chat_async are supported"""
@@ -302,6 +305,9 @@ def provider_class(cls) -> Type[Any]:
Must be callable with no arguments (per LLMClient __init__ logic).
"""
return litellm # type: ignore

def get_model_id(self) -> str:
return self.model_path

def _register_methods(self) -> None:
"""
7 changes: 7 additions & 0 deletions altk/core/llm/providers/openai/openai.py
@@ -77,6 +77,9 @@ def transform_min_tokens(value: Any, mode: Any) -> dict[str, Any]:
)
self._parameter_mapper.set_custom_transform("min_tokens", transform_min_tokens)

def get_model_id(self) -> str | None:
    if self._other_kwargs:
        return self._other_kwargs.get("model")
    return None

class BaseValidatingOpenAIClient(ValidatingLLMClient):
"""Base class for validating OpenAI and Azure OpenAI clients with shared parameter mapping"""
@@ -146,6 +149,10 @@ def transform_min_tokens(value: Any, mode: Any) -> dict[str, Any]:
)
self._parameter_mapper.set_custom_transform("min_tokens", transform_min_tokens)

def get_model_id(self) -> str | None:
    if self._other_kwargs:
        return self._other_kwargs.get("model")
    return None


@register_llm("openai.sync")
class SyncOpenAIClient(BaseOpenAIClient, BaseLLMClient):
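Both OpenAI client variants resolve the model id from the keyword arguments captured at construction time, so get_model_id() yields a value only when a "model" kwarg was supplied. A toy stand-in (not altk code) illustrating that lookup pattern:

class KwargsBackedClient:
    # Mirrors the _other_kwargs attribute used in the diff above; the class and
    # constructor here are illustrative, not part of this PR.
    def __init__(self, **kwargs: object) -> None:
        self._other_kwargs = dict(kwargs)

    def get_model_id(self) -> str | None:
        if self._other_kwargs:
            model = self._other_kwargs.get("model")
            return model if isinstance(model, str) else None
        return None


print(KwargsBackedClient(model="gpt-4o-mini").get_model_id())  # "gpt-4o-mini"
print(KwargsBackedClient().get_model_id())                     # None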
2 changes: 1 addition & 1 deletion altk/pre_llm/core/types.py
@@ -125,4 +125,4 @@ def get_topics(
n_results: int = 10,
query_kwargs: Dict[str, Any] | None = None,
distance_threshold: float | None = None,
) -> List[RetrievedTopic]: ...
) -> List[RetrievedTopic]: ...
42 changes: 1 addition & 41 deletions altk/pre_tool/core/types.py
@@ -90,44 +90,4 @@ class SPARCReflectionRunOutput(PreToolReflectionRunOutput):

output: SPARCReflectionRunOutputSchema = Field(
default_factory=lambda: SPARCReflectionRunOutputSchema()
)


class ToolGuardBuildInputMetaData(BaseModel):
model_config = ConfigDict(arbitrary_types_allowed=True)
policy_text: str = Field(description="Text of the policy document file")
short1: bool = Field(default=True, description="Run build short or long version. ")
validating_llm_client: LLMClient = Field(
description="ValidatingLLMClient for build time"
)


class ToolGuardBuildInput(ComponentInput):
metadata: ToolGuardBuildInputMetaData = Field(
default_factory=lambda: ToolGuardBuildInputMetaData()
)


class ToolGuardRunInputMetaData(BaseModel):
model_config = ConfigDict(arbitrary_types_allowed=True)
tool_name: str = Field(description="Tool name")
tool_parms: dict = Field(default={}, description="Tool parameters")
llm_client: LLMClient = Field(description="LLMClient for build time")


class ToolGuardRunInput(ComponentInput):
metadata: ToolGuardRunInputMetaData = Field(
default_factory=lambda: ToolGuardRunInputMetaData()
)


class ToolGuardRunOutputMetaData(BaseModel):
error_message: Union[str, bool] = Field(
description="Error string or False if no error occurred"
)


class ToolGuardRunOutput(ComponentOutput):
output: ToolGuardRunOutputMetaData = Field(
default_factory=lambda: ToolGuardRunOutputMetaData()
)
)
Empty file.
81 changes: 0 additions & 81 deletions altk/pre_tool/examples/calculator_example/example_tools.py

This file was deleted.

30 changes: 0 additions & 30 deletions altk/pre_tool/examples/calculator_example/policy_document.md

This file was deleted.

64 changes: 0 additions & 64 deletions altk/pre_tool/examples/calculator_example/run_example.py

This file was deleted.

51 changes: 0 additions & 51 deletions altk/pre_tool/examples/tool_guard_example.py

This file was deleted.
