175 changes: 175 additions & 0 deletions .codeboarding/AI_Services_Integration.json
@@ -0,0 +1,175 @@
{
"description": "The `pipelex` project leverages a modular architecture to integrate and manage various AI capabilities. The `Cognitive Tools / AI Operations` component, primarily embodied by the `InferenceManager`, acts as the central orchestrator for all AI-related tasks. It provides a high-level interface for requesting services from Large Language Models (LLMs), Optical Character Recognition (OCR), and Image Generation models. This component is responsible for routing these requests to the appropriate AI workers and managing their lifecycle. The `AI Model Adapters / Integrations` component, managed by the `PluginManager`, houses the concrete implementations for interacting with diverse third-party AI service providers. These adapters abstract away the complexities of each provider's API, enabling the `InferenceManager` to interact with different AI models seamlessly without needing to know their underlying specifics. This clear separation of concerns allows for easy extension and integration of new AI models and providers.",
"components": [
{
"name": "Cognitive Tools / AI Operations",
"description": "This component provides a unified, abstract interface for interacting with various AI models (LLMs, OCR, Image Generation) and manages the routing and inference operations. It acts as a high-level orchestrator for AI capabilities within the pipeline, abstracting the underlying AI service providers. It serves as the primary entry point for the rest of the system to request AI services without needing to know the specifics of the underlying models or providers.",
"referenced_source_code": [
{
"qualified_name": "pipelex.cogt.inference.inference_manager.InferenceManager",
"reference_file": "pipelex/cogt/inference/inference_manager.py",
"reference_start_line": 18,
"reference_end_line": 149
}
],
"assigned_files": [
"pipelex/cogt/__init__.py",
"pipelex/cogt/config_cogt.py",
"pipelex/cogt/exceptions.py",
"pipelex/cogt/ocr/__init__.py",
"pipelex/cogt/ocr/ocr_worker_abstract.py",
"pipelex/cogt/ocr/ocr_job_components.py",
"pipelex/cogt/ocr/ocr_input.py",
"pipelex/cogt/ocr/ocr_setting.py",
"pipelex/cogt/ocr/ocr_job.py",
"pipelex/cogt/ocr/ocr_job_factory.py",
"pipelex/cogt/ocr/ocr_output.py",
"pipelex/cogt/ocr/ocr_worker_factory.py",
"pipelex/cogt/inference/__init__.py",
"pipelex/cogt/inference/inference_job_abstract.py",
"pipelex/cogt/inference/inference_manager_protocol.py",
"pipelex/cogt/inference/inference_manager.py",
"pipelex/cogt/inference/inference_worker_abstract.py",
"pipelex/cogt/model_backends/backend.py",
"pipelex/cogt/model_backends/backend_factory.py",
"pipelex/cogt/model_backends/__init__.py",
"pipelex/cogt/model_backends/model_constraints.py",
"pipelex/cogt/model_backends/model_spec_factory.py",
"pipelex/cogt/model_backends/prompting_target.py",
"pipelex/cogt/model_backends/model_lists.py",
"pipelex/cogt/model_backends/backend_library.py",
"pipelex/cogt/model_backends/model_spec.py",
"pipelex/cogt/model_backends/model_type.py",
"pipelex/cogt/image/prompt_image.py",
"pipelex/cogt/image/__init__.py",
"pipelex/cogt/image/prompt_image_factory.py",
"pipelex/cogt/image/generated_image.py",
"pipelex/cogt/usage/__init__.py",
"pipelex/cogt/usage/costs_per_token.py",
"pipelex/cogt/usage/cost_registry.py",
"pipelex/cogt/usage/token_category.py",
"pipelex/cogt/usage/cost_category.py",
"pipelex/cogt/llm/llm_job.py",
"pipelex/cogt/llm/llm_job_components.py",
"pipelex/cogt/llm/llm_prompt_template.py",
"pipelex/cogt/llm/__init__.py",
"pipelex/cogt/llm/llm_worker_abstract.py",
"pipelex/cogt/llm/llm_setting.py",
"pipelex/cogt/llm/llm_worker_factory.py",
"pipelex/cogt/llm/llm_prompt_spec.py",
"pipelex/cogt/llm/llm_worker_internal_abstract.py",
"pipelex/cogt/llm/llm_job_factory.py",
"pipelex/cogt/llm/llm_prompt_factory_abstract.py",
"pipelex/cogt/llm/llm_report.py",
"pipelex/cogt/llm/llm_prompt.py",
"pipelex/cogt/llm/structured_output.py",
"pipelex/cogt/llm/llm_prompt_template_inputs.py",
"pipelex/cogt/model_routing/routing_models.py",
"pipelex/cogt/model_routing/__init__.py",
"pipelex/cogt/model_routing/routing_profile_factory.py",
"pipelex/cogt/model_routing/routing_profile_library.py",
"pipelex/cogt/model_routing/routing_profile.py",
"pipelex/cogt/content_generation/__init__.py",
"pipelex/cogt/content_generation/content_generator_dry.py",
"pipelex/cogt/content_generation/assignment_models.py",
"pipelex/cogt/content_generation/jinja2_generate.py",
"pipelex/cogt/content_generation/llm_generate.py",
"pipelex/cogt/content_generation/content_generator_protocol.py",
"pipelex/cogt/content_generation/ocr_generate.py",
"pipelex/cogt/content_generation/content_generator.py",
"pipelex/cogt/content_generation/img_gen_generate.py",
"pipelex/cogt/img_gen/img_gen_job_components.py",
"pipelex/cogt/img_gen/__init__.py",
"pipelex/cogt/img_gen/img_gen_worker_abstract.py",
"pipelex/cogt/img_gen/img_gen_job.py",
"pipelex/cogt/img_gen/img_gen_worker_factory.py",
"pipelex/cogt/img_gen/img_gen_setting.py",
"pipelex/cogt/img_gen/img_gen_job_factory.py",
"pipelex/cogt/img_gen/img_gen_prompt.py",
"pipelex/cogt/models/__init__.py",
"pipelex/cogt/models/model_manager_abstract.py",
"pipelex/cogt/models/model_manager.py",
"pipelex/cogt/models/model_deck_check.py",
"pipelex/cogt/models/model_deck.py"
],
"can_expand": true
},
{
"name": "AI Model Adapters / Integrations",
"description": "This component contains concrete implementations for integrating with specific third-party AI service providers (e.g., Anthropic, Google, Mistral, AWS Bedrock, OpenAI, FAL). It abstracts away the API specifics, authentication, and data formatting requirements of each external AI service, translating generic AI requests into provider-specific calls.",
"referenced_source_code": [
{
"qualified_name": "pipelex.plugins.plugin_manager.PluginManager",
"reference_file": "pipelex/plugins/plugin_manager.py",
"reference_start_line": 4,
"reference_end_line": 12
}
],
"assigned_files": [
"pipelex/plugins/__init__.py",
"pipelex/plugins/plugin_sdk_registry.py",
"pipelex/plugins/plugin_manager.py",
"pipelex/plugins/openai/__init__.py",
"pipelex/plugins/openai/openai_llm_worker.py",
"pipelex/plugins/openai/openai_llms.py",
"pipelex/plugins/openai/openai_factory.py",
"pipelex/plugins/openai/openai_func.py",
"pipelex/plugins/openai/vertexai_factory.py",
"pipelex/plugins/openai/openai_img_gen_worker.py",
"pipelex/plugins/openai/openai_img_gen_factory.py",
"pipelex/plugins/bedrock/bedrock_llms.py",
"pipelex/plugins/bedrock/__init__.py",
"pipelex/plugins/bedrock/bedrock_client_protocol.py",
"pipelex/plugins/bedrock/bedrock_factory.py",
"pipelex/plugins/bedrock/bedrock_message.py",
"pipelex/plugins/bedrock/bedrock_llm_worker.py",
"pipelex/plugins/bedrock/bedrock_client_aioboto3.py",
"pipelex/plugins/bedrock/bedrock_client_boto3.py",
"pipelex/plugins/anthropic/__init__.py",
"pipelex/plugins/anthropic/anthropic_exceptions.py",
"pipelex/plugins/anthropic/anthropic_llm_worker.py",
"pipelex/plugins/anthropic/anthropic_factory.py",
"pipelex/plugins/anthropic/anthropic_llms.py",
"pipelex/plugins/fal/__init__.py",
"pipelex/plugins/fal/fal_config.py",
"pipelex/plugins/fal/fal_factory.py",
"pipelex/plugins/fal/fal_img_gen_worker.py",
"pipelex/plugins/pypdfium2/__init__.py",
"pipelex/plugins/pypdfium2/pypdfium2_worker.py",
"pipelex/plugins/google/__init__.py",
"pipelex/plugins/google/google_factory.py",
"pipelex/plugins/google/google_llm_worker.py",
"pipelex/plugins/mistral/__init__.py",
"pipelex/plugins/mistral/mistral_utils.py",
"pipelex/plugins/mistral/mistral_factory.py",
"pipelex/plugins/mistral/mistral_exceptions.py",
"pipelex/plugins/mistral/mistral_llms.py",
"pipelex/plugins/mistral/mistral_ocr_worker.py",
"pipelex/plugins/mistral/mistral_llm_worker.py"
],
"can_expand": true
},
{
"name": "Unclassified",
"description": "Component for all unclassified files and utility functions (Utility functions/External Libraries/Dependencies)",
"referenced_source_code": [],
"assigned_files": [
"pipelex/tools/aws/__init__.py",
"pipelex/tools/aws/aws_config.py"
],
"can_expand": false
}
],
"components_relations": [
{
"relation": "delegates to",
"src_name": "Cognitive Tools / AI Operations",
"dst_name": "AI Model Adapters / Integrations"
},
{
"relation": "receives requests from",
"src_name": "AI Model Adapters / Integrations",
"dst_name": "Cognitive Tools / AI Operations"
}
]
}
42 changes: 42 additions & 0 deletions .codeboarding/AI_Services_Integration.md
@@ -0,0 +1,42 @@
```mermaid
graph LR
Cognitive_Tools_AI_Operations["Cognitive Tools / AI Operations"]
AI_Model_Adapters_Integrations["AI Model Adapters / Integrations"]
Unclassified["Unclassified"]
Cognitive_Tools_AI_Operations -- "delegates to" --> AI_Model_Adapters_Integrations
AI_Model_Adapters_Integrations -- "receives requests from" --> Cognitive_Tools_AI_Operations
```

[![CodeBoarding](https://img.shields.io/badge/Generated%20by-CodeBoarding-9cf?style=flat-square)](https://github.com/CodeBoarding/CodeBoarding)[![Demo](https://img.shields.io/badge/Try%20our-Demo-blue?style=flat-square)](https://www.codeboarding.org/diagrams)[![Contact](https://img.shields.io/badge/Contact%20us%20-%20contact@codeboarding.org-lightgrey?style=flat-square)](mailto:contact@codeboarding.org)

## Details

The `pipelex` project leverages a modular architecture to integrate and manage various AI capabilities. The `Cognitive Tools / AI Operations` component, primarily embodied by the `InferenceManager`, acts as the central orchestrator for all AI-related tasks. It provides a high-level interface for requesting services from Large Language Models (LLMs), Optical Character Recognition (OCR), and Image Generation models. This component is responsible for routing these requests to the appropriate AI workers and managing their lifecycle. The `AI Model Adapters / Integrations` component, managed by the `PluginManager`, houses the concrete implementations for interacting with diverse third-party AI service providers. These adapters abstract away the complexities of each provider's API, enabling the `InferenceManager` to interact with different AI models seamlessly without needing to know their underlying specifics. This clear separation of concerns allows for easy extension and integration of new AI models and providers.

### Cognitive Tools / AI Operations
This component provides a unified, abstract interface for interacting with various AI models (LLMs, OCR, Image Generation) and manages the routing and inference operations. It acts as a high-level orchestrator for AI capabilities within the pipeline, abstracting the underlying AI service providers. It serves as the primary entry point for the rest of the system to request AI services without needing to know the specifics of the underlying models or providers.


**Related Classes/Methods**:

- <a href="https://github.com/Pipelex/pipelex/blob/mainpipelex/cogt/inference/inference_manager.py#L18-L149" target="_blank" rel="noopener noreferrer">`pipelex.cogt.inference.inference_manager.InferenceManager`:18-149</a>


### AI Model Adapters / Integrations
This component contains concrete implementations for integrating with specific third-party AI service providers (e.g., Anthropic, Google, Mistral, AWS Bedrock, OpenAI, FAL). It abstracts away the API specifics, authentication, and data formatting requirements of each external AI service, translating generic AI requests into provider-specific calls.


**Related Classes/Methods**:

- <a href="https://github.com/Pipelex/pipelex/blob/mainpipelex/plugins/plugin_manager.py#L4-L12" target="_blank" rel="noopener noreferrer">`pipelex.plugins.plugin_manager.PluginManager`:4-12</a>


### Unclassified
Catch-all component for unclassified files and utility helpers (utility functions, external libraries, dependencies)


**Related Classes/Methods**: _None_



### [FAQ](https://github.com/CodeBoarding/GeneratedOnBoardings/tree/main?tab=readme-ov-file#faq)