diff --git a/.github/workflows/python-code-quality.yml b/.github/workflows/python-code-quality.yml index 4139d47156..45d896d309 100644 --- a/.github/workflows/python-code-quality.yml +++ b/.github/workflows/python-code-quality.yml @@ -12,13 +12,13 @@ env: UV_CACHE_DIR: /tmp/.uv-cache jobs: - pre-commit: - name: Checks + pre-commit-hooks: + name: Pre-commit Hooks if: "!cancelled()" strategy: fail-fast: false matrix: - python-version: ["3.10", "3.14"] + python-version: ["3.10"] runs-on: ubuntu-latest continue-on-error: true defaults: @@ -37,16 +37,106 @@ jobs: python-version: ${{ matrix.python-version }} os: ${{ runner.os }} env: - # Configure a constant location for the uv cache UV_CACHE_DIR: /tmp/.uv-cache - uses: actions/cache@v5 with: - path: ~/.cache/pre-commit - key: pre-commit|${{ matrix.python-version }}|${{ hashFiles('python/.pre-commit-config.yaml') }} - - uses: pre-commit/action@v3.0.1 - name: Run Pre-Commit Hooks + path: ~/.cache/prek + key: prek|${{ matrix.python-version }}|${{ hashFiles('python/.pre-commit-config.yaml') }} + - uses: j178/prek-action@v1 + name: Run Pre-commit Hooks (excluding poe-check) + env: + SKIP: poe-check with: - extra_args: --config python/.pre-commit-config.yaml --all-files + extra-args: --cd python --all-files + + package-checks: + name: Package Checks + if: "!cancelled()" + strategy: + fail-fast: false + matrix: + python-version: ["3.10"] + runs-on: ubuntu-latest + continue-on-error: true + defaults: + run: + working-directory: ./python + env: + UV_PYTHON: ${{ matrix.python-version }} + steps: + - uses: actions/checkout@v6 + with: + fetch-depth: 0 + - name: Set up python and install the project + id: python-setup + uses: ./.github/actions/python-setup + with: + python-version: ${{ matrix.python-version }} + os: ${{ runner.os }} + env: + UV_CACHE_DIR: /tmp/.uv-cache + - name: Run fmt, lint, pyright in parallel across packages + run: uv run poe check-packages + + samples-markdown: + name: Samples & Markdown + if: "!cancelled()" + strategy: + fail-fast: false + matrix: + python-version: ["3.10"] + runs-on: ubuntu-latest + continue-on-error: true + defaults: + run: + working-directory: ./python + env: + UV_PYTHON: ${{ matrix.python-version }} + steps: + - uses: actions/checkout@v6 + with: + fetch-depth: 0 + - name: Set up python and install the project + id: python-setup + uses: ./.github/actions/python-setup + with: + python-version: ${{ matrix.python-version }} + os: ${{ runner.os }} + env: + UV_CACHE_DIR: /tmp/.uv-cache + - name: Run samples lint + run: uv run poe samples-lint + - name: Run samples syntax check + run: uv run poe samples-syntax + - name: Run markdown code lint + run: uv run poe markdown-code-lint + + mypy: + name: Mypy Checks + if: "!cancelled()" + strategy: + fail-fast: false + matrix: + python-version: ["3.10"] + runs-on: ubuntu-latest + continue-on-error: true + defaults: + run: + working-directory: ./python + env: + UV_PYTHON: ${{ matrix.python-version }} + steps: + - uses: actions/checkout@v6 + with: + fetch-depth: 0 + - name: Set up python and install the project + id: python-setup + uses: ./.github/actions/python-setup + with: + python-version: ${{ matrix.python-version }} + os: ${{ runner.os }} + env: + UV_CACHE_DIR: /tmp/.uv-cache - name: Run Mypy env: GITHUB_BASE_REF: ${{ github.event.pull_request.base.ref || github.base_ref || 'main' }} diff --git a/python/.pre-commit-config.yaml b/python/.pre-commit-config.yaml index 98de81df06..24de9fc7a0 100644 --- a/python/.pre-commit-config.yaml +++ b/python/.pre-commit-config.yaml @@ 
-1,59 +1,68 @@ -files: ^python/ fail_fast: true +exclude: ^scripts/ repos: - - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v5.0.0 + - repo: builtin hooks: - id: check-toml name: Check TOML files files: \.toml$ - exclude: ^python/packages/lab/cookiecutter-agent-framework-lab/ + exclude: ^packages/lab/cookiecutter-agent-framework-lab/ - id: check-yaml name: Check YAML files files: \.yaml$ - id: check-json name: Check JSON files files: \.json$ - exclude: ^.*\.vscode\/.*|^python/demos/samples/chatkit-integration/frontend/(tsconfig.*\.json|package-lock\.json)$ + exclude: ^.*\.vscode\/.*|^demos/samples/chatkit-integration/frontend/(tsconfig.*\.json|package-lock\.json)$ - id: end-of-file-fixer name: Fix End of File files: \.py$ - exclude: ^python/packages/lab/cookiecutter-agent-framework-lab/ + exclude: ^packages/lab/cookiecutter-agent-framework-lab/ - id: mixed-line-ending name: Check Mixed Line Endings files: \.py$ - exclude: ^python/packages/lab/cookiecutter-agent-framework-lab/ + exclude: ^packages/lab/cookiecutter-agent-framework-lab/ + - id: trailing-whitespace + name: Trim Trailing Whitespace + exclude: ^packages/lab/cookiecutter-agent-framework-lab/ + - id: check-merge-conflict + name: Check Merge Conflicts + - id: detect-private-key + name: Detect Private Keys + - id: check-added-large-files + name: Check Added Large Files + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v6.0.0 + hooks: - id: check-ast name: Check Valid Python Samples types: ["python"] - exclude: ^python/packages/lab/cookiecutter-agent-framework-lab/ + exclude: ^packages/lab/cookiecutter-agent-framework-lab/ - repo: https://github.com/asottile/pyupgrade - rev: v3.20.0 + rev: v3.21.2 hooks: - id: pyupgrade name: Upgrade Python syntax args: [--py310-plus] - exclude: ^python/packages/lab/cookiecutter-agent-framework-lab/ + exclude: ^packages/lab/cookiecutter-agent-framework-lab/ - repo: local hooks: - id: poe-check name: Run checks through Poe - entry: uv --directory ./python run poe pre-commit-check + entry: uv run poe prek-check language: system - files: ^python/ - repo: https://github.com/PyCQA/bandit - rev: 1.8.5 + rev: 1.9.3 hooks: - id: bandit name: Bandit Security Checks - args: ["-c", "python/pyproject.toml"] + args: ["-c", "pyproject.toml"] additional_dependencies: ["bandit[toml]"] - repo: https://github.com/astral-sh/uv-pre-commit # uv version. - rev: 0.7.18 + rev: 0.10.0 hooks: # Update the uv lockfile - id: uv-lock name: Update uv lockfile - files: python/pyproject.toml - args: [--project, python] + files: pyproject.toml diff --git a/python/.vscode/tasks.json b/python/.vscode/tasks.json index 87e340f79d..fc9ce278b3 100644 --- a/python/.vscode/tasks.json +++ b/python/.vscode/tasks.json @@ -9,7 +9,7 @@ "command": "uv", "args": [ "run", - "pre-commit", + "prek", "run", "-a" ], diff --git a/python/AGENTS.md b/python/AGENTS.md index ee440b20ec..62d52608a3 100644 --- a/python/AGENTS.md +++ b/python/AGENTS.md @@ -61,22 +61,9 @@ from agent_framework.azure import AzureOpenAIChatClient, AzureAIAgentClient - **Comments**: Avoid excessive comments; prefer clear code - **Formatting**: Format only files you changed, not the entire codebase -## Sample Structure +## Samples -1. Copyright header: `# Copyright (c) Microsoft. All rights reserved.` -2. Required imports -3. Module docstring: `"""This sample demonstrates..."""` -4. Helper functions -5. Main function(s) demonstrating functionality -6. 
Entry point: `if __name__ == "__main__": asyncio.run(main())` - -When modifying samples, update associated README files in the same or parent folders. - -### Samples Syntax Checking - -Run `uv run poe samples-syntax` to check samples for syntax errors and missing imports from `agent_framework`. This uses a relaxed pyright configuration that validates imports without strict type checking. - -Some samples depend on external packages (e.g., `azure.ai.agentserver.agentframework`, `microsoft_agents`) that are not installed in the dev environment. These are excluded in `pyrightconfig.samples.json`. When adding or modifying these excluded samples, add them to the exclude list and manually verify they have no import errors from `agent_framework` packages by temporarily removing them from the exclude list and running the check. +See [samples/SAMPLE_GUIDELINES.md](samples/SAMPLE_GUIDELINES.md) for sample structure, external dependency handling (PEP 723), and syntax checking instructions. ## Package Documentation diff --git a/python/CODING_STANDARD.md b/python/CODING_STANDARD.md index 32879bc154..16f34be54c 100644 --- a/python/CODING_STANDARD.md +++ b/python/CODING_STANDARD.md @@ -264,6 +264,13 @@ After the package has been released and gained a measure of confidence: 2. Add the package to the `[all]` extra in `packages/core/pyproject.toml` 3. Create a provider folder in `agent_framework/` with lazy loading `__init__.py` +### Versioning and Core Dependency + +All non-core packages declare a lower bound on `agent-framework-core` (e.g., `"agent-framework-core>=1.0.0b260130"`). Follow these rules when bumping versions: + +- **Core version changes**: When `agent-framework-core` is updated with breaking or significant changes and its version is bumped, update the `agent-framework-core>=...` lower bound in every other package's `pyproject.toml` to match the new core version. +- **Non-core version changes**: Non-core packages (connectors, extensions) can have their own versions incremented independently while keeping the existing core lower bound unchanged. Only raise the core lower bound if the non-core package actually depends on new core APIs. + ### Installation Options Connectors are distributed as separate packages and are not imported by default in the core package. Users install the specific connectors they need: diff --git a/python/DEV_SETUP.md b/python/DEV_SETUP.md index c496a5f8c3..f189031468 100644 --- a/python/DEV_SETUP.md +++ b/python/DEV_SETUP.md @@ -64,11 +64,11 @@ uv venv --python $PYTHON_VERSION uv sync --dev # Install all the tools and dependencies uv run poe install -# Install pre-commit hooks -uv run poe pre-commit-install +# Install prek hooks +uv run poe prek-install ``` -Alternatively, you can reinstall the venv, pacakges, dependencies and pre-commit hooks with a single command (but this requires poe in the current env), this is especially useful if you want to switch python versions: +Alternatively, you can reinstall the venv, packages, dependencies and prek hooks with a single command (but this requires poe in the current env); this is especially useful if you want to switch python versions: ```bash uv run poe setup -p 3.13 @@ -144,7 +144,7 @@ To run the same checks that run during a commit and the GitHub Action `Python Co uv run poe check ``` -Ideally you should run these checks before committing any changes, when you install using the instructions above the pre-commit hooks should be installed already.
+Ideally you should run these checks before committing any changes; when you install using the instructions above, the prek hooks should already be installed. ## Code Coverage @@ -196,10 +196,10 @@ and then you can run the following tasks: uv sync --all-extras --dev ``` -After this initial setup, you can use the following tasks to manage your development environment. It is advised to use the following setup command since that also installs the pre-commit hooks. +After this initial setup, you can use the following tasks to manage your development environment. It is advised to use the following setup command since that also installs the prek hooks. #### `setup` -Set up the development environment with a virtual environment, install dependencies and pre-commit hooks: +Set up the development environment with a virtual environment, install dependencies and prek hooks: ```bash uv run poe setup # or with specific Python version @@ -220,36 +220,36 @@ uv run poe venv uv run poe venv --python 3.12 ``` -#### `pre-commit-install` -Install pre-commit hooks: +#### `prek-install` +Install prek hooks: ```bash -uv run poe pre-commit-install +uv run poe prek-install ``` ### Code Quality and Formatting -Each of the following tasks are designed to run against both the main `agent-framework` package and the extension packages, ensuring consistent code quality across the project. +Each of the following tasks runs against both the main `agent-framework` package and the extension packages in parallel, ensuring consistent code quality across the project. #### `fmt` (format) -Format code using ruff: +Format code using ruff (runs in parallel across all packages): ```bash uv run poe fmt ``` #### `lint` -Run linting checks and fix issues: +Run linting checks and fix issues (runs in parallel across all packages): ```bash uv run poe lint ``` #### `pyright` -Run Pyright type checking: +Run Pyright type checking (runs in parallel across all packages): ```bash uv run poe pyright ``` #### `mypy` -Run MyPy type checking: +Run MyPy type checking (runs in parallel across all packages): ```bash uv run poe mypy ``` @@ -270,8 +270,14 @@ uv run poe markdown-code-lint ### Comprehensive Checks +#### `check-packages` +Run all package-level quality checks (format, lint, pyright, mypy) in parallel across all packages. This runs the full cross-product of (package × check) concurrently: +```bash +uv run poe check-packages +``` + #### `check` -Run all quality checks (format, lint, pyright, mypy, test, markdown lint): +Run all quality checks including package checks, samples, tests and markdown lint: ```bash uv run poe check ``` @@ -279,7 +285,7 @@ uv run poe check ### Testing #### `test` -Run unit tests with coverage by invoking the `test` task in each package sequentially: +Run unit tests with coverage by invoking the `test` task in each package in parallel: ```bash uv run poe test ``` @@ -325,10 +331,10 @@ Publish packages to PyPI: uv run poe publish ``` -## Pre-commit Hooks +## Prek Hooks -Pre-commit hooks run automatically on commit and execute a subset of the checks on changed files only. You can also run all checks using pre-commit directly: +Prek hooks run automatically on commit and execute a subset of the checks on changed files only. Package-level checks (fmt, lint, pyright) run in parallel but only for packages with changed files. Markdown and sample checks are skipped when no relevant files were changed. If the `core` package is changed, all packages are checked.
You can also run all checks using prek directly: ```bash -uv run pre-commit run -a +uv run prek run -a ``` diff --git a/python/devsetup.sh b/python/devsetup.sh index 1f9d0d5549..85f7bab3dd 100644 --- a/python/devsetup.sh +++ b/python/devsetup.sh @@ -6,5 +6,5 @@ uv venv --python $PYTHON_VERSION uv sync --dev # Install all the tools and dependencies uv run poe install -# Install pre-commit hooks -uv run poe pre-commit-install +# Install prek hooks +uv run poe prek-install diff --git a/python/packages/a2a/agent_framework_a2a/_agent.py b/python/packages/a2a/agent_framework_a2a/_agent.py index 10341bc078..335e4c1f68 100644 --- a/python/packages/a2a/agent_framework_a2a/_agent.py +++ b/python/packages/a2a/agent_framework_a2a/_agent.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import base64 import json import re @@ -169,7 +171,7 @@ def _create_timeout_config(self, timeout: float | httpx.Timeout | None) -> httpx msg = f"Invalid timeout type: {type(timeout)}. Expected float, httpx.Timeout, or None." raise TypeError(msg) - async def __aenter__(self) -> "A2AAgent": + async def __aenter__(self) -> A2AAgent: """Async context manager entry.""" return self diff --git a/python/packages/a2a/pyproject.toml b/python/packages/a2a/pyproject.toml index d4de542199..dbf59aea85 100644 --- a/python/packages/a2a/pyproject.toml +++ b/python/packages/a2a/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ "Typing :: Typed", ] dependencies = [ - "agent-framework-core", + "agent-framework-core>=1.0.0b260130", "a2a-sdk>=0.3.5", ] diff --git a/python/packages/ag-ui/README.md b/python/packages/ag-ui/README.md index ba28068bd5..d2ff4c3d10 100644 --- a/python/packages/ag-ui/README.md +++ b/python/packages/ag-ui/README.md @@ -40,7 +40,6 @@ add_agent_framework_fastapi_endpoint(app, agent, "/") ```python import asyncio -from agent_framework import TextContent from agent_framework.ag_ui import AGUIChatClient async def main(): @@ -48,7 +47,7 @@ async def main(): # Stream responses async for update in client.get_response("Hello!", stream=True): for content in update.contents: - if isinstance(content, TextContent): + if content.type == "text" and content.text: print(content.text, end="", flush=True) print() diff --git a/python/packages/ag-ui/agent_framework_ag_ui/_client.py b/python/packages/ag-ui/agent_framework_ag_ui/_client.py index 8a9755fad9..d04550f9c9 100644 --- a/python/packages/ag-ui/agent_framework_ag_ui/_client.py +++ b/python/packages/ag-ui/agent_framework_ag_ui/_client.py @@ -2,6 +2,8 @@ """AG-UI Chat Client implementation.""" +from __future__ import annotations + import json import logging import sys @@ -216,7 +218,7 @@ def __init__( http_client: httpx.AsyncClient | None = None, timeout: float = 60.0, additional_properties: dict[str, Any] | None = None, - middleware: Sequence["ChatAndFunctionMiddlewareTypes"] | None = None, + middleware: Sequence[ChatAndFunctionMiddlewareTypes] | None = None, function_invocation_configuration: FunctionInvocationConfiguration | None = None, **kwargs: Any, ) -> None: diff --git a/python/packages/ag-ui/agent_framework_ag_ui/_endpoint.py b/python/packages/ag-ui/agent_framework_ag_ui/_endpoint.py index 519a83c39d..b97b67a019 100644 --- a/python/packages/ag-ui/agent_framework_ag_ui/_endpoint.py +++ b/python/packages/ag-ui/agent_framework_ag_ui/_endpoint.py @@ -2,6 +2,8 @@ """FastAPI endpoint creation for AG-UI agents.""" +from __future__ import annotations + import copy import logging from collections.abc import AsyncGenerator, 
Sequence @@ -77,7 +79,7 @@ async def agent_endpoint(request_body: AGUIRequest) -> StreamingResponse | dict[ ) logger.info(f"Received request at {path}: {input_data.get('run_id', 'no-run-id')}") - async def event_generator() -> AsyncGenerator[str, None]: + async def event_generator() -> AsyncGenerator[str]: encoder = EventEncoder() event_count = 0 async for event in wrapped_agent.run_agent(input_data): diff --git a/python/packages/ag-ui/agent_framework_ag_ui/_event_converters.py b/python/packages/ag-ui/agent_framework_ag_ui/_event_converters.py index 7b7e99e8d4..d59c652b74 100644 --- a/python/packages/ag-ui/agent_framework_ag_ui/_event_converters.py +++ b/python/packages/ag-ui/agent_framework_ag_ui/_event_converters.py @@ -2,6 +2,8 @@ """Event converter for AG-UI protocol events to Agent Framework types.""" +from __future__ import annotations + from typing import Any from agent_framework import ( diff --git a/python/packages/ag-ui/agent_framework_ag_ui/_http_service.py b/python/packages/ag-ui/agent_framework_ag_ui/_http_service.py index 3c5b288454..d694f558cc 100644 --- a/python/packages/ag-ui/agent_framework_ag_ui/_http_service.py +++ b/python/packages/ag-ui/agent_framework_ag_ui/_http_service.py @@ -2,6 +2,8 @@ """HTTP service for AG-UI protocol communication.""" +from __future__ import annotations + import json import logging from collections.abc import AsyncIterable @@ -148,7 +150,7 @@ async def close(self) -> None: if self._owns_client and self.http_client: await self.http_client.aclose() - async def __aenter__(self) -> "AGUIHttpService": + async def __aenter__(self) -> AGUIHttpService: """Enter async context manager.""" return self diff --git a/python/packages/ag-ui/agent_framework_ag_ui/_message_adapters.py b/python/packages/ag-ui/agent_framework_ag_ui/_message_adapters.py index bf1f3d914f..3f35572f78 100644 --- a/python/packages/ag-ui/agent_framework_ag_ui/_message_adapters.py +++ b/python/packages/ag-ui/agent_framework_ag_ui/_message_adapters.py @@ -2,6 +2,8 @@ """Message format conversion between AG-UI and Agent Framework.""" +from __future__ import annotations + import json import logging from typing import Any, cast diff --git a/python/packages/ag-ui/agent_framework_ag_ui/_orchestration/_helpers.py b/python/packages/ag-ui/agent_framework_ag_ui/_orchestration/_helpers.py index f12430a086..277b5effce 100644 --- a/python/packages/ag-ui/agent_framework_ag_ui/_orchestration/_helpers.py +++ b/python/packages/ag-ui/agent_framework_ag_ui/_orchestration/_helpers.py @@ -6,6 +6,8 @@ This module retains utilities that may be useful for testing or extensions. 
""" +from __future__ import annotations + import json import logging from typing import Any diff --git a/python/packages/ag-ui/agent_framework_ag_ui/_orchestration/_predictive_state.py b/python/packages/ag-ui/agent_framework_ag_ui/_orchestration/_predictive_state.py index 8662036bbf..216b1ca662 100644 --- a/python/packages/ag-ui/agent_framework_ag_ui/_orchestration/_predictive_state.py +++ b/python/packages/ag-ui/agent_framework_ag_ui/_orchestration/_predictive_state.py @@ -2,6 +2,8 @@ """Predictive state handling utilities.""" +from __future__ import annotations + import json import logging import re diff --git a/python/packages/ag-ui/agent_framework_ag_ui/_orchestration/_tooling.py b/python/packages/ag-ui/agent_framework_ag_ui/_orchestration/_tooling.py index f64f8df817..069622f490 100644 --- a/python/packages/ag-ui/agent_framework_ag_ui/_orchestration/_tooling.py +++ b/python/packages/ag-ui/agent_framework_ag_ui/_orchestration/_tooling.py @@ -2,6 +2,8 @@ """Tool handling helpers.""" +from __future__ import annotations + import logging from typing import TYPE_CHECKING, Any @@ -29,7 +31,7 @@ def _collect_mcp_tool_functions(mcp_tools: list[Any]) -> list[Any]: return functions -def collect_server_tools(agent: "SupportsAgentRun") -> list[Any]: +def collect_server_tools(agent: SupportsAgentRun) -> list[Any]: """Collect server tools from an agent. This includes both regular tools from default_options and MCP tools. @@ -64,7 +66,7 @@ def collect_server_tools(agent: "SupportsAgentRun") -> list[Any]: return server_tools -def register_additional_client_tools(agent: "SupportsAgentRun", client_tools: list[Any] | None) -> None: +def register_additional_client_tools(agent: SupportsAgentRun, client_tools: list[Any] | None) -> None: """Register client tools as additional declaration-only tools to avoid server execution. Args: diff --git a/python/packages/ag-ui/agent_framework_ag_ui/_run.py b/python/packages/ag-ui/agent_framework_ag_ui/_run.py index 094c119b45..d47fdc4d67 100644 --- a/python/packages/ag-ui/agent_framework_ag_ui/_run.py +++ b/python/packages/ag-ui/agent_framework_ag_ui/_run.py @@ -2,6 +2,8 @@ """Simplified AG-UI orchestration - single linear flow.""" +from __future__ import annotations + import json import logging import uuid @@ -742,8 +744,8 @@ def _build_messages_snapshot( async def run_agent_stream( input_data: dict[str, Any], agent: SupportsAgentRun, - config: "AgentConfig", -) -> "AsyncGenerator[BaseEvent, None]": + config: AgentConfig, +) -> AsyncGenerator[BaseEvent]: """Run agent and yield AG-UI events. This is the single entry point for all AG-UI agent runs. 
It follows a simple diff --git a/python/packages/ag-ui/agent_framework_ag_ui/_utils.py b/python/packages/ag-ui/agent_framework_ag_ui/_utils.py index 98a0fd841d..356ad7da96 100644 --- a/python/packages/ag-ui/agent_framework_ag_ui/_utils.py +++ b/python/packages/ag-ui/agent_framework_ag_ui/_utils.py @@ -2,6 +2,8 @@ """Utility functions for AG-UI integration.""" +from __future__ import annotations + import copy import json import uuid diff --git a/python/packages/ag-ui/agent_framework_ag_ui_examples/agents/document_writer_agent.py b/python/packages/ag-ui/agent_framework_ag_ui_examples/agents/document_writer_agent.py index 221b167fa8..3a74af346a 100644 --- a/python/packages/ag-ui/agent_framework_ag_ui_examples/agents/document_writer_agent.py +++ b/python/packages/ag-ui/agent_framework_ag_ui_examples/agents/document_writer_agent.py @@ -2,6 +2,8 @@ """Example agent demonstrating predictive state updates with document writing.""" +from __future__ import annotations + from agent_framework import ChatAgent, ChatClientProtocol, tool from agent_framework.ag_ui import AgentFrameworkAgent diff --git a/python/packages/ag-ui/agent_framework_ag_ui_examples/agents/recipe_agent.py b/python/packages/ag-ui/agent_framework_ag_ui_examples/agents/recipe_agent.py index 39f3803f9a..2d9bb066ba 100644 --- a/python/packages/ag-ui/agent_framework_ag_ui_examples/agents/recipe_agent.py +++ b/python/packages/ag-ui/agent_framework_ag_ui_examples/agents/recipe_agent.py @@ -2,6 +2,8 @@ """Recipe agent example demonstrating shared state management (Feature 3).""" +from __future__ import annotations + from enum import Enum from typing import Any diff --git a/python/packages/ag-ui/agent_framework_ag_ui_examples/agents/task_steps_agent.py b/python/packages/ag-ui/agent_framework_ag_ui_examples/agents/task_steps_agent.py index dfd4aea73b..2fe79d063f 100644 --- a/python/packages/ag-ui/agent_framework_ag_ui_examples/agents/task_steps_agent.py +++ b/python/packages/ag-ui/agent_framework_ag_ui_examples/agents/task_steps_agent.py @@ -2,6 +2,8 @@ """Task steps agent demonstrating agentic generative UI (Feature 6).""" +from __future__ import annotations + import asyncio from collections.abc import AsyncGenerator from enum import Enum @@ -128,7 +130,7 @@ def __getattr__(self, name: str) -> Any: """Delegate all other attribute access to base agent.""" return getattr(self._base_agent, name) - async def run_agent(self, input_data: dict[str, Any]) -> AsyncGenerator[Any, None]: + async def run_agent(self, input_data: dict[str, Any]) -> AsyncGenerator[Any]: """Run the agent and then simulate step execution.""" import logging import uuid diff --git a/python/packages/ag-ui/agent_framework_ag_ui_examples/agents/ui_generator_agent.py b/python/packages/ag-ui/agent_framework_ag_ui_examples/agents/ui_generator_agent.py index 01b333e7f4..33848c379c 100644 --- a/python/packages/ag-ui/agent_framework_ag_ui_examples/agents/ui_generator_agent.py +++ b/python/packages/ag-ui/agent_framework_ag_ui_examples/agents/ui_generator_agent.py @@ -2,6 +2,8 @@ """Example agent demonstrating Tool-based Generative UI (Feature 5).""" +from __future__ import annotations + import sys from typing import TYPE_CHECKING, Any, TypedDict diff --git a/python/packages/ag-ui/agent_framework_ag_ui_examples/agents/weather_agent.py b/python/packages/ag-ui/agent_framework_ag_ui_examples/agents/weather_agent.py index 269a732e92..f8b03c2d0e 100644 --- a/python/packages/ag-ui/agent_framework_ag_ui_examples/agents/weather_agent.py +++ 
b/python/packages/ag-ui/agent_framework_ag_ui_examples/agents/weather_agent.py @@ -2,6 +2,8 @@ """Weather agent example demonstrating backend tool rendering.""" +from __future__ import annotations + from typing import Any from agent_framework import ChatAgent, ChatClientProtocol, tool diff --git a/python/packages/ag-ui/agent_framework_ag_ui_examples/server/main.py b/python/packages/ag-ui/agent_framework_ag_ui_examples/server/main.py index ed4d166941..8c2f4be261 100644 --- a/python/packages/ag-ui/agent_framework_ag_ui_examples/server/main.py +++ b/python/packages/ag-ui/agent_framework_ag_ui_examples/server/main.py @@ -2,6 +2,8 @@ """Example FastAPI server with AG-UI endpoints.""" +from __future__ import annotations + import logging import os from typing import cast diff --git a/python/packages/ag-ui/getting_started/README.md b/python/packages/ag-ui/getting_started/README.md index 9cccdaace1..9421935a4d 100644 --- a/python/packages/ag-ui/getting_started/README.md +++ b/python/packages/ag-ui/getting_started/README.md @@ -292,7 +292,6 @@ Create a file named `client.py`: import asyncio import os -from agent_framework import TextContent from agent_framework.ag_ui import AGUIChatClient @@ -333,7 +332,7 @@ async def main(): # Stream text content as it arrives for content in update.contents: - if isinstance(content, TextContent) and content.text: + if content.type == "text" and content.text: print(content.text, end="", flush=True) print() # New line after response diff --git a/python/packages/ag-ui/getting_started/client_advanced.py b/python/packages/ag-ui/getting_started/client_advanced.py index 82af763918..65f5e896bf 100644 --- a/python/packages/ag-ui/getting_started/client_advanced.py +++ b/python/packages/ag-ui/getting_started/client_advanced.py @@ -9,6 +9,8 @@ - Error handling """ +from __future__ import annotations + import asyncio import os from typing import cast diff --git a/python/packages/ag-ui/getting_started/client_with_agent.py b/python/packages/ag-ui/getting_started/client_with_agent.py index 27bf08503a..5d9917327b 100644 --- a/python/packages/ag-ui/getting_started/client_with_agent.py +++ b/python/packages/ag-ui/getting_started/client_with_agent.py @@ -18,6 +18,8 @@ This matches .NET pattern: thread maintains state, tools execute on appropriate side. """ +from __future__ import annotations + import asyncio import logging import os diff --git a/python/packages/ag-ui/getting_started/server.py b/python/packages/ag-ui/getting_started/server.py index c09e415893..fa3f21c3e7 100644 --- a/python/packages/ag-ui/getting_started/server.py +++ b/python/packages/ag-ui/getting_started/server.py @@ -2,6 +2,8 @@ """AG-UI server example with server-side tools.""" +from __future__ import annotations + import logging import os diff --git a/python/packages/ag-ui/pyproject.toml b/python/packages/ag-ui/pyproject.toml index 3f9af735c9..128c684d35 100644 --- a/python/packages/ag-ui/pyproject.toml +++ b/python/packages/ag-ui/pyproject.toml @@ -22,7 +22,7 @@ classifiers = [ "Typing :: Typed", ] dependencies = [ - "agent-framework-core", + "agent-framework-core>=1.0.0b260130", "ag-ui-protocol>=0.1.9", "fastapi>=0.115.0", "uvicorn>=0.30.0" diff --git a/python/packages/anthropic/agent_framework_anthropic/_chat_client.py b/python/packages/anthropic/agent_framework_anthropic/_chat_client.py index 5f3dfa83c5..641eb52444 100644 --- a/python/packages/anthropic/agent_framework_anthropic/_chat_client.py +++ b/python/packages/anthropic/agent_framework_anthropic/_chat_client.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. 
All rights reserved. +from __future__ import annotations + import sys from collections.abc import AsyncIterable, Awaitable, Mapping, MutableMapping, Sequence from typing import Any, ClassVar, Final, Generic, Literal, TypedDict diff --git a/python/packages/anthropic/pyproject.toml b/python/packages/anthropic/pyproject.toml index 8935476ed5..7106f8adb0 100644 --- a/python/packages/anthropic/pyproject.toml +++ b/python/packages/anthropic/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ "Typing :: Typed", ] dependencies = [ - "agent-framework-core", + "agent-framework-core>=1.0.0b260130", "anthropic>=0.70.0,<1", ] diff --git a/python/packages/azure-ai-search/agent_framework_azure_ai_search/_search_provider.py b/python/packages/azure-ai-search/agent_framework_azure_ai_search/_search_provider.py index e40038380a..734d6c08e7 100644 --- a/python/packages/azure-ai-search/agent_framework_azure_ai_search/_search_provider.py +++ b/python/packages/azure-ai-search/agent_framework_azure_ai_search/_search_provider.py @@ -1,6 +1,8 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import sys from collections.abc import Awaitable, Callable, MutableSequence from typing import TYPE_CHECKING, Any, ClassVar, Literal diff --git a/python/packages/azure-ai-search/pyproject.toml b/python/packages/azure-ai-search/pyproject.toml index fb4763dfd8..cfc7c4786e 100644 --- a/python/packages/azure-ai-search/pyproject.toml +++ b/python/packages/azure-ai-search/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ "Typing :: Typed", ] dependencies = [ - "agent-framework-core", + "agent-framework-core>=1.0.0b260130", "azure-search-documents==11.7.0b2", ] diff --git a/python/packages/azure-ai/agent_framework_azure_ai/_agent_provider.py b/python/packages/azure-ai/agent_framework_azure_ai/_agent_provider.py index d30a43910d..afeb85ec86 100644 --- a/python/packages/azure-ai/agent_framework_azure_ai/_agent_provider.py +++ b/python/packages/azure-ai/agent_framework_azure_ai/_agent_provider.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import sys from collections.abc import Callable, MutableMapping, Sequence from typing import TYPE_CHECKING, Any, Generic, cast @@ -141,7 +143,7 @@ def __init__( ) self._should_close_client = True - async def __aenter__(self) -> "Self": + async def __aenter__(self) -> Self: """Async context manager entry.""" return self @@ -177,7 +179,7 @@ async def create_agent( default_options: TOptions_co | None = None, middleware: Sequence[MiddlewareTypes] | None = None, context_provider: ContextProvider | None = None, - ) -> "ChatAgent[TOptions_co]": + ) -> ChatAgent[TOptions_co]: """Create a new agent on the Azure AI service and return a ChatAgent. This method creates a persistent agent on the Azure AI service with the specified @@ -274,7 +276,7 @@ async def get_agent( default_options: TOptions_co | None = None, middleware: Sequence[MiddlewareTypes] | None = None, context_provider: ContextProvider | None = None, - ) -> "ChatAgent[TOptions_co]": + ) -> ChatAgent[TOptions_co]: """Retrieve an existing agent from the service and return a ChatAgent. This method fetches an agent by ID from the Azure AI service @@ -330,7 +332,7 @@ def as_agent( default_options: TOptions_co | None = None, middleware: Sequence[MiddlewareTypes] | None = None, context_provider: ContextProvider | None = None, - ) -> "ChatAgent[TOptions_co]": + ) -> ChatAgent[TOptions_co]: """Wrap an existing Agent SDK object as a ChatAgent without making HTTP calls. 
Use this method when you already have an Agent object from a previous @@ -383,7 +385,7 @@ def _to_chat_agent_from_agent( default_options: TOptions_co | None = None, middleware: Sequence[MiddlewareTypes] | None = None, context_provider: ContextProvider | None = None, - ) -> "ChatAgent[TOptions_co]": + ) -> ChatAgent[TOptions_co]: """Create a ChatAgent from an Agent SDK object. Args: diff --git a/python/packages/azure-ai/agent_framework_azure_ai/_chat_client.py b/python/packages/azure-ai/agent_framework_azure_ai/_chat_client.py index d37975e1fb..dc013e30d7 100644 --- a/python/packages/azure-ai/agent_framework_azure_ai/_chat_client.py +++ b/python/packages/azure-ai/agent_framework_azure_ai/_chat_client.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import ast import json import os @@ -346,7 +348,7 @@ class MyOptions(AzureAIAgentOptions, total=False): self._should_close_client = should_close_client # Track whether we should close client connection self._agent_definition: Agent | None = None # Cached definition for existing agent - async def __aenter__(self) -> "Self": + async def __aenter__(self) -> Self: """Async context manager entry.""" return self @@ -1047,9 +1049,7 @@ async def _prepare_tool_definitions_and_resources( return tool_definitions - def _prepare_mcp_resources( - self, tools: Sequence["ToolProtocol | MutableMapping[str, Any]"] - ) -> list[dict[str, Any]]: + def _prepare_mcp_resources(self, tools: Sequence[ToolProtocol | MutableMapping[str, Any]]) -> list[dict[str, Any]]: """Prepare MCP tool resources for approval mode configuration.""" mcp_tools = [tool for tool in tools if isinstance(tool, HostedMCPTool)] if not mcp_tools: @@ -1142,7 +1142,7 @@ def _prepare_messages( return additional_messages, instructions, required_action_results async def _prepare_tools_for_azure_ai( - self, tools: Sequence["ToolProtocol | MutableMapping[str, Any]"], run_options: dict[str, Any] | None = None + self, tools: Sequence[ToolProtocol | MutableMapping[str, Any]], run_options: dict[str, Any] | None = None ) -> list[ToolDefinition | dict[str, Any]]: """Prepare tool definitions for the Azure AI Agents API.""" tool_definitions: list[ToolDefinition | dict[str, Any]] = [] diff --git a/python/packages/azure-ai/agent_framework_azure_ai/_client.py b/python/packages/azure-ai/agent_framework_azure_ai/_client.py index 8c0043808e..2dd3e8cc8b 100644 --- a/python/packages/azure-ai/agent_framework_azure_ai/_client.py +++ b/python/packages/azure-ai/agent_framework_azure_ai/_client.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import sys from collections.abc import Callable, Mapping, MutableMapping, Sequence from typing import Any, ClassVar, Generic, TypedDict, TypeVar, cast @@ -295,7 +297,7 @@ async def configure_azure_monitor( # Complete setup with core observability enable_instrumentation(enable_sensitive_data=enable_sensitive_data) - async def __aenter__(self) -> "Self": + async def __aenter__(self) -> Self: """Async context manager entry.""" return self diff --git a/python/packages/azure-ai/agent_framework_azure_ai/_project_provider.py b/python/packages/azure-ai/agent_framework_azure_ai/_project_provider.py index 0a5e2f79f6..9c20e08b6c 100644 --- a/python/packages/azure-ai/agent_framework_azure_ai/_project_provider.py +++ b/python/packages/azure-ai/agent_framework_azure_ai/_project_provider.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. 
+from __future__ import annotations + import sys from collections.abc import Callable, MutableMapping, Sequence from typing import Any, Generic @@ -168,7 +170,7 @@ async def create_agent( default_options: TOptions_co | None = None, middleware: Sequence[MiddlewareTypes] | None = None, context_provider: ContextProvider | None = None, - ) -> "ChatAgent[TOptions_co]": + ) -> ChatAgent[TOptions_co]: """Create a new agent on the Azure AI service and return a local ChatAgent wrapper. Args: @@ -270,7 +272,7 @@ async def get_agent( default_options: TOptions_co | None = None, middleware: Sequence[MiddlewareTypes] | None = None, context_provider: ContextProvider | None = None, - ) -> "ChatAgent[TOptions_co]": + ) -> ChatAgent[TOptions_co]: """Retrieve an existing agent from the Azure AI service and return a local ChatAgent wrapper. You must provide either name or reference. Use `as_agent()` if you already have @@ -330,7 +332,7 @@ def as_agent( default_options: TOptions_co | None = None, middleware: Sequence[MiddlewareTypes] | None = None, context_provider: ContextProvider | None = None, - ) -> "ChatAgent[TOptions_co]": + ) -> ChatAgent[TOptions_co]: """Wrap an SDK agent version object into a ChatAgent without making HTTP calls. Use this when you already have an AgentVersionDetails from a previous API call. @@ -370,7 +372,7 @@ def _to_chat_agent_from_details( default_options: TOptions_co | None = None, middleware: Sequence[MiddlewareTypes] | None = None, context_provider: ContextProvider | None = None, - ) -> "ChatAgent[TOptions_co]": + ) -> ChatAgent[TOptions_co]: """Create a ChatAgent from an AgentVersionDetails. Args: diff --git a/python/packages/azure-ai/agent_framework_azure_ai/_shared.py b/python/packages/azure-ai/agent_framework_azure_ai/_shared.py index 1cf33b24d8..065a7d5af2 100644 --- a/python/packages/azure-ai/agent_framework_azure_ai/_shared.py +++ b/python/packages/azure-ai/agent_framework_azure_ai/_shared.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import os from collections.abc import Mapping, MutableMapping, Sequence from typing import Any, ClassVar, Literal, cast diff --git a/python/packages/azure-ai/pyproject.toml b/python/packages/azure-ai/pyproject.toml index bf8e969519..4efe8ed0b7 100644 --- a/python/packages/azure-ai/pyproject.toml +++ b/python/packages/azure-ai/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ "Typing :: Typed", ] dependencies = [ - "agent-framework-core", + "agent-framework-core>=1.0.0b260130", "azure-ai-projects >= 2.0.0b3", "azure-ai-agents == 1.2.0b5", "aiohttp", diff --git a/python/packages/azurefunctions/agent_framework_azurefunctions/_app.py b/python/packages/azurefunctions/agent_framework_azurefunctions/_app.py index 148602375f..724b95015b 100644 --- a/python/packages/azurefunctions/agent_framework_azurefunctions/_app.py +++ b/python/packages/azurefunctions/agent_framework_azurefunctions/_app.py @@ -6,6 +6,8 @@ with Azure Durable Entities, enabling stateful and durable AI agent execution. """ +from __future__ import annotations + import json import re import uuid diff --git a/python/packages/azurefunctions/agent_framework_azurefunctions/_entities.py b/python/packages/azurefunctions/agent_framework_azurefunctions/_entities.py index 5bf1282687..23ea1e0f5c 100644 --- a/python/packages/azurefunctions/agent_framework_azurefunctions/_entities.py +++ b/python/packages/azurefunctions/agent_framework_azurefunctions/_entities.py @@ -7,6 +7,8 @@ allows for long-running agent conversations. 
""" +from __future__ import annotations + import asyncio from collections.abc import Callable from typing import Any, cast diff --git a/python/packages/azurefunctions/pyproject.toml b/python/packages/azurefunctions/pyproject.toml index 0b1a8b3797..45b8bbdce9 100644 --- a/python/packages/azurefunctions/pyproject.toml +++ b/python/packages/azurefunctions/pyproject.toml @@ -22,7 +22,7 @@ classifiers = [ "Typing :: Typed", ] dependencies = [ - "agent-framework-core", + "agent-framework-core>=1.0.0b260130", "agent-framework-durabletask", "azure-functions", "azure-functions-durable", diff --git a/python/packages/bedrock/agent_framework_bedrock/_chat_client.py b/python/packages/bedrock/agent_framework_bedrock/_chat_client.py index 63e779291c..ca851269dc 100644 --- a/python/packages/bedrock/agent_framework_bedrock/_chat_client.py +++ b/python/packages/bedrock/agent_framework_bedrock/_chat_client.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import asyncio import json import sys diff --git a/python/packages/bedrock/pyproject.toml b/python/packages/bedrock/pyproject.toml index aa864223a9..f424cbef6f 100644 --- a/python/packages/bedrock/pyproject.toml +++ b/python/packages/bedrock/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ "Typing :: Typed", ] dependencies = [ - "agent-framework-core", + "agent-framework-core>=1.0.0b260130", "boto3>=1.35.0,<2.0.0", "botocore>=1.35.0,<2.0.0", ] diff --git a/python/packages/chatkit/agent_framework_chatkit/_converter.py b/python/packages/chatkit/agent_framework_chatkit/_converter.py index d423e112cb..ca5127e8c7 100644 --- a/python/packages/chatkit/agent_framework_chatkit/_converter.py +++ b/python/packages/chatkit/agent_framework_chatkit/_converter.py @@ -2,6 +2,8 @@ """Converter utilities for converting ChatKit thread items to Agent Framework messages.""" +from __future__ import annotations + import logging import sys from collections.abc import Awaitable, Callable, Sequence diff --git a/python/packages/chatkit/pyproject.toml b/python/packages/chatkit/pyproject.toml index 632bf5aa61..89e95b1ab7 100644 --- a/python/packages/chatkit/pyproject.toml +++ b/python/packages/chatkit/pyproject.toml @@ -22,7 +22,7 @@ classifiers = [ "Typing :: Typed", ] dependencies = [ - "agent-framework-core", + "agent-framework-core>=1.0.0b260130", "openai-chatkit>=1.4.0,<2.0.0", ] diff --git a/python/packages/claude/agent_framework_claude/_agent.py b/python/packages/claude/agent_framework_claude/_agent.py index 77893cd165..579c2187ef 100644 --- a/python/packages/claude/agent_framework_claude/_agent.py +++ b/python/packages/claude/agent_framework_claude/_agent.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import contextlib import sys from collections.abc import AsyncIterable, Awaitable, Callable, MutableMapping, Sequence @@ -100,13 +102,13 @@ class ClaudeAgentOptions(TypedDict, total=False): disallowed_tools: list[str] """Blocklist of tools. 
Claude cannot use these tools.""" - mcp_servers: dict[str, "McpServerConfig"] + mcp_servers: dict[str, McpServerConfig] """MCP server configurations for external tools.""" - permission_mode: "PermissionMode" + permission_mode: PermissionMode """Permission handling mode ("default", "acceptEdits", "plan", "bypassPermissions").""" - can_use_tool: "CanUseTool" + can_use_tool: CanUseTool """Permission callback for tool use.""" max_turns: int @@ -115,16 +117,16 @@ class ClaudeAgentOptions(TypedDict, total=False): max_budget_usd: float """Budget limit in USD.""" - hooks: dict[str, list["HookMatcher"]] + hooks: dict[str, list[HookMatcher]] """Pre/post tool hooks.""" add_dirs: list[str | Path] """Additional directories to add to context.""" - sandbox: "SandboxSettings" + sandbox: SandboxSettings """Sandbox configuration for bash isolation.""" - agents: dict[str, "AgentDefinition"] + agents: dict[str, AgentDefinition] """Custom agent definitions.""" output_format: dict[str, Any] @@ -133,7 +135,7 @@ class ClaudeAgentOptions(TypedDict, total=False): enable_file_checkpointing: bool """Enable file checkpointing for rewind.""" - betas: list["SdkBeta"] + betas: list[SdkBeta] """Beta features to enable.""" @@ -328,7 +330,7 @@ def _normalize_tools( normalized = normalize_tools(tool) self._custom_tools.extend(normalized) - async def __aenter__(self) -> "ClaudeAgent[TOptions]": + async def __aenter__(self) -> ClaudeAgent[TOptions]: """Start the agent when entering async context.""" await self.start() return self diff --git a/python/packages/claude/pyproject.toml b/python/packages/claude/pyproject.toml index 1fd9d04f54..88dae15d01 100644 --- a/python/packages/claude/pyproject.toml +++ b/python/packages/claude/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ "Typing :: Typed", ] dependencies = [ - "agent-framework-core", + "agent-framework-core>=1.0.0b260130", "claude-agent-sdk>=0.1.25", ] diff --git a/python/packages/copilotstudio/agent_framework_copilotstudio/_agent.py b/python/packages/copilotstudio/agent_framework_copilotstudio/_agent.py index e441161ec3..40f93eee6a 100644 --- a/python/packages/copilotstudio/agent_framework_copilotstudio/_agent.py +++ b/python/packages/copilotstudio/agent_framework_copilotstudio/_agent.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + from collections.abc import AsyncIterable, Awaitable, Sequence from typing import Any, ClassVar, Literal, overload @@ -213,7 +215,7 @@ def run( stream: Literal[False] = False, thread: AgentThread | None = None, **kwargs: Any, - ) -> "Awaitable[AgentResponse]": ... + ) -> Awaitable[AgentResponse]: ... @overload def run( @@ -232,7 +234,7 @@ def run( stream: bool = False, thread: AgentThread | None = None, **kwargs: Any, - ) -> "Awaitable[AgentResponse] | ResponseStream[AgentResponseUpdate, AgentResponse]": + ) -> Awaitable[AgentResponse] | ResponseStream[AgentResponseUpdate, AgentResponse]: """Get a response from the agent. 
This method returns the final result of the agent's execution diff --git a/python/packages/copilotstudio/pyproject.toml b/python/packages/copilotstudio/pyproject.toml index fef08eeaa4..7e4c61bd7e 100644 --- a/python/packages/copilotstudio/pyproject.toml +++ b/python/packages/copilotstudio/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ "Typing :: Typed", ] dependencies = [ - "agent-framework-core", + "agent-framework-core>=1.0.0b260130", "microsoft-agents-copilotstudio-client>=0.3.1", ] diff --git a/python/packages/core/agent_framework/_agents.py b/python/packages/core/agent_framework/_agents.py index 9e71738b9b..8d87e65000 100644 --- a/python/packages/core/agent_framework/_agents.py +++ b/python/packages/core/agent_framework/_agents.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import inspect import re import sys @@ -729,7 +731,7 @@ def __init__( self._async_exit_stack = AsyncExitStack() self._update_agent_name_and_description() - async def __aenter__(self) -> "Self": + async def __aenter__(self) -> Self: """Enter the async context manager. If any of the chat_client or local_mcp_tools are context managers, @@ -787,7 +789,7 @@ def run( | MutableMapping[str, Any] | list[ToolProtocol | Callable[..., Any] | MutableMapping[str, Any]] | None = None, - options: "ChatOptions[TResponseModelT]", + options: ChatOptions[TResponseModelT], **kwargs: Any, ) -> Awaitable[AgentResponse[TResponseModelT]]: ... @@ -803,7 +805,7 @@ def run( | MutableMapping[str, Any] | list[ToolProtocol | Callable[..., Any] | MutableMapping[str, Any]] | None = None, - options: "TOptions_co | ChatOptions[None] | None" = None, + options: TOptions_co | ChatOptions[None] | None = None, **kwargs: Any, ) -> Awaitable[AgentResponse[Any]]: ... @@ -819,7 +821,7 @@ def run( | MutableMapping[str, Any] | list[ToolProtocol | Callable[..., Any] | MutableMapping[str, Any]] | None = None, - options: "TOptions_co | ChatOptions[Any] | None" = None, + options: TOptions_co | ChatOptions[Any] | None = None, **kwargs: Any, ) -> ResponseStream[AgentResponseUpdate, AgentResponse[Any]]: ... @@ -834,7 +836,7 @@ def run( | MutableMapping[str, Any] | list[ToolProtocol | Callable[..., Any] | MutableMapping[str, Any]] | None = None, - options: "TOptions_co | ChatOptions[Any] | None" = None, + options: TOptions_co | ChatOptions[Any] | None = None, **kwargs: Any, ) -> Awaitable[AgentResponse[Any]] | ResponseStream[AgentResponseUpdate, AgentResponse[Any]]: """Run the agent with the given messages and options. @@ -1149,9 +1151,9 @@ def as_mcp_server( server_name: str = "Agent", version: str | None = None, instructions: str | None = None, - lifespan: Callable[["Server[Any]"], AbstractAsyncContextManager[Any]] | None = None, + lifespan: Callable[[Server[Any]], AbstractAsyncContextManager[Any]] | None = None, **kwargs: Any, - ) -> "Server[Any]": + ) -> Server[Any]: """Create an MCP server from an agent instance. 
This function automatically creates a MCP server from an agent instance, it uses the provided arguments to @@ -1177,7 +1179,7 @@ def as_mcp_server( if kwargs: server_args.update(kwargs) - server: "Server[Any]" = Server(**server_args) # type: ignore[call-arg] + server: Server[Any] = Server(**server_args) # type: ignore[call-arg] agent_tool = self.as_tool(name=self._get_agent_name()) diff --git a/python/packages/core/agent_framework/_clients.py b/python/packages/core/agent_framework/_clients.py index 5bafb60eb5..d44c8e7f80 100644 --- a/python/packages/core/agent_framework/_clients.py +++ b/python/packages/core/agent_framework/_clients.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import sys from abc import ABC, abstractmethod from collections.abc import ( @@ -137,7 +139,7 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: Literal[False] = ..., - options: "ChatOptions[TResponseModelT]", + options: ChatOptions[TResponseModelT], **kwargs: Any, ) -> Awaitable[ChatResponse[TResponseModelT]]: ... @@ -147,7 +149,7 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: Literal[False] = ..., - options: "TOptions_contra | ChatOptions[None] | None" = None, + options: TOptions_contra | ChatOptions[None] | None = None, **kwargs: Any, ) -> Awaitable[ChatResponse[Any]]: ... @@ -157,7 +159,7 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: Literal[True], - options: "TOptions_contra | ChatOptions[Any] | None" = None, + options: TOptions_contra | ChatOptions[Any] | None = None, **kwargs: Any, ) -> ResponseStream[ChatResponseUpdate, ChatResponse[Any]]: ... @@ -166,7 +168,7 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: bool = False, - options: "TOptions_contra | ChatOptions[Any] | None" = None, + options: TOptions_contra | ChatOptions[Any] | None = None, **kwargs: Any, ) -> Awaitable[ChatResponse[Any]] | ResponseStream[ChatResponseUpdate, ChatResponse[Any]]: """Send input and return the response. @@ -366,7 +368,7 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: Literal[False] = ..., - options: "ChatOptions[TResponseModelT]", + options: ChatOptions[TResponseModelT], **kwargs: Any, ) -> Awaitable[ChatResponse[TResponseModelT]]: ... @@ -376,7 +378,7 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: Literal[False] = ..., - options: "TOptions_co | ChatOptions[None] | None" = None, + options: TOptions_co | ChatOptions[None] | None = None, **kwargs: Any, ) -> Awaitable[ChatResponse[Any]]: ... @@ -386,7 +388,7 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: Literal[True], - options: "TOptions_co | ChatOptions[Any] | None" = None, + options: TOptions_co | ChatOptions[Any] | None = None, **kwargs: Any, ) -> ResponseStream[ChatResponseUpdate, ChatResponse[Any]]: ... @@ -395,7 +397,7 @@ def get_response( messages: str | ChatMessage | Sequence[str | ChatMessage], *, stream: bool = False, - options: "TOptions_co | ChatOptions[Any] | None" = None, + options: TOptions_co | ChatOptions[Any] | None = None, **kwargs: Any, ) -> Awaitable[ChatResponse[Any]] | ResponseStream[ChatResponseUpdate, ChatResponse[Any]]: """Get a response from a chat client. 
@@ -443,10 +445,10 @@ def as_agent( default_options: TOptions_co | Mapping[str, Any] | None = None, chat_message_store_factory: Callable[[], ChatMessageStoreProtocol] | None = None, context_provider: ContextProvider | None = None, - middleware: Sequence["MiddlewareTypes"] | None = None, + middleware: Sequence[MiddlewareTypes] | None = None, function_invocation_configuration: FunctionInvocationConfiguration | None = None, **kwargs: Any, - ) -> "ChatAgent[TOptions_co]": + ) -> ChatAgent[TOptions_co]: """Create a ChatAgent with this client. This is a convenience method that creates a ChatAgent instance with this diff --git a/python/packages/core/agent_framework/_mcp.py b/python/packages/core/agent_framework/_mcp.py index 578fb606e1..d716aa0c94 100644 --- a/python/packages/core/agent_framework/_mcp.py +++ b/python/packages/core/agent_framework/_mcp.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import asyncio import base64 import logging @@ -333,7 +335,7 @@ def __init__( parse_prompt_results: Literal[True] | Callable[[types.GetPromptResult], Any] | None = True, session: ClientSession | None = None, request_timeout: int | None = None, - chat_client: "ChatClientProtocol | None" = None, + chat_client: ChatClientProtocol | None = None, additional_properties: dict[str, Any] | None = None, ) -> None: """Initialize the MCP Tool base. @@ -940,7 +942,7 @@ def __init__( args: list[str] | None = None, env: dict[str, str] | None = None, encoding: str | None = None, - chat_client: "ChatClientProtocol | None" = None, + chat_client: ChatClientProtocol | None = None, additional_properties: dict[str, Any] | None = None, **kwargs: Any, ) -> None: @@ -1059,7 +1061,7 @@ def __init__( approval_mode: (Literal["always_require", "never_require"] | HostedMCPSpecificApproval | None) = None, allowed_tools: Collection[str] | None = None, terminate_on_close: bool | None = None, - chat_client: "ChatClientProtocol | None" = None, + chat_client: ChatClientProtocol | None = None, additional_properties: dict[str, Any] | None = None, http_client: httpx.AsyncClient | None = None, **kwargs: Any, @@ -1173,7 +1175,7 @@ def __init__( description: str | None = None, approval_mode: (Literal["always_require", "never_require"] | HostedMCPSpecificApproval | None) = None, allowed_tools: Collection[str] | None = None, - chat_client: "ChatClientProtocol | None" = None, + chat_client: ChatClientProtocol | None = None, additional_properties: dict[str, Any] | None = None, **kwargs: Any, ) -> None: diff --git a/python/packages/core/agent_framework/_memory.py b/python/packages/core/agent_framework/_memory.py index 5e46b1749d..465bc1ffec 100644 --- a/python/packages/core/agent_framework/_memory.py +++ b/python/packages/core/agent_framework/_memory.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import sys from abc import ABC, abstractmethod from collections.abc import MutableSequence, Sequence @@ -50,7 +52,7 @@ def __init__( self, instructions: str | None = None, messages: Sequence[ChatMessage] | None = None, - tools: Sequence["ToolProtocol"] | None = None, + tools: Sequence[ToolProtocol] | None = None, ): """Create a new Context object. 
@@ -61,7 +63,7 @@ def __init__( """ self.instructions = instructions self.messages: Sequence[ChatMessage] = messages or [] - self.tools: Sequence["ToolProtocol"] = tools or [] + self.tools: Sequence[ToolProtocol] = tools or [] # region ContextProvider @@ -151,7 +153,7 @@ async def invoking(self, messages: ChatMessage | MutableSequence[ChatMessage], * """ pass - async def __aenter__(self) -> "Self": + async def __aenter__(self) -> Self: """Enter the async context manager. Override this method to perform any setup operations when the context provider is entered. diff --git a/python/packages/core/agent_framework/_pydantic.py b/python/packages/core/agent_framework/_pydantic.py index 8aac34e02f..a54f7b81af 100644 --- a/python/packages/core/agent_framework/_pydantic.py +++ b/python/packages/core/agent_framework/_pydantic.py @@ -1,6 +1,8 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + from typing import Annotated, Any, ClassVar, TypeVar from pydantic import Field, UrlConstraints @@ -48,7 +50,7 @@ def __init__( kwargs = {k: v for k, v in kwargs.items() if v is not None} super().__init__(**kwargs) - def __new__(cls: type["TSettings"], *args: Any, **kwargs: Any) -> "TSettings": + def __new__(cls: type[TSettings], *args: Any, **kwargs: Any) -> TSettings: """Override the __new__ method to set the env_prefix.""" # for both, if supplied but None, set to default if "env_file_encoding" in kwargs and kwargs["env_file_encoding"] is not None: diff --git a/python/packages/core/agent_framework/_serialization.py b/python/packages/core/agent_framework/_serialization.py index dd6b8f871f..c70f73e1d2 100644 --- a/python/packages/core/agent_framework/_serialization.py +++ b/python/packages/core/agent_framework/_serialization.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import json import re from collections.abc import Mapping, MutableMapping diff --git a/python/packages/core/agent_framework/_telemetry.py b/python/packages/core/agent_framework/_telemetry.py index 8b7d26fa4a..5e3ee57222 100644 --- a/python/packages/core/agent_framework/_telemetry.py +++ b/python/packages/core/agent_framework/_telemetry.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import os from typing import Any, Final diff --git a/python/packages/core/agent_framework/_threads.py b/python/packages/core/agent_framework/_threads.py index 6692bdb3c4..74462d3a90 100644 --- a/python/packages/core/agent_framework/_threads.py +++ b/python/packages/core/agent_framework/_threads.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + from collections.abc import MutableMapping, Sequence from typing import Any, Protocol, TypeVar @@ -74,7 +76,7 @@ async def add_messages(self, messages: Sequence[ChatMessage]) -> None: @classmethod async def deserialize( cls, serialized_store_state: MutableMapping[str, Any], **kwargs: Any - ) -> "ChatMessageStoreProtocol": + ) -> ChatMessageStoreProtocol: """Creates a new instance of the store from previously serialized state. 
This method, together with ``serialize()`` can be used to save and load messages from a persistent store diff --git a/python/packages/core/agent_framework/_workflows/_checkpoint_encoding.py b/python/packages/core/agent_framework/_workflows/_checkpoint_encoding.py index 516a4547a0..644744c798 100644 --- a/python/packages/core/agent_framework/_workflows/_checkpoint_encoding.py +++ b/python/packages/core/agent_framework/_workflows/_checkpoint_encoding.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import contextlib import importlib import logging diff --git a/python/packages/core/agent_framework/azure/_assistants_client.py b/python/packages/core/agent_framework/azure/_assistants_client.py index 4f1d2190be..3ded58e9b0 100644 --- a/python/packages/core/agent_framework/azure/_assistants_client.py +++ b/python/packages/core/agent_framework/azure/_assistants_client.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import sys from collections.abc import Mapping from typing import TYPE_CHECKING, Any, ClassVar, Generic @@ -63,7 +65,7 @@ def __init__( ad_token: str | None = None, ad_token_provider: AsyncAzureADTokenProvider | None = None, token_endpoint: str | None = None, - credential: "TokenCredential | None" = None, + credential: TokenCredential | None = None, default_headers: Mapping[str, str] | None = None, async_client: AsyncAzureOpenAI | None = None, env_file_path: str | None = None, diff --git a/python/packages/core/agent_framework/azure/_chat_client.py b/python/packages/core/agent_framework/azure/_chat_client.py index 4aa85e6d7e..a603af52cc 100644 --- a/python/packages/core/agent_framework/azure/_chat_client.py +++ b/python/packages/core/agent_framework/azure/_chat_client.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import json import logging import sys @@ -175,7 +177,7 @@ def __init__( env_file_path: str | None = None, env_file_encoding: str | None = None, instruction_role: str | None = None, - middleware: Sequence["MiddlewareTypes"] | None = None, + middleware: Sequence[MiddlewareTypes] | None = None, function_invocation_configuration: FunctionInvocationConfiguration | None = None, **kwargs: Any, ) -> None: diff --git a/python/packages/core/agent_framework/azure/_entra_id_authentication.py b/python/packages/core/agent_framework/azure/_entra_id_authentication.py index 1f044feff9..229db60d31 100644 --- a/python/packages/core/agent_framework/azure/_entra_id_authentication.py +++ b/python/packages/core/agent_framework/azure/_entra_id_authentication.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import logging from typing import TYPE_CHECKING, Any @@ -15,7 +17,7 @@ def get_entra_auth_token( - credential: "TokenCredential", + credential: TokenCredential, token_endpoint: str, **kwargs: Any, ) -> str | None: @@ -49,7 +51,7 @@ def get_entra_auth_token( async def get_entra_auth_token_async( - credential: "AsyncTokenCredential", token_endpoint: str, **kwargs: Any + credential: AsyncTokenCredential, token_endpoint: str, **kwargs: Any ) -> str | None: """Retrieve a async Microsoft Entra Auth Token for a given token endpoint. 
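For reference, the now-unquoted `credential: TokenCredential` parameter of `get_entra_auth_token` keeps the same call shape as before. A hedged usage sketch based only on the signature visible above; the import path and the token scope value are assumptions for illustration, not taken from this change:

```python
from azure.identity import DefaultAzureCredential

# Import path assumed for illustration; adjust to wherever the helper is exported.
from agent_framework.azure import get_entra_auth_token

# Placeholder token endpoint/scope, not taken from this diff.
token = get_entra_auth_token(
    DefaultAzureCredential(),
    "https://cognitiveservices.azure.com/.default",
)
if token is None:
    raise RuntimeError("No Entra token was returned for the requested endpoint")
```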
diff --git a/python/packages/core/agent_framework/azure/_responses_client.py b/python/packages/core/agent_framework/azure/_responses_client.py index 8f67b726a8..11eee1900f 100644 --- a/python/packages/core/agent_framework/azure/_responses_client.py +++ b/python/packages/core/agent_framework/azure/_responses_client.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import sys from collections.abc import Mapping, Sequence from typing import TYPE_CHECKING, Any, Generic @@ -74,7 +76,7 @@ def __init__( env_file_path: str | None = None, env_file_encoding: str | None = None, instruction_role: str | None = None, - middleware: Sequence["MiddlewareTypes"] | None = None, + middleware: Sequence[MiddlewareTypes] | None = None, function_invocation_configuration: FunctionInvocationConfiguration | None = None, **kwargs: Any, ) -> None: diff --git a/python/packages/core/agent_framework/azure/_shared.py b/python/packages/core/agent_framework/azure/_shared.py index e3eb37b26e..8e90002a75 100644 --- a/python/packages/core/agent_framework/azure/_shared.py +++ b/python/packages/core/agent_framework/azure/_shared.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import logging import sys from collections.abc import Awaitable, Callable, Mapping @@ -110,7 +112,7 @@ class AzureOpenAISettings(AFBaseSettings): default_token_endpoint: str = DEFAULT_AZURE_TOKEN_ENDPOINT def get_azure_auth_token( - self, credential: "TokenCredential", token_endpoint: str | None = None, **kwargs: Any + self, credential: TokenCredential, token_endpoint: str | None = None, **kwargs: Any ) -> str | None: """Retrieve a Microsoft Entra Auth Token for a given token endpoint for the use with Azure OpenAI. diff --git a/python/packages/core/agent_framework/openai/_assistant_provider.py b/python/packages/core/agent_framework/openai/_assistant_provider.py index 103b23e716..263c4dcab1 100644 --- a/python/packages/core/agent_framework/openai/_assistant_provider.py +++ b/python/packages/core/agent_framework/openai/_assistant_provider.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import sys from collections.abc import Awaitable, Callable, MutableMapping, Sequence from typing import TYPE_CHECKING, Any, Generic, cast @@ -177,7 +179,7 @@ def __init__( self._client = AsyncOpenAI(**client_args) - async def __aenter__(self) -> "Self": + async def __aenter__(self) -> Self: """Async context manager entry.""" return self @@ -206,7 +208,7 @@ async def create_agent( default_options: TOptions_co | None = None, middleware: Sequence[MiddlewareTypes] | None = None, context_provider: ContextProvider | None = None, - ) -> "ChatAgent[TOptions_co]": + ) -> ChatAgent[TOptions_co]: """Create a new assistant on OpenAI and return a ChatAgent. This method creates a new assistant on the OpenAI service and wraps it @@ -314,7 +316,7 @@ async def get_agent( default_options: TOptions_co | None = None, middleware: Sequence[MiddlewareTypes] | None = None, context_provider: ContextProvider | None = None, - ) -> "ChatAgent[TOptions_co]": + ) -> ChatAgent[TOptions_co]: """Retrieve an existing assistant by ID and return a ChatAgent. 
This method fetches an existing assistant from OpenAI by its ID @@ -380,7 +382,7 @@ def as_agent( default_options: TOptions_co | None = None, middleware: Sequence[MiddlewareTypes] | None = None, context_provider: ContextProvider | None = None, - ) -> "ChatAgent[TOptions_co]": + ) -> ChatAgent[TOptions_co]: """Wrap an existing SDK Assistant object as a ChatAgent. This method does NOT make any HTTP calls. It simply wraps an already- @@ -524,7 +526,7 @@ def _create_chat_agent_from_assistant( context_provider: ContextProvider | None, default_options: TOptions_co | None = None, **kwargs: Any, - ) -> "ChatAgent[TOptions_co]": + ) -> ChatAgent[TOptions_co]: """Create a ChatAgent from an Assistant. Args: diff --git a/python/packages/core/agent_framework/openai/_assistants_client.py b/python/packages/core/agent_framework/openai/_assistants_client.py index 559b180e02..1f6bdb87dc 100644 --- a/python/packages/core/agent_framework/openai/_assistants_client.py +++ b/python/packages/core/agent_framework/openai/_assistants_client.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import json import sys from collections.abc import ( @@ -227,7 +229,7 @@ def __init__( async_client: AsyncOpenAI | None = None, env_file_path: str | None = None, env_file_encoding: str | None = None, - middleware: Sequence["MiddlewareTypes"] | None = None, + middleware: Sequence[MiddlewareTypes] | None = None, function_invocation_configuration: FunctionInvocationConfiguration | None = None, **kwargs: Any, ) -> None: @@ -325,7 +327,7 @@ class MyOptions(OpenAIAssistantsOptions, total=False): self.thread_id: str | None = thread_id self._should_delete_assistant: bool = False - async def __aenter__(self) -> "Self": + async def __aenter__(self) -> Self: """Async context manager entry.""" return self diff --git a/python/packages/core/agent_framework/openai/_chat_client.py b/python/packages/core/agent_framework/openai/_chat_client.py index 9ec10644e8..4ca47a4481 100644 --- a/python/packages/core/agent_framework/openai/_chat_client.py +++ b/python/packages/core/agent_framework/openai/_chat_client.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import json import sys from collections.abc import AsyncIterable, Awaitable, Callable, Mapping, MutableMapping, Sequence @@ -305,7 +307,7 @@ def _prepare_options(self, messages: Sequence[ChatMessage], options: Mapping[str run_options["response_format"] = type_to_response_format_param(response_format) return run_options - def _parse_response_from_openai(self, response: ChatCompletion, options: Mapping[str, Any]) -> "ChatResponse": + def _parse_response_from_openai(self, response: ChatCompletion, options: Mapping[str, Any]) -> ChatResponse: """Parse a response from OpenAI into a ChatResponse.""" response_metadata = self._get_metadata_from_chat_response(response) messages: list[ChatMessage] = [] diff --git a/python/packages/core/agent_framework/openai/_exceptions.py b/python/packages/core/agent_framework/openai/_exceptions.py index b48f66ad16..4b4944fd7a 100644 --- a/python/packages/core/agent_framework/openai/_exceptions.py +++ b/python/packages/core/agent_framework/openai/_exceptions.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. 
+from __future__ import annotations + from dataclasses import dataclass from enum import Enum from typing import Any @@ -29,7 +31,7 @@ class ContentFilterResult: severity: ContentFilterResultSeverity = ContentFilterResultSeverity.SAFE @classmethod - def from_inner_error_result(cls, inner_error_results: dict[str, Any]) -> "ContentFilterResult": + def from_inner_error_result(cls, inner_error_results: dict[str, Any]) -> ContentFilterResult: """Creates a ContentFilterResult from the inner error results. Args: diff --git a/python/packages/core/agent_framework/openai/_responses_client.py b/python/packages/core/agent_framework/openai/_responses_client.py index a2e7162f70..b2b7451918 100644 --- a/python/packages/core/agent_framework/openai/_responses_client.py +++ b/python/packages/core/agent_framework/openai/_responses_client.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import sys from collections.abc import ( AsyncIterable, @@ -815,7 +817,7 @@ def _parse_response_from_openai( self, response: OpenAIResponse | ParsedResponse[BaseModel], options: dict[str, Any], - ) -> "ChatResponse": + ) -> ChatResponse: """Parse an OpenAI Responses API response into a ChatResponse.""" structured_response: BaseModel | None = response.output_parsed if isinstance(response, ParsedResponse) else None # type: ignore[reportUnknownMemberType] @@ -945,7 +947,7 @@ def _parse_response_from_openai( ) case "code_interpreter_call": # ResponseOutputCodeInterpreterCall call_id = getattr(item, "call_id", None) or getattr(item, "id", None) - outputs: list["Content"] = [] + outputs: list[Content] = [] if item_outputs := getattr(item, "outputs", None): for code_output in item_outputs: if getattr(code_output, "type", None) == "logs": @@ -1456,7 +1458,7 @@ def __init__( env_file_path: str | None = None, env_file_encoding: str | None = None, middleware: ( - Sequence["ChatMiddleware | ChatMiddlewareCallable | FunctionMiddleware | FunctionMiddlewareCallable"] | None + Sequence[ChatMiddleware | ChatMiddlewareCallable | FunctionMiddleware | FunctionMiddlewareCallable] | None ) = None, function_invocation_configuration: FunctionInvocationConfiguration | None = None, **kwargs: Any, diff --git a/python/packages/core/agent_framework/openai/_shared.py b/python/packages/core/agent_framework/openai/_shared.py index e90ec48bc8..dbf0d9f6f6 100644 --- a/python/packages/core/agent_framework/openai/_shared.py +++ b/python/packages/core/agent_framework/openai/_shared.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import logging from collections.abc import Awaitable, Callable, Mapping, MutableMapping, Sequence from copy import copy diff --git a/python/packages/declarative/agent_framework_declarative/_loader.py b/python/packages/declarative/agent_framework_declarative/_loader.py index 0476e5be54..493787350e 100644 --- a/python/packages/declarative/agent_framework_declarative/_loader.py +++ b/python/packages/declarative/agent_framework_declarative/_loader.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. 
+from __future__ import annotations + import sys from collections.abc import Callable, Mapping from pathlib import Path diff --git a/python/packages/declarative/agent_framework_declarative/_models.py b/python/packages/declarative/agent_framework_declarative/_models.py index 3066848927..107978e36b 100644 --- a/python/packages/declarative/agent_framework_declarative/_models.py +++ b/python/packages/declarative/agent_framework_declarative/_models.py @@ -1,4 +1,6 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import os from collections.abc import MutableMapping from contextvars import ContextVar @@ -101,7 +103,7 @@ def __init__( @classmethod def from_dict( cls, value: MutableMapping[str, Any], /, *, dependencies: MutableMapping[str, Any] | None = None - ) -> "Property": + ) -> Property: """Create a Property instance from a dictionary, dispatching to the appropriate subclass.""" # Only dispatch if we're being called on the base Property class if cls is not Property: @@ -211,7 +213,7 @@ def __init__( @classmethod def from_dict( cls, value: MutableMapping[str, Any], /, *, dependencies: MutableMapping[str, Any] | None = None - ) -> "PropertySchema": + ) -> PropertySchema: """Create a PropertySchema instance from a dictionary, filtering out 'kind' field.""" # Filter out 'kind', 'type', 'name', and 'description' fields that may appear in YAML # but aren't PropertySchema params @@ -491,7 +493,7 @@ def __init__( @classmethod def from_dict( cls, value: MutableMapping[str, Any], /, *, dependencies: MutableMapping[str, Any] | None = None - ) -> "AgentDefinition": + ) -> AgentDefinition: """Create an AgentDefinition instance from a dictionary, dispatching to the appropriate subclass.""" # Only dispatch if we're being called on the base AgentDefinition class if cls is not AgentDefinition: @@ -537,7 +539,7 @@ def __init__( @classmethod def from_dict( cls: type[TTool], value: MutableMapping[str, Any], /, *, dependencies: MutableMapping[str, Any] | None = None - ) -> "TTool": + ) -> TTool: """Create a Tool instance from a dictionary, dispatching to the appropriate subclass.""" # Only dispatch if we're being called on the base Tool class if cls is not Tool: @@ -867,7 +869,7 @@ def __init__( @classmethod def from_dict( cls, value: MutableMapping[str, Any], /, *, dependencies: MutableMapping[str, Any] | None = None - ) -> "Resource": + ) -> Resource: """Create a Resource instance from a dictionary, dispatching to the appropriate subclass.""" # Only dispatch if we're being called on the base Resource class if cls is not Resource: diff --git a/python/packages/declarative/agent_framework_declarative/_workflows/_actions_agents.py b/python/packages/declarative/agent_framework_declarative/_workflows/_actions_agents.py index 9589fe8c28..1a49f9b89d 100644 --- a/python/packages/declarative/agent_framework_declarative/_workflows/_actions_agents.py +++ b/python/packages/declarative/agent_framework_declarative/_workflows/_actions_agents.py @@ -7,6 +7,8 @@ - InvokePromptAgent: Invoke a local prompt-based agent """ +from __future__ import annotations + import json from collections.abc import AsyncGenerator from typing import Any, cast @@ -185,7 +187,7 @@ def _build_messages_from_state(ctx: ActionContext) -> list[ChatMessage]: @action_handler("InvokeAzureAgent") -async def handle_invoke_azure_agent(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent, None]: +async def handle_invoke_azure_agent(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent]: """Invoke a hosted Azure AI agent. 
Supports both Python-style and .NET-style YAML schemas: @@ -523,7 +525,7 @@ def _normalize_variable_path(variable: str) -> str: @action_handler("InvokePromptAgent") -async def handle_invoke_prompt_agent(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent, None]: +async def handle_invoke_prompt_agent(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent]: """Invoke a local prompt-based agent (similar to InvokeAzureAgent but for local agents). Action schema: diff --git a/python/packages/declarative/agent_framework_declarative/_workflows/_actions_basic.py b/python/packages/declarative/agent_framework_declarative/_workflows/_actions_basic.py index 243fe36e04..d5132e125c 100644 --- a/python/packages/declarative/agent_framework_declarative/_workflows/_actions_basic.py +++ b/python/packages/declarative/agent_framework_declarative/_workflows/_actions_basic.py @@ -16,6 +16,8 @@ actually yielding any events. """ +from __future__ import annotations + from collections.abc import AsyncGenerator from typing import TYPE_CHECKING, Any, cast @@ -37,7 +39,7 @@ @action_handler("SetValue") -async def handle_set_value(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent, None]: # noqa: RUF029 +async def handle_set_value(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent]: # noqa: RUF029 """Set a value in the workflow state. Action schema: @@ -63,7 +65,7 @@ async def handle_set_value(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent, @action_handler("SetVariable") -async def handle_set_variable(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent, None]: # noqa: RUF029 +async def handle_set_variable(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent]: # noqa: RUF029 """Set a variable in the workflow state (.NET workflow format). This is an alias for SetValue with 'variable' instead of 'path'. @@ -113,7 +115,7 @@ def _normalize_variable_path(variable: str) -> str: @action_handler("AppendValue") -async def handle_append_value(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent, None]: # noqa: RUF029 +async def handle_append_value(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent]: # noqa: RUF029 """Append a value to a list in the workflow state. Action schema: @@ -139,7 +141,7 @@ async def handle_append_value(ctx: ActionContext) -> AsyncGenerator[WorkflowEven @action_handler("SendActivity") -async def handle_send_activity(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent, None]: # noqa: RUF029 +async def handle_send_activity(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent]: # noqa: RUF029 """Send text or attachments to the user. Action schema (object form): @@ -189,7 +191,7 @@ async def handle_send_activity(ctx: ActionContext) -> AsyncGenerator[WorkflowEve @action_handler("EmitEvent") -async def handle_emit_event(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent, None]: # noqa: RUF029 +async def handle_emit_event(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent]: # noqa: RUF029 """Emit a custom workflow event. Action schema: @@ -213,7 +215,7 @@ async def handle_emit_event(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent, yield CustomEvent(name=name, data=evaluated_data) -def _evaluate_dict_values(d: dict[str, Any], state: "WorkflowState") -> dict[str, Any]: +def _evaluate_dict_values(d: dict[str, Any], state: WorkflowState) -> dict[str, Any]: """Recursively evaluate PowerFx expressions in a dictionary. 
Args: @@ -245,7 +247,7 @@ def _evaluate_dict_values(d: dict[str, Any], state: "WorkflowState") -> dict[str @action_handler("SetTextVariable") -async def handle_set_text_variable(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent, None]: # noqa: RUF029 +async def handle_set_text_variable(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent]: # noqa: RUF029 """Set a text variable with string interpolation support. This is similar to SetVariable but supports multi-line text with @@ -281,7 +283,7 @@ async def handle_set_text_variable(ctx: ActionContext) -> AsyncGenerator[Workflo @action_handler("SetMultipleVariables") -async def handle_set_multiple_variables(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent, None]: # noqa: RUF029 +async def handle_set_multiple_variables(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent]: # noqa: RUF029 """Set multiple variables at once. Action schema: @@ -313,7 +315,7 @@ async def handle_set_multiple_variables(ctx: ActionContext) -> AsyncGenerator[Wo @action_handler("ResetVariable") -async def handle_reset_variable(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent, None]: # noqa: RUF029 +async def handle_reset_variable(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent]: # noqa: RUF029 """Reset a variable to its default/blank state. Action schema: @@ -336,7 +338,7 @@ async def handle_reset_variable(ctx: ActionContext) -> AsyncGenerator[WorkflowEv @action_handler("ClearAllVariables") -async def handle_clear_all_variables(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent, None]: # noqa: RUF029 +async def handle_clear_all_variables(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent]: # noqa: RUF029 """Clear all turn-scoped variables. Action schema: @@ -350,7 +352,7 @@ async def handle_clear_all_variables(ctx: ActionContext) -> AsyncGenerator[Workf @action_handler("CreateConversation") -async def handle_create_conversation(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent, None]: # noqa: RUF029 +async def handle_create_conversation(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent]: # noqa: RUF029 """Create a new conversation context. Action schema (.NET style): @@ -399,7 +401,7 @@ async def handle_create_conversation(ctx: ActionContext) -> AsyncGenerator[Workf @action_handler("AddConversationMessage") -async def handle_add_conversation_message(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent, None]: # noqa: RUF029 +async def handle_add_conversation_message(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent]: # noqa: RUF029 """Add a message to a conversation. Action schema: @@ -451,7 +453,7 @@ async def handle_add_conversation_message(ctx: ActionContext) -> AsyncGenerator[ @action_handler("CopyConversationMessages") -async def handle_copy_conversation_messages(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent, None]: # noqa: RUF029 +async def handle_copy_conversation_messages(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent]: # noqa: RUF029 """Copy messages from one conversation to another. Action schema: @@ -506,7 +508,7 @@ async def handle_copy_conversation_messages(ctx: ActionContext) -> AsyncGenerato @action_handler("RetrieveConversationMessages") -async def handle_retrieve_conversation_messages(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent, None]: # noqa: RUF029 +async def handle_retrieve_conversation_messages(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent]: # noqa: RUF029 """Retrieve messages from a conversation and store in a variable. 
Action schema: @@ -547,7 +549,7 @@ async def handle_retrieve_conversation_messages(ctx: ActionContext) -> AsyncGene yield # Make it a generator -def _interpolate_string(text: str, state: "WorkflowState") -> str: +def _interpolate_string(text: str, state: WorkflowState) -> str: """Interpolate {Variable.Path} references in a string. Args: diff --git a/python/packages/declarative/agent_framework_declarative/_workflows/_actions_error.py b/python/packages/declarative/agent_framework_declarative/_workflows/_actions_error.py index d59a65e668..9e32bd8647 100644 --- a/python/packages/declarative/agent_framework_declarative/_workflows/_actions_error.py +++ b/python/packages/declarative/agent_framework_declarative/_workflows/_actions_error.py @@ -7,6 +7,8 @@ - TryCatch: Try-catch-finally error handling """ +from __future__ import annotations + from collections.abc import AsyncGenerator from dataclasses import dataclass @@ -44,7 +46,7 @@ class ErrorEvent(WorkflowEvent): @action_handler("ThrowException") -async def handle_throw_exception(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent, None]: # noqa: RUF029 +async def handle_throw_exception(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent]: # noqa: RUF029 """Raise an exception that can be caught by TryCatch. Action schema: @@ -67,7 +69,7 @@ async def handle_throw_exception(ctx: ActionContext) -> AsyncGenerator[WorkflowE @action_handler("TryCatch") -async def handle_try_catch(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent, None]: +async def handle_try_catch(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent]: """Try-catch-finally error handling. Action schema: diff --git a/python/packages/declarative/agent_framework_declarative/_workflows/_declarative_base.py b/python/packages/declarative/agent_framework_declarative/_workflows/_declarative_base.py index 501cd1d943..11b6868ad1 100644 --- a/python/packages/declarative/agent_framework_declarative/_workflows/_declarative_base.py +++ b/python/packages/declarative/agent_framework_declarative/_workflows/_declarative_base.py @@ -23,6 +23,8 @@ See: dotnet/src/Microsoft.Agents.AI.Workflows.Declarative/PowerFx/ """ +from __future__ import annotations + import logging import sys from collections.abc import Mapping @@ -148,7 +150,7 @@ def __init__(self, state: State): """ self._state = state - def initialize(self, inputs: "Mapping[str, Any] | None" = None) -> None: + def initialize(self, inputs: Mapping[str, Any] | None = None) -> None: """Initialize the declarative state with inputs. Args: @@ -814,7 +816,7 @@ def _get_state(self, state: State) -> DeclarativeWorkflowState: async def _ensure_state_initialized( self, - ctx: "WorkflowContext[Any, Any]", + ctx: WorkflowContext[Any, Any], trigger: Any, ) -> DeclarativeWorkflowState: """Ensure declarative state is initialized. 
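The handler signature changes above (`AsyncGenerator[WorkflowEvent, None]` becoming `AsyncGenerator[WorkflowEvent]`) lean on the send/return type parameters of `collections.abc.AsyncGenerator` defaulting to `None` (added in Python 3.13 and recognized earlier by current type checkers); combined with postponed annotation evaluation, the shorter spelling is also safe on the repo's 3.10 baseline. A small standalone illustration, not code from this repository:

```python
from __future__ import annotations

import asyncio
from collections.abc import AsyncGenerator


async def countdown(start: int) -> AsyncGenerator[int]:
    # Equivalent to AsyncGenerator[int, None]: the generator only yields ints
    # and nothing is ever sent into it.
    while start > 0:
        yield start
        start -= 1


async def main() -> None:
    async for value in countdown(3):
        print(value)


if __name__ == "__main__":
    asyncio.run(main())
```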
diff --git a/python/packages/declarative/agent_framework_declarative/_workflows/_declarative_builder.py b/python/packages/declarative/agent_framework_declarative/_workflows/_declarative_builder.py index 4e649f8f04..1f1069c02c 100644 --- a/python/packages/declarative/agent_framework_declarative/_workflows/_declarative_builder.py +++ b/python/packages/declarative/agent_framework_declarative/_workflows/_declarative_builder.py @@ -11,6 +11,8 @@ - Loop edges for foreach """ +from __future__ import annotations + from typing import Any from agent_framework._workflows import ( diff --git a/python/packages/declarative/agent_framework_declarative/_workflows/_factory.py b/python/packages/declarative/agent_framework_declarative/_workflows/_factory.py index 9d6c50ee44..c3cfff1d21 100644 --- a/python/packages/declarative/agent_framework_declarative/_workflows/_factory.py +++ b/python/packages/declarative/agent_framework_declarative/_workflows/_factory.py @@ -10,6 +10,8 @@ enabling checkpointing, visualization, and pause/resume capabilities. """ +from __future__ import annotations + from collections.abc import Mapping from pathlib import Path from typing import Any, cast @@ -506,7 +508,7 @@ def _create_agent_from_def( f"Invalid agent definition. Expected 'file', 'kind', or 'connection': {agent_def}" ) - def register_agent(self, name: str, agent: SupportsAgentRun | AgentExecutor) -> "WorkflowFactory": + def register_agent(self, name: str, agent: SupportsAgentRun | AgentExecutor) -> WorkflowFactory: """Register an agent instance with the factory for use in workflows. Registered agents are available to InvokeAzureAgent actions by name. @@ -552,7 +554,7 @@ def register_agent(self, name: str, agent: SupportsAgentRun | AgentExecutor) -> self._agents[name] = agent return self - def register_binding(self, name: str, func: Any) -> "WorkflowFactory": + def register_binding(self, name: str, func: Any) -> WorkflowFactory: """Register a function binding with the factory for use in workflow actions. Bindings allow workflow actions to invoke Python functions by name. diff --git a/python/packages/declarative/agent_framework_declarative/_workflows/_handlers.py b/python/packages/declarative/agent_framework_declarative/_workflows/_handlers.py index 64db7f43f6..cc529a2c8a 100644 --- a/python/packages/declarative/agent_framework_declarative/_workflows/_handlers.py +++ b/python/packages/declarative/agent_framework_declarative/_workflows/_handlers.py @@ -7,6 +7,8 @@ has a corresponding handler registered via the @action_handler decorator. """ +from __future__ import annotations + from collections.abc import AsyncGenerator, Callable from dataclasses import dataclass from typing import TYPE_CHECKING, Any, Protocol, runtime_checkable @@ -27,13 +29,13 @@ class ActionContext: for executing nested actions (for control flow constructs like Foreach). """ - state: "WorkflowState" + state: WorkflowState """The current workflow state with variables and agent results.""" action: dict[str, Any] """The action definition from the YAML.""" - execute_actions: "ExecuteActionsFn" + execute_actions: ExecuteActionsFn """Function to execute a list of nested actions (for Foreach, If, etc.).""" agents: dict[str, Any] @@ -150,7 +152,7 @@ class ActionHandler(Protocol): def __call__( self, ctx: ActionContext, - ) -> AsyncGenerator[WorkflowEvent, None]: + ) -> AsyncGenerator[WorkflowEvent]: """Execute the action and yield events. 
Args: diff --git a/python/packages/declarative/agent_framework_declarative/_workflows/_human_input.py b/python/packages/declarative/agent_framework_declarative/_workflows/_human_input.py index 97259807e7..e7e1da97e4 100644 --- a/python/packages/declarative/agent_framework_declarative/_workflows/_human_input.py +++ b/python/packages/declarative/agent_framework_declarative/_workflows/_human_input.py @@ -8,6 +8,8 @@ - ExternalLoop processing: Loop while waiting for external input """ +from __future__ import annotations + from collections.abc import AsyncGenerator from dataclasses import dataclass from typing import TYPE_CHECKING, Any, cast @@ -75,7 +77,7 @@ class ExternalLoopEvent(WorkflowEvent): @action_handler("Question") -async def handle_question(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent, None]: # noqa: RUF029 +async def handle_question(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent]: # noqa: RUF029 """Handle Question action - request human input with optional validation. Action schema: @@ -140,7 +142,7 @@ async def handle_question(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent, N @action_handler("RequestExternalInput") -async def handle_request_external_input(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent, None]: # noqa: RUF029 +async def handle_request_external_input(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent]: # noqa: RUF029 """Handle RequestExternalInput action - request input from external system. Action schema: @@ -197,7 +199,7 @@ async def handle_request_external_input(ctx: ActionContext) -> AsyncGenerator[Wo @action_handler("WaitForInput") -async def handle_wait_for_input(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent, None]: # noqa: RUF029 +async def handle_wait_for_input(ctx: ActionContext) -> AsyncGenerator[WorkflowEvent]: # noqa: RUF029 """Handle WaitForInput action - pause and wait for external input. Action schema: @@ -231,7 +233,7 @@ async def handle_wait_for_input(ctx: ActionContext) -> AsyncGenerator[WorkflowEv def process_external_loop( input_config: dict[str, Any], - state: "WorkflowState", + state: WorkflowState, ) -> tuple[bool, str | None]: """Process the externalLoop.when pattern from action input. diff --git a/python/packages/declarative/agent_framework_declarative/_workflows/_powerfx_functions.py b/python/packages/declarative/agent_framework_declarative/_workflows/_powerfx_functions.py index 1cc8ce2cfb..df66ef59fd 100644 --- a/python/packages/declarative/agent_framework_declarative/_workflows/_powerfx_functions.py +++ b/python/packages/declarative/agent_framework_declarative/_workflows/_powerfx_functions.py @@ -10,6 +10,8 @@ or registered with the PowerFx engine when it is available. """ +from __future__ import annotations + from typing import Any, cast diff --git a/python/packages/declarative/agent_framework_declarative/_workflows/_state.py b/python/packages/declarative/agent_framework_declarative/_workflows/_state.py index 9fe57b83f5..7d1f9e4945 100644 --- a/python/packages/declarative/agent_framework_declarative/_workflows/_state.py +++ b/python/packages/declarative/agent_framework_declarative/_workflows/_state.py @@ -9,6 +9,8 @@ - Agent results and context """ +from __future__ import annotations + from collections.abc import Mapping from typing import Any, cast @@ -624,7 +626,7 @@ def reset_agent(self) -> None: """Reset the agent result for a new agent invocation.""" self._agent.clear() - def clone(self) -> "WorkflowState": + def clone(self) -> WorkflowState: """Create a shallow copy of the state. 
Returns: diff --git a/python/packages/declarative/pyproject.toml b/python/packages/declarative/pyproject.toml index ab43d50104..d8dcfa2f5f 100644 --- a/python/packages/declarative/pyproject.toml +++ b/python/packages/declarative/pyproject.toml @@ -22,7 +22,7 @@ classifiers = [ "Typing :: Typed", ] dependencies = [ - "agent-framework-core", + "agent-framework-core>=1.0.0b260130", "powerfx>=0.0.31; python_version < '3.14'", "pyyaml>=6.0,<7.0", ] diff --git a/python/packages/devui/agent_framework_devui/_conversations.py b/python/packages/devui/agent_framework_devui/_conversations.py index 741139e734..6b271ddff5 100644 --- a/python/packages/devui/agent_framework_devui/_conversations.py +++ b/python/packages/devui/agent_framework_devui/_conversations.py @@ -6,6 +6,8 @@ while wrapping AgentFramework's AgentThread underneath. """ +from __future__ import annotations + import time import uuid from abc import ABC, abstractmethod diff --git a/python/packages/devui/agent_framework_devui/_discovery.py b/python/packages/devui/agent_framework_devui/_discovery.py index 6bae42efac..8058d31083 100644 --- a/python/packages/devui/agent_framework_devui/_discovery.py +++ b/python/packages/devui/agent_framework_devui/_discovery.py @@ -2,6 +2,8 @@ """Agent Framework entity discovery implementation.""" +from __future__ import annotations + import ast import importlib import importlib.util diff --git a/python/packages/devui/agent_framework_devui/_executor.py b/python/packages/devui/agent_framework_devui/_executor.py index 0a487cbad3..c70c123983 100644 --- a/python/packages/devui/agent_framework_devui/_executor.py +++ b/python/packages/devui/agent_framework_devui/_executor.py @@ -2,6 +2,8 @@ """Agent Framework executor implementation.""" +from __future__ import annotations + import json import logging from collections.abc import AsyncGenerator @@ -184,7 +186,7 @@ def get_entity_info(self, entity_id: str) -> EntityInfo: raise EntityNotFoundError(f"Entity '{entity_id}' not found") return entity_info - async def execute_streaming(self, request: AgentFrameworkRequest) -> AsyncGenerator[Any, None]: + async def execute_streaming(self, request: AgentFrameworkRequest) -> AsyncGenerator[Any]: """Execute request and stream results in OpenAI format. Args: @@ -229,7 +231,7 @@ async def execute_sync(self, request: AgentFrameworkRequest) -> OpenAIResponse: # Aggregate into final response return await self.message_mapper.aggregate_to_response(events, request) - async def execute_entity(self, entity_id: str, request: AgentFrameworkRequest) -> AsyncGenerator[Any, None]: + async def execute_entity(self, entity_id: str, request: AgentFrameworkRequest) -> AsyncGenerator[Any]: """Execute the entity and yield raw Agent Framework events plus trace events. Args: @@ -286,7 +288,7 @@ async def execute_entity(self, entity_id: str, request: AgentFrameworkRequest) - async def _execute_agent( self, agent: SupportsAgentRun, request: AgentFrameworkRequest, trace_collector: Any - ) -> AsyncGenerator[Any, None]: + ) -> AsyncGenerator[Any]: """Execute Agent Framework agent with trace collection and optional thread support. Args: @@ -361,7 +363,7 @@ async def _execute_agent( async def _execute_workflow( self, workflow: Workflow, request: AgentFrameworkRequest, trace_collector: Any - ) -> AsyncGenerator[Any, None]: + ) -> AsyncGenerator[Any]: """Execute Agent Framework workflow with checkpoint support via conversation items. 
Args: diff --git a/python/packages/devui/agent_framework_devui/_mapper.py b/python/packages/devui/agent_framework_devui/_mapper.py index b956be3ac0..cb2ecacdd0 100644 --- a/python/packages/devui/agent_framework_devui/_mapper.py +++ b/python/packages/devui/agent_framework_devui/_mapper.py @@ -2,6 +2,8 @@ """Agent Framework message mapper implementation.""" +from __future__ import annotations + import json import logging import time diff --git a/python/packages/devui/agent_framework_devui/_openai/_executor.py b/python/packages/devui/agent_framework_devui/_openai/_executor.py index 1de05bfc2d..986d2d3a84 100644 --- a/python/packages/devui/agent_framework_devui/_openai/_executor.py +++ b/python/packages/devui/agent_framework_devui/_openai/_executor.py @@ -6,6 +6,8 @@ requests to OpenAI's API instead of executing local entities. """ +from __future__ import annotations + import logging import os from collections.abc import AsyncGenerator @@ -76,7 +78,7 @@ def _get_client(self) -> AsyncOpenAI: return self._client - async def execute_streaming(self, request: AgentFrameworkRequest) -> AsyncGenerator[Any, None]: + async def execute_streaming(self, request: AgentFrameworkRequest) -> AsyncGenerator[Any]: """Execute request via OpenAI and stream results in OpenAI format. This mirrors AgentFrameworkExecutor.execute_streaming() interface. diff --git a/python/packages/devui/agent_framework_devui/_server.py b/python/packages/devui/agent_framework_devui/_server.py index 6393f23b4a..1045c82923 100644 --- a/python/packages/devui/agent_framework_devui/_server.py +++ b/python/packages/devui/agent_framework_devui/_server.py @@ -2,6 +2,8 @@ """FastAPI server implementation.""" +from __future__ import annotations + import asyncio import importlib.metadata import inspect @@ -287,7 +289,7 @@ def create_app(self) -> FastAPI: """Create the FastAPI application.""" @asynccontextmanager - async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]: + async def lifespan(app: FastAPI) -> AsyncGenerator[None]: # Startup logger.info("Starting Agent Framework Server") await self._ensure_executor() @@ -623,7 +625,7 @@ async def create_deployment(config: DeploymentConfig) -> StreamingResponse: entity_path = Path(entity_path_str) # Stream deployment events - async def event_generator() -> AsyncGenerator[str, None]: + async def event_generator() -> AsyncGenerator[str]: async for event in self.deployment_manager.deploy(config, entity_path): # Format as SSE import json @@ -1085,7 +1087,7 @@ async def delete_conversation_item(conversation_id: str, item_id: str) -> dict[s async def _stream_execution( self, executor: AgentFrameworkExecutor, request: AgentFrameworkRequest - ) -> AsyncGenerator[str, None]: + ) -> AsyncGenerator[str]: """Stream execution directly through executor.""" try: # Collect events for final response.completed event @@ -1155,7 +1157,7 @@ async def _stream_execution( async def _stream_openai_execution( self, executor: OpenAIExecutor, request: AgentFrameworkRequest - ) -> AsyncGenerator[str, None]: + ) -> AsyncGenerator[str]: """Stream execution through OpenAI executor. OpenAI events are already in final format - no conversion or aggregation needed. @@ -1212,7 +1214,7 @@ async def _stream_openai_execution( async def _stream_with_cancellation( self, executor: AgentFrameworkExecutor, request: AgentFrameworkRequest, response_id: str - ) -> AsyncGenerator[str, None]: + ) -> AsyncGenerator[str]: """Stream execution with automatic cancellation on client disconnect. 
This wrapper adds cancellation support to the execution stream: @@ -1231,7 +1233,7 @@ async def _stream_with_cancellation( """ task = None - async def execution_wrapper() -> AsyncGenerator[str, None]: + async def execution_wrapper() -> AsyncGenerator[str]: """Inner wrapper to handle the actual execution.""" try: logger.debug(f"[CANCELLATION] Starting execution for {response_id}") diff --git a/python/packages/devui/agent_framework_devui/_tracing.py b/python/packages/devui/agent_framework_devui/_tracing.py index 3fc45398bf..81eec75c42 100644 --- a/python/packages/devui/agent_framework_devui/_tracing.py +++ b/python/packages/devui/agent_framework_devui/_tracing.py @@ -2,6 +2,8 @@ """Simplified tracing integration for Agent Framework Server.""" +from __future__ import annotations + import logging from collections.abc import Generator, Sequence from contextlib import contextmanager @@ -120,9 +122,7 @@ def _convert_span_to_trace_event(self, span: Any) -> ResponseTraceEvent | None: @contextmanager -def capture_traces( - response_id: str | None = None, entity_id: str | None = None -) -> Generator[SimpleTraceCollector, None, None]: +def capture_traces(response_id: str | None = None, entity_id: str | None = None) -> Generator[SimpleTraceCollector]: """Context manager to capture traces during execution. Args: diff --git a/python/packages/devui/frontend/src/components/features/agent/agent-view.tsx b/python/packages/devui/frontend/src/components/features/agent/agent-view.tsx index 38b26324c1..8f1b57786a 100644 --- a/python/packages/devui/frontend/src/components/features/agent/agent-view.tsx +++ b/python/packages/devui/frontend/src/components/features/agent/agent-view.tsx @@ -559,7 +559,7 @@ export function AgentView({ selectedAgent, onDebugEvent }: AgentViewProps) { // Backend successfully returned conversations list setAvailableConversations(conversations); - + if (conversations.length > 0) { // Found conversations on backend - use most recent const mostRecent = conversations[0]; @@ -614,7 +614,7 @@ export function AgentView({ selectedAgent, onDebugEvent }: AgentViewProps) { // Check for incomplete stream and resume if needed const state = loadStreamingState(mostRecent.id); - + if (state && !state.completed) { accumulatedTextRef.current = state.accumulatedText || ""; // Add assistant message with resumed text diff --git a/python/packages/devui/frontend/src/components/features/workflow/workflow-flow.tsx b/python/packages/devui/frontend/src/components/features/workflow/workflow-flow.tsx index 1dc586ec0e..9f4a5a0ae8 100644 --- a/python/packages/devui/frontend/src/components/features/workflow/workflow-flow.tsx +++ b/python/packages/devui/frontend/src/components/features/workflow/workflow-flow.tsx @@ -565,7 +565,7 @@ export const WorkflowFlow = memo(function WorkflowFlow({ 0% { stroke-dashoffset: 0; } 100% { stroke-dashoffset: -10; } } - + /* Dark theme styles for React Flow controls */ .dark .react-flow__controls { background-color: rgba(31, 41, 55, 0.9) !important; diff --git a/python/packages/devui/frontend/src/components/ui/loading-spinner.tsx b/python/packages/devui/frontend/src/components/ui/loading-spinner.tsx index 00b2e65f02..ea22f1de01 100644 --- a/python/packages/devui/frontend/src/components/ui/loading-spinner.tsx +++ b/python/packages/devui/frontend/src/components/ui/loading-spinner.tsx @@ -13,7 +13,7 @@ export function LoadingSpinner({ size = "md", className }: LoadingSpinnerProps) "animate-spin", { "h-4 w-4": size === "sm", - "h-6 w-6": size === "md", + "h-6 w-6": size === "md", "h-8 w-8": 
size === "lg", }, className diff --git a/python/packages/devui/frontend/src/components/ui/loading-state.tsx b/python/packages/devui/frontend/src/components/ui/loading-state.tsx index ac4303a981..ba8110c11a 100644 --- a/python/packages/devui/frontend/src/components/ui/loading-state.tsx +++ b/python/packages/devui/frontend/src/components/ui/loading-state.tsx @@ -9,8 +9,8 @@ interface LoadingStateProps { fullPage?: boolean } -export function LoadingState({ - message = "Loading...", +export function LoadingState({ + message = "Loading...", description, size = "md", className, diff --git a/python/packages/devui/frontend/src/services/streaming-state.ts b/python/packages/devui/frontend/src/services/streaming-state.ts index 86116b2adf..290492bbc8 100644 --- a/python/packages/devui/frontend/src/services/streaming-state.ts +++ b/python/packages/devui/frontend/src/services/streaming-state.ts @@ -1,6 +1,6 @@ /** * Streaming State Persistence - * + * * Manages browser storage of streaming response state to enable: * - Resume interrupted streams after page refresh * - Replay cached events before fetching new ones @@ -73,7 +73,7 @@ export function loadStreamingState(conversationId: string): StreamingState | nul try { const key = getStorageKey(conversationId); const data = localStorage.getItem(key); - + if (!data) { return null; } @@ -111,9 +111,9 @@ export function updateStreamingState( try { const existing = loadStreamingState(conversationId); const sequenceNumber = "sequence_number" in event ? event.sequence_number : undefined; - + const newEvents = existing ? [...existing.events, event] : [event]; - + const state: StreamingState = { conversationId, responseId, @@ -174,7 +174,7 @@ export function clearExpiredStreamingStates(): void { if (data) { const state: StreamingState = JSON.parse(data); const age = now - state.timestamp; - + if (age > STATE_EXPIRY_MS || state.completed) { localStorage.removeItem(key); } diff --git a/python/packages/devui/pyproject.toml b/python/packages/devui/pyproject.toml index 2b5cbf9184..6dbdd27f3c 100644 --- a/python/packages/devui/pyproject.toml +++ b/python/packages/devui/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ "Typing :: Typed", ] dependencies = [ - "agent-framework-core", + "agent-framework-core>=1.0.0b260130", "fastapi>=0.104.0", "uvicorn[standard]>=0.24.0", "python-dotenv>=1.0.0", diff --git a/python/packages/durabletask/README.md b/python/packages/durabletask/README.md index 083de30871..aa67c9b3da 100644 --- a/python/packages/durabletask/README.md +++ b/python/packages/durabletask/README.md @@ -15,15 +15,15 @@ The durable task integration lets you host Microsoft Agent Framework agents usin ### Basic Usage Example ```python -from durabletask import TaskHubGrpcWorker +from durabletask.worker import TaskHubGrpcWorker from agent_framework.azure import DurableAIAgentWorker # Create the worker with TaskHubGrpcWorker(...) 
as worker: - + # Register the agent worker wrapper agent_worker = DurableAIAgentWorker(worker) - + # Register the agent agent_worker.add_agent(my_agent) ``` diff --git a/python/packages/durabletask/pyproject.toml b/python/packages/durabletask/pyproject.toml index 99460344fc..ea989cfd24 100644 --- a/python/packages/durabletask/pyproject.toml +++ b/python/packages/durabletask/pyproject.toml @@ -22,7 +22,7 @@ classifiers = [ "Typing :: Typed", ] dependencies = [ - "agent-framework-core", + "agent-framework-core>=1.0.0b260130", "durabletask>=1.3.0", "durabletask-azuremanaged>=1.3.0", "python-dateutil>=2.8.0", diff --git a/python/packages/foundry_local/pyproject.toml b/python/packages/foundry_local/pyproject.toml index 1a338ede43..7e94a0691f 100644 --- a/python/packages/foundry_local/pyproject.toml +++ b/python/packages/foundry_local/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ "Typing :: Typed", ] dependencies = [ - "agent-framework-core", + "agent-framework-core>=1.0.0b260130", "foundry-local-sdk>=0.5.1,<1", ] diff --git a/python/packages/foundry_local/samples/foundry_local_agent.py b/python/packages/foundry_local/samples/foundry_local_agent.py index 6d4705f8cb..9e81d2b33d 100644 --- a/python/packages/foundry_local/samples/foundry_local_agent.py +++ b/python/packages/foundry_local/samples/foundry_local_agent.py @@ -1,6 +1,8 @@ # Copyright (c) Microsoft. All rights reserved. # ruff: noqa +from __future__ import annotations + import asyncio from random import randint from typing import TYPE_CHECKING, Annotated @@ -31,7 +33,7 @@ def get_weather( return f"The weather in {location} is {conditions[randint(0, 3)]} with a high of {randint(10, 30)}°C." -async def non_streaming_example(agent: "ChatAgent") -> None: +async def non_streaming_example(agent: ChatAgent) -> None: """Example of non-streaming response (get the complete result at once).""" print("=== Non-streaming Response Example ===") @@ -41,7 +43,7 @@ async def non_streaming_example(agent: "ChatAgent") -> None: print(f"Agent: {result}\n") -async def streaming_example(agent: "ChatAgent") -> None: +async def streaming_example(agent: ChatAgent) -> None: """Example of streaming response (get results as they are generated).""" print("=== Streaming Response Example ===") diff --git a/python/packages/github_copilot/agent_framework_github_copilot/_agent.py b/python/packages/github_copilot/agent_framework_github_copilot/_agent.py index 8fa7e3c6a2..46a92a6dc9 100644 --- a/python/packages/github_copilot/agent_framework_github_copilot/_agent.py +++ b/python/packages/github_copilot/agent_framework_github_copilot/_agent.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. 
+from __future__ import annotations + import asyncio import contextlib import logging @@ -223,7 +225,7 @@ def __init__( self._default_options = opts self._started = False - async def __aenter__(self) -> "GitHubCopilotAgent[TOptions]": + async def __aenter__(self) -> GitHubCopilotAgent[TOptions]: """Start the agent when entering async context.""" await self.start() return self diff --git a/python/packages/github_copilot/pyproject.toml b/python/packages/github_copilot/pyproject.toml index a80197d2d7..57e3c536f8 100644 --- a/python/packages/github_copilot/pyproject.toml +++ b/python/packages/github_copilot/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ "Typing :: Typed", ] dependencies = [ - "agent-framework-core", + "agent-framework-core>=1.0.0b260130", "github-copilot-sdk>=0.1.0", ] diff --git a/python/packages/lab/gaia/agent_framework_lab_gaia/gaia.py b/python/packages/lab/gaia/agent_framework_lab_gaia/gaia.py index a9ca38bc96..d031e7e69f 100644 --- a/python/packages/lab/gaia/agent_framework_lab_gaia/gaia.py +++ b/python/packages/lab/gaia/agent_framework_lab_gaia/gaia.py @@ -371,6 +371,7 @@ def _ensure_data(self) -> Path: local_dir = snapshot_download( # type: ignore repo_id="gaia-benchmark/GAIA", repo_type="dataset", + revision="682dd723ee1e1697e00360edccf2366dc8418dd9", token=token, local_dir=str(self.data_dir), force_download=False, diff --git a/python/packages/lab/lightning/samples/train_math_agent.py b/python/packages/lab/lightning/samples/train_math_agent.py index 2c6937446e..0cb771e856 100644 --- a/python/packages/lab/lightning/samples/train_math_agent.py +++ b/python/packages/lab/lightning/samples/train_math_agent.py @@ -8,6 +8,8 @@ One GPU with 40GB of memory is sufficient for this sample. """ +from __future__ import annotations + import argparse import asyncio import json diff --git a/python/packages/lab/lightning/samples/train_tau2_agent.py b/python/packages/lab/lightning/samples/train_tau2_agent.py index 70d80a4c12..e9514b6f77 100644 --- a/python/packages/lab/lightning/samples/train_tau2_agent.py +++ b/python/packages/lab/lightning/samples/train_tau2_agent.py @@ -12,6 +12,8 @@ Requires one GPU of at least 80GB of memory. """ +from __future__ import annotations + import argparse import asyncio import json diff --git a/python/packages/lab/pyproject.toml b/python/packages/lab/pyproject.toml index 22eb969bd1..9431560d59 100644 --- a/python/packages/lab/pyproject.toml +++ b/python/packages/lab/pyproject.toml @@ -22,7 +22,7 @@ classifiers = [ "Programming Language :: Python :: 3.14", ] dependencies = [ - "agent-framework-core", + "agent-framework-core>=1.0.0b260130", ] [project.optional-dependencies] @@ -57,7 +57,6 @@ math = [ [dependency-groups] dev = [ "uv", - "pre-commit >= 3.7", "ruff>=0.11.8", "pytest>=8.4.1", "mypy>=1.16.1", @@ -69,6 +68,7 @@ dev = [ "tomli-w", # tau2 from source (not available on PyPI) "tau2@ git+https://github.com/sierra-research/tau2-bench@5ba9e3e56db57c5e4114bf7f901291f09b2c5619", + "prek>=0.3.2", ] [project.scripts] diff --git a/python/packages/lab/tau2/agent_framework_lab_tau2/runner.py b/python/packages/lab/tau2/agent_framework_lab_tau2/runner.py index c2e5ff6816..326aaf0748 100644 --- a/python/packages/lab/tau2/agent_framework_lab_tau2/runner.py +++ b/python/packages/lab/tau2/agent_framework_lab_tau2/runner.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. 
+from __future__ import annotations + import uuid from typing import cast @@ -90,7 +92,7 @@ def __init__(self, max_steps: int, assistant_sampling_temperature: float = 0.0, self.max_steps = max_steps self.reinit() - def reinit(self) -> "TaskRunner": + def reinit(self) -> TaskRunner: """Reset all state for a new task run.""" self.step_count = 0 self.full_conversation = [] diff --git a/python/packages/mem0/agent_framework_mem0/_provider.py b/python/packages/mem0/agent_framework_mem0/_provider.py index 0d12f06e5f..0dbad13134 100644 --- a/python/packages/mem0/agent_framework_mem0/_provider.py +++ b/python/packages/mem0/agent_framework_mem0/_provider.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import sys from collections.abc import MutableSequence, Sequence from contextlib import AbstractAsyncContextManager @@ -78,7 +80,7 @@ def __init__( self._per_operation_thread_id: str | None = None self._should_close_client = should_close_client - async def __aenter__(self) -> "Self": + async def __aenter__(self) -> Self: """Async context manager entry.""" if self.mem0_client and isinstance(self.mem0_client, AbstractAsyncContextManager): await self.mem0_client.__aenter__() diff --git a/python/packages/mem0/pyproject.toml b/python/packages/mem0/pyproject.toml index 26e8343aa9..016ad77bd6 100644 --- a/python/packages/mem0/pyproject.toml +++ b/python/packages/mem0/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ "Typing :: Typed", ] dependencies = [ - "agent-framework-core", + "agent-framework-core>=1.0.0b260130", "mem0ai>=1.0.0", ] diff --git a/python/packages/ollama/README.md b/python/packages/ollama/README.md index d49879c7cc..f67d538507 100644 --- a/python/packages/ollama/README.md +++ b/python/packages/ollama/README.md @@ -10,4 +10,4 @@ and see the [README](https://github.com/microsoft/agent-framework/tree/main/pyth # Run samples with the Ollama Conector -You can find samples how to run the connector under the [Getting_started] (./getting_started/README.md) folder +You can find samples how to run the connector under the [Getting_started] (./getting_started/README.md) folder diff --git a/python/packages/ollama/agent_framework_ollama/_chat_client.py b/python/packages/ollama/agent_framework_ollama/_chat_client.py index 6b4b55faac..684e5c6d9d 100644 --- a/python/packages/ollama/agent_framework_ollama/_chat_client.py +++ b/python/packages/ollama/agent_framework_ollama/_chat_client.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import json import sys from collections.abc import ( diff --git a/python/packages/ollama/pyproject.toml b/python/packages/ollama/pyproject.toml index e050978fca..0534b7c37a 100644 --- a/python/packages/ollama/pyproject.toml +++ b/python/packages/ollama/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ "Typing :: Typed", ] dependencies = [ - "agent-framework-core", + "agent-framework-core>=1.0.0b260130", "ollama >= 0.5.3", ] diff --git a/python/packages/ollama/tests/conftest.py b/python/packages/ollama/tests/conftest.py deleted file mode 100644 index dd2529c98c..0000000000 --- a/python/packages/ollama/tests/conftest.py +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
-from typing import Any - -from agent_framework import ChatMessage -from pytest import fixture - - -# region: Connector Settings fixtures -@fixture -def exclude_list(request: Any) -> list[str]: - """Fixture that returns a list of environment variables to exclude.""" - return request.param if hasattr(request, "param") else [] - - -@fixture -def override_env_param_dict(request: Any) -> dict[str, str]: - """Fixture that returns a dict of environment variables to override.""" - return request.param if hasattr(request, "param") else {} - - -# These two fixtures are used for multiple things, also non-connector tests -@fixture() -def ollama_unit_test_env(monkeypatch, exclude_list, override_env_param_dict): # type: ignore - """Fixture to set environment variables for OllamaSettings.""" - - if exclude_list is None: - exclude_list = [] - - if override_env_param_dict is None: - override_env_param_dict = {} - - env_vars = {"OLLAMA_HOST": "http://localhost:12345", "OLLAMA_MODEL_ID": "test"} - - env_vars.update(override_env_param_dict) # type: ignore - - for key, value in env_vars.items(): - if key in exclude_list: - monkeypatch.delenv(key, raising=False) # type: ignore - continue - monkeypatch.setenv(key, value) # type: ignore - - return env_vars - - -@fixture -def chat_history() -> list[ChatMessage]: - return [] diff --git a/python/packages/ollama/tests/test_ollama_chat_client.py b/python/packages/ollama/tests/test_ollama_chat_client.py index efe6d70890..095942fd6c 100644 --- a/python/packages/ollama/tests/test_ollama_chat_client.py +++ b/python/packages/ollama/tests/test_ollama_chat_client.py @@ -2,6 +2,7 @@ import os from collections.abc import AsyncIterable +from typing import Any from unittest.mock import AsyncMock, MagicMock, patch import pytest @@ -23,6 +24,7 @@ from ollama._types import ChatResponse as OllamaChatResponse from ollama._types import Message as OllamaMessage from openai import AsyncStream +from pytest import fixture from agent_framework_ollama import OllamaChatClient @@ -37,7 +39,49 @@ ) -@pytest.fixture +# region: Connector Settings fixtures +@fixture +def exclude_list(request: Any) -> list[str]: + """Fixture that returns a list of environment variables to exclude.""" + return request.param if hasattr(request, "param") else [] + + +@fixture +def override_env_param_dict(request: Any) -> dict[str, str]: + """Fixture that returns a dict of environment variables to override.""" + return request.param if hasattr(request, "param") else {} + + +# These two fixtures are used for multiple things, also non-connector tests +@fixture() +def ollama_unit_test_env(monkeypatch, exclude_list, override_env_param_dict): # type: ignore + """Fixture to set environment variables for OllamaSettings.""" + + if exclude_list is None: + exclude_list = [] + + if override_env_param_dict is None: + override_env_param_dict = {} + + env_vars = {"OLLAMA_HOST": "http://localhost:12345", "OLLAMA_MODEL_ID": "test"} + + env_vars.update(override_env_param_dict) # type: ignore + + for key, value in env_vars.items(): + if key in exclude_list: + monkeypatch.delenv(key, raising=False) # type: ignore + continue + monkeypatch.setenv(key, value) # type: ignore + + return env_vars + + +@fixture +def chat_history() -> list[ChatMessage]: + return [] + + +@fixture def mock_streaming_chat_completion_response() -> AsyncStream[OllamaChatResponse]: response = OllamaChatResponse( message=OllamaMessage(content="test", role="assistant"), @@ -48,7 +92,7 @@ def mock_streaming_chat_completion_response() -> AsyncStream[OllamaChatResponse] 
return stream -@pytest.fixture +@fixture def mock_streaming_chat_completion_response_reasoning() -> AsyncStream[OllamaChatResponse]: response = OllamaChatResponse( message=OllamaMessage(thinking="test", role="assistant"), @@ -59,7 +103,7 @@ def mock_streaming_chat_completion_response_reasoning() -> AsyncStream[OllamaCha return stream -@pytest.fixture +@fixture def mock_chat_completion_response() -> OllamaChatResponse: return OllamaChatResponse( message=OllamaMessage(content="test", role="assistant"), @@ -70,7 +114,7 @@ def mock_chat_completion_response() -> OllamaChatResponse: ) -@pytest.fixture +@fixture def mock_chat_completion_response_reasoning() -> OllamaChatResponse: return OllamaChatResponse( message=OllamaMessage(thinking="test", role="assistant"), @@ -81,7 +125,7 @@ def mock_chat_completion_response_reasoning() -> OllamaChatResponse: ) -@pytest.fixture +@fixture def mock_streaming_chat_completion_tool_call() -> AsyncStream[OllamaChatResponse]: ollama_tool_call = OllamaChatResponse( message=OllamaMessage( @@ -96,7 +140,7 @@ def mock_streaming_chat_completion_tool_call() -> AsyncStream[OllamaChatResponse return stream -@pytest.fixture +@fixture def mock_chat_completion_tool_call() -> OllamaChatResponse: return OllamaChatResponse( message=OllamaMessage( diff --git a/python/packages/orchestrations/README.md b/python/packages/orchestrations/README.md index 7ffc75e00d..f965111712 100644 --- a/python/packages/orchestrations/README.md +++ b/python/packages/orchestrations/README.md @@ -5,7 +5,7 @@ Orchestration patterns for Microsoft Agent Framework. This package provides high ## Installation ```bash -pip install agent-framework-orchestrations +pip install agent-framework-orchestrations --pre ``` ## Orchestration Patterns @@ -15,9 +15,9 @@ pip install agent-framework-orchestrations Chain agents/executors in sequence, passing conversation context along: ```python -from agent_framework_orchestrations import SequentialBuilder +from agent_framework.orchestrations import SequentialBuilder -workflow = SequentialBuilder().participants([agent1, agent2, agent3]).build() +workflow = SequentialBuilder(participants=[agent1, agent2, agent3]).build() ``` ### ConcurrentBuilder @@ -25,9 +25,9 @@ workflow = SequentialBuilder().participants([agent1, agent2, agent3]).build() Fan-out to multiple agents in parallel, then aggregate results: ```python -from agent_framework_orchestrations import ConcurrentBuilder +from agent_framework.orchestrations import ConcurrentBuilder -workflow = ConcurrentBuilder().participants([agent1, agent2, agent3]).build() +workflow = ConcurrentBuilder(participants=[agent1, agent2, agent3]).build() ``` ### HandoffBuilder @@ -35,7 +35,7 @@ workflow = ConcurrentBuilder().participants([agent1, agent2, agent3]).build() Decentralized agent routing where agents decide handoff targets: ```python -from agent_framework_orchestrations import HandoffBuilder +from agent_framework.orchestrations import HandoffBuilder workflow = ( HandoffBuilder() @@ -50,7 +50,7 @@ workflow = ( Orchestrator-directed multi-agent conversations: ```python -from agent_framework_orchestrations import GroupChatBuilder +from agent_framework.orchestrations import GroupChatBuilder workflow = GroupChatBuilder( participants=[agent1, agent2], @@ -63,7 +63,7 @@ workflow = GroupChatBuilder( Sophisticated multi-agent orchestration using the Magentic One pattern: ```python -from agent_framework_orchestrations import MagenticBuilder +from agent_framework.orchestrations import MagenticBuilder workflow = MagenticBuilder( 
participants=[researcher, writer, reviewer], @@ -71,14 +71,6 @@ workflow = MagenticBuilder( ).build() ``` -## Usage with agent_framework - -You can also import orchestrations through the main agent_framework package: - -```python -from agent_framework.orchestrations import SequentialBuilder, ConcurrentBuilder -``` - ## Documentation For more information, see the [Agent Framework documentation](https://aka.ms/agent-framework). diff --git a/python/packages/orchestrations/agent_framework_orchestrations/_group_chat.py b/python/packages/orchestrations/agent_framework_orchestrations/_group_chat.py index 3ed609c483..f4edbbdcb1 100644 --- a/python/packages/orchestrations/agent_framework_orchestrations/_group_chat.py +++ b/python/packages/orchestrations/agent_framework_orchestrations/_group_chat.py @@ -18,6 +18,8 @@ existing observability and streaming semantics continue to apply. """ +from __future__ import annotations + import inspect import logging import sys @@ -691,7 +693,7 @@ def _set_participants(self, participants: Sequence[SupportsAgentRun | Executor]) self._participants = named - def with_termination_condition(self, termination_condition: TerminationCondition) -> "GroupChatBuilder": + def with_termination_condition(self, termination_condition: TerminationCondition) -> GroupChatBuilder: """Set a custom termination condition for the group chat workflow. Args: @@ -732,7 +734,7 @@ def stop_after_two_calls(conversation: list[ChatMessage]) -> bool: self._termination_condition = termination_condition return self - def with_max_rounds(self, max_rounds: int | None) -> "GroupChatBuilder": + def with_max_rounds(self, max_rounds: int | None) -> GroupChatBuilder: """Set a maximum number of orchestrator rounds to prevent infinite conversations. When the round limit is reached, the workflow automatically completes with @@ -750,7 +752,7 @@ def with_max_rounds(self, max_rounds: int | None) -> "GroupChatBuilder": self._max_rounds = max_rounds return self - def with_checkpointing(self, checkpoint_storage: CheckpointStorage) -> "GroupChatBuilder": + def with_checkpointing(self, checkpoint_storage: CheckpointStorage) -> GroupChatBuilder: """Enable checkpointing for the built workflow using the provided storage. Checkpointing allows the workflow to persist state and resume from interruption @@ -782,7 +784,7 @@ def with_checkpointing(self, checkpoint_storage: CheckpointStorage) -> "GroupCha self._checkpoint_storage = checkpoint_storage return self - def with_request_info(self, *, agents: Sequence[str | SupportsAgentRun] | None = None) -> "GroupChatBuilder": + def with_request_info(self, *, agents: Sequence[str | SupportsAgentRun] | None = None) -> GroupChatBuilder: """Enable request info after agent participant responses. This enables human-in-the-loop (HIL) scenarios for the group chat orchestration. 
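The quoted return types in the hunks above could be dropped because the same change adds `from __future__ import annotations` (PEP 563), which makes annotations lazily evaluated, so a class may name itself in a signature before its body has finished executing. A minimal sketch of the pattern, using an assumed stand-in class rather than the real builder:

```python
from __future__ import annotations  # annotations become lazily evaluated strings


class BuilderSketch:
    # Without the future import this return type would need quotes ("BuilderSketch")
    # because the class name is not yet bound while the class body is executing.
    def with_max_rounds(self, max_rounds: int | None) -> BuilderSketch:
        self._max_rounds = max_rounds
        return self
```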
diff --git a/python/packages/orchestrations/pyproject.toml b/python/packages/orchestrations/pyproject.toml index 60ce61be1b..6e3ab8f46e 100644 --- a/python/packages/orchestrations/pyproject.toml +++ b/python/packages/orchestrations/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ "Typing :: Typed", ] dependencies = [ - "agent-framework-core", + "agent-framework-core>=1.0.0b260130", ] [tool.uv] diff --git a/python/packages/purview/agent_framework_purview/_client.py b/python/packages/purview/agent_framework_purview/_client.py index 351c1a6bee..3eda072ca3 100644 --- a/python/packages/purview/agent_framework_purview/_client.py +++ b/python/packages/purview/agent_framework_purview/_client.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + import base64 import inspect import json diff --git a/python/packages/purview/agent_framework_purview/_models.py b/python/packages/purview/agent_framework_purview/_models.py index e4c27496a9..4e14147ac5 100644 --- a/python/packages/purview/agent_framework_purview/_models.py +++ b/python/packages/purview/agent_framework_purview/_models.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. +from __future__ import annotations + from collections.abc import Mapping, MutableMapping, Sequence from datetime import datetime from enum import Enum, Flag, auto diff --git a/python/packages/purview/pyproject.toml b/python/packages/purview/pyproject.toml index df243154fc..a1e6456cf3 100644 --- a/python/packages/purview/pyproject.toml +++ b/python/packages/purview/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Typing :: Typed", ] dependencies = [ - "agent-framework-core", + "agent-framework-core>=1.0.0b260130", "azure-core>=1.30.0", "httpx>=0.27.0", ] diff --git a/python/packages/redis/README.md b/python/packages/redis/README.md index d0492f592a..3c6c5bb18c 100644 --- a/python/packages/redis/README.md +++ b/python/packages/redis/README.md @@ -32,7 +32,7 @@ The `RedisChatMessageStore` provides persistent conversation storage using Redis See the complete [Redis chat message store examples](../../samples/getting_started/threads/redis_chat_message_store_thread.py) including: - User session management -- Conversation persistence across restarts +- Conversation persistence across restarts - Thread serialization and deserialization - Automatic message trimming - Error handling patterns diff --git a/python/packages/redis/agent_framework_redis/_provider.py b/python/packages/redis/agent_framework_redis/_provider.py index 98c1195600..f8449962b7 100644 --- a/python/packages/redis/agent_framework_redis/_provider.py +++ b/python/packages/redis/agent_framework_redis/_provider.py @@ -1,5 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. 
+from __future__ import annotations + import json import sys from collections.abc import MutableSequence, Sequence diff --git a/python/packages/redis/pyproject.toml b/python/packages/redis/pyproject.toml index 14c75ba37c..30ec085706 100644 --- a/python/packages/redis/pyproject.toml +++ b/python/packages/redis/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ "Typing :: Typed", ] dependencies = [ - "agent-framework-core", + "agent-framework-core>=1.0.0b260130", "redis>=6.4.0", "redisvl>=0.8.2", "numpy>=2.2.6" diff --git a/python/pyproject.toml b/python/pyproject.toml index 60d70f1f68..af88ff92db 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -30,12 +30,10 @@ dependencies = [ dev = [ "uv>=0.9,<1.0.0", "flit>=3.12.0", - "pre-commit >= 3.7", "ruff>=0.11.8", "pytest>=8.4.1", "pytest-asyncio>=1.0.0", "pytest-cov>=6.2.1", - "pytest-env>=1.1.5", "pytest-xdist[psutil]>=3.8.0", "pytest-timeout>=2.3.1", "pytest-retry>=1", @@ -45,16 +43,7 @@ dev = [ "poethepoet>=0.36.0", "rich", "tomli", - "tomli-w", - # AutoGen migration samples - "autogen-agentchat", - "autogen-ext[openai]", -] -docs = [ - # Documentation - "debugpy>=1.8.16", - "py2docfx>=0.1.22.dev2259826", - "pip", + "prek>=0.3.2", ] [tool.uv] @@ -111,7 +100,7 @@ line-length = 120 target-version = "py310" fix = true include = ["*.py", "*.pyi", "**/pyproject.toml", "*.ipynb"] -exclude = ["docs/*", "run_tasks_in_packages_if_exists.py", "check_md_code_blocks.py"] +exclude = ["scripts"] extend-exclude = [ "[{][{]cookiecutter.package_name[}][}]", ] @@ -157,7 +146,7 @@ ignore = [ [tool.ruff.lint.per-file-ignores] # Ignore all directories named `tests` and `samples`. "**/tests/**" = ["D", "INP", "TD", "ERA001", "RUF", "S"] -"samples/**" = ["D", "INP", "ERA001", "RUF", "S", "T201"] +"samples/**" = ["D", "INP", "ERA001", "RUF", "S", "T201", "CPY"] "*.ipynb" = ["CPY", "E501"] [tool.ruff.format] @@ -191,7 +180,7 @@ omit = [ [tool.pyright] include = ["agent_framework*"] -exclude = ["**/tests/**", "docs", "**/.venv/**", "packages/devui/frontend/**"] +exclude = ["**/tests/**", "**/.venv/**", "packages/devui/frontend/**"] typeCheckingMode = "strict" reportUnnecessaryIsInstance = false reportMissingTypeStubs = false @@ -212,35 +201,36 @@ disallow_untyped_decorators = true [tool.bandit] targets = ["agent_framework"] -exclude_dirs = ["tests", "./run_tasks_in_packages_if_exists.py", "./check_md_code_blocks.py", "docs", "samples"] +exclude_dirs = ["tests", "scripts", "samples"] [tool.poe] executor.type = "uv" [tool.poe.tasks] -markdown-code-lint = "uv run python check_md_code_blocks.py 'README.md' './packages/**/README.md' './samples/**/*.md' --exclude cookiecutter-agent-framework-lab --exclude tau2 --exclude 'packages/devui/frontend'" -pre-commit-install = "uv run pre-commit install --install-hooks --overwrite" -install = "uv sync --all-packages --all-extras --dev -U --prerelease=if-necessary-or-explicit --no-group=docs" -test = "python run_tasks_in_packages_if_exists.py test" -fmt = "python run_tasks_in_packages_if_exists.py fmt" +markdown-code-lint = "uv run python scripts/check_md_code_blocks.py 'README.md' './packages/**/README.md' './samples/**/*.md' --exclude cookiecutter-agent-framework-lab --exclude tau2 --exclude 'packages/devui/frontend' --exclude context_providers/azure_ai_search" +prek-install = "prek install --overwrite" +install = "uv sync --all-packages --all-extras --dev -U --prerelease=if-necessary-or-explicit" +test = "python scripts/run_tasks_in_packages_if_exists.py test" +fmt = "python scripts/run_tasks_in_packages_if_exists.py 
fmt" format.ref = "fmt" -lint = "python run_tasks_in_packages_if_exists.py lint" +lint = "python scripts/run_tasks_in_packages_if_exists.py lint" samples-lint = "ruff check samples --fix --exclude samples/autogen-migration,samples/semantic-kernel-migration --ignore E501,ASYNC,B901,TD002" -pyright = "python run_tasks_in_packages_if_exists.py pyright" -mypy = "python run_tasks_in_packages_if_exists.py mypy" +pyright = "python scripts/run_tasks_in_packages_if_exists.py pyright" +mypy = "python scripts/run_tasks_in_packages_if_exists.py mypy" samples-syntax = "pyright -p pyrightconfig.samples.json --warnings" typing = ["pyright", "mypy"] # cleaning -clean-dist-packages = "python run_tasks_in_packages_if_exists.py clean-dist" +clean-dist-packages = "python scripts/run_tasks_in_packages_if_exists.py clean-dist" clean-dist-meta = "rm -rf dist" clean-dist = ["clean-dist-packages", "clean-dist-meta"] # build and publish -build-packages = "python run_tasks_in_packages_if_exists.py build" +build-packages = "python scripts/run_tasks_in_packages_if_exists.py build" build-meta = "python -m flit build" build = ["build-packages", "build-meta"] publish = "uv publish" # combined checks -check = ["fmt", "lint", "pyright", "mypy", "samples-lint", "samples-syntax", "test", "markdown-code-lint"] +check-packages = "python scripts/run_tasks_in_packages_if_exists.py fmt lint pyright mypy" +check = ["check-packages", "samples-lint", "samples-syntax", "test", "markdown-code-lint"] [tool.poe.tasks.all-tests-cov] cmd = """ @@ -285,16 +275,41 @@ args = [{ name = "python", default = "3.13", options = ['-p', '--python'] }] sequence = [ { ref = "venv --python $python"}, { ref = "install" }, - { ref = "pre-commit-install" } + { ref = "prek-install" } ] args = [{ name = "python", default = "3.13", options = ['-p', '--python'] }] -[tool.poe.tasks.pre-commit-markdown-code-lint] -cmd = "uv run python check_md_code_blocks.py ${files} --no-glob --exclude cookiecutter-agent-framework-lab --exclude tau2 --exclude 'packages/devui/frontend'" +[tool.poe.tasks.prek-pyright] +cmd = "uv run python scripts/run_tasks_in_changed_packages.py pyright --files ${files}" +args = [{ name = "files", default = ".", positional = true, multiple = true }] + +[tool.poe.tasks.prek-check-packages] +cmd = "uv run python scripts/run_tasks_in_changed_packages.py fmt lint pyright --files ${files}" +args = [{ name = "files", default = ".", positional = true, multiple = true }] + +[tool.poe.tasks.prek-markdown-code-lint] +cmd = """uv run python scripts/check_md_code_blocks.py ${files} --no-glob + --exclude cookiecutter-agent-framework-lab --exclude tau2 + --exclude packages/devui/frontend --exclude context_providers/azure_ai_search""" args = [{ name = "files", default = ".", positional = true, multiple = true }] -[tool.poe.tasks.pre-commit-pyright] -cmd = "uv run python run_tasks_in_changed_packages.py pyright ${files}" +[tool.poe.tasks.prek-samples-check] +shell = """ +HAS_SAMPLES=false +for f in ${files}; do + case "$f" in + samples/*) HAS_SAMPLES=true; break ;; + esac +done +if [ "$HAS_SAMPLES" = true ]; then + echo "Sample files changed, running samples checks..." 
+ uv run ruff check samples --fix --exclude samples/autogen-migration,samples/semantic-kernel-migration --ignore E501,ASYNC,B901,TD002 + uv run pyright -p pyrightconfig.samples.json --warnings +else + echo "No sample files changed, skipping samples checks" +fi +""" +interpreter = "bash" args = [{ name = "files", default = ".", positional = true, multiple = true }] @@ -316,18 +331,15 @@ else echo ".") fi echo "Changed files: $CHANGED_FILES" -uv run python run_tasks_in_changed_packages.py mypy $CHANGED_FILES +uv run python scripts/run_tasks_in_changed_packages.py mypy --files $CHANGED_FILES """ interpreter = "bash" -[tool.poe.tasks.pre-commit-check] +[tool.poe.tasks.prek-check] sequence = [ - { ref = "fmt" }, - { ref = "lint" }, - { ref = "pre-commit-pyright ${files}" }, - { ref = "pre-commit-markdown-code-lint ${files}" }, - { ref = "samples-lint" }, - { ref = "samples-syntax" } + { ref = "prek-check-packages ${files}" }, + { ref = "prek-markdown-code-lint ${files}" }, + { ref = "prek-samples-check ${files}" } ] args = [{ name = "files", default = ".", positional = true, multiple = true }] diff --git a/python/run_tasks_in_changed_packages.py b/python/run_tasks_in_changed_packages.py deleted file mode 100644 index a0071ceaf8..0000000000 --- a/python/run_tasks_in_changed_packages.py +++ /dev/null @@ -1,133 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. - -"""Run a task only in packages that have changed files.""" - -import argparse -import glob -import sys -from pathlib import Path - -import tomli -from poethepoet.app import PoeThePoet -from rich import print - - -def discover_projects(workspace_pyproject_file: Path) -> list[Path]: - with workspace_pyproject_file.open("rb") as f: - data = tomli.load(f) - - projects = data["tool"]["uv"]["workspace"]["members"] - exclude = data["tool"]["uv"]["workspace"].get("exclude", []) - - all_projects: list[Path] = [] - for project in projects: - if "*" in project: - globbed = glob.glob(str(project), root_dir=workspace_pyproject_file.parent) - globbed_paths = [Path(p) for p in globbed] - all_projects.extend(globbed_paths) - else: - all_projects.append(Path(project)) - - for project in exclude: - if "*" in project: - globbed = glob.glob(str(project), root_dir=workspace_pyproject_file.parent) - globbed_paths = [Path(p) for p in globbed] - all_projects = [p for p in all_projects if p not in globbed_paths] - else: - all_projects = [p for p in all_projects if p != Path(project)] - - return all_projects - - -def extract_poe_tasks(file: Path) -> set[str]: - with file.open("rb") as f: - data = tomli.load(f) - - tasks = set(data.get("tool", {}).get("poe", {}).get("tasks", {}).keys()) - - # Check if there is an include too - include: str | None = data.get("tool", {}).get("poe", {}).get("include", None) - if include: - include_file = file.parent / include - if include_file.exists(): - tasks = tasks.union(extract_poe_tasks(include_file)) - - return tasks - - -def get_changed_packages(projects: list[Path], changed_files: list[str], workspace_root: Path) -> set[Path]: - """Determine which packages have changed files.""" - changed_packages: set[Path] = set() - core_package_changed = False - - for file_path in changed_files: - # Strip 'python/' prefix if present (when git diff is run from repo root) - file_path_str = str(file_path) - if file_path_str.startswith("python/"): - file_path_str = file_path_str[7:] # Remove 'python/' prefix - - # Convert to absolute path if relative - abs_path = Path(file_path_str) - if not abs_path.is_absolute(): - abs_path = 
workspace_root / file_path_str - - # Check which package this file belongs to - for project in projects: - project_abs = workspace_root / project - try: - # Check if the file is within this project directory - abs_path.relative_to(project_abs) - changed_packages.add(project) - # Check if the core package was changed - if project == Path("packages/core"): - core_package_changed = True - break - except ValueError: - # File is not in this project - continue - - # If core package changed, check all packages - if core_package_changed: - print("[yellow]Core package changed - checking all packages[/yellow]") - return set(projects) - - return changed_packages - - -def main() -> None: - parser = argparse.ArgumentParser(description="Run a task only in packages with changed files.") - parser.add_argument("task", help="The task name to run") - parser.add_argument("files", nargs="*", help="Changed files to determine which packages to run") - args = parser.parse_args() - - pyproject_file = Path(__file__).parent / "pyproject.toml" - workspace_root = pyproject_file.parent - projects = discover_projects(pyproject_file) - - # If no files specified, run in all packages (default behavior) - if not args.files or args.files == ["."]: - print(f"[yellow]No specific files provided, running {args.task} in all packages[/yellow]") - changed_packages = set(projects) - else: - changed_packages = get_changed_packages(projects, args.files, workspace_root) - if changed_packages: - print(f"[cyan]Detected changes in packages: {', '.join(str(p) for p in sorted(changed_packages))}[/cyan]") - else: - print(f"[yellow]No changes detected in any package, skipping {args.task}[/yellow]") - return - - # Run the task in changed packages - for project in sorted(changed_packages): - tasks = extract_poe_tasks(project / "pyproject.toml") - if args.task in tasks: - print(f"Running task {args.task} in {project}") - app = PoeThePoet(cwd=project) - result = app(cli_args=[args.task]) - if result: - sys.exit(result) - else: - print(f"Task {args.task} not found in {project}") - - -if __name__ == "__main__": - main() diff --git a/python/run_tasks_in_packages_if_exists.py b/python/run_tasks_in_packages_if_exists.py deleted file mode 100644 index d8748512d5..0000000000 --- a/python/run_tasks_in_packages_if_exists.py +++ /dev/null @@ -1,77 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -import glob -import sys -from pathlib import Path - -import tomli -from poethepoet.app import PoeThePoet -from rich import print - - -def discover_projects(workspace_pyproject_file: Path) -> list[Path]: - with workspace_pyproject_file.open("rb") as f: - data = tomli.load(f) - - projects = data["tool"]["uv"]["workspace"]["members"] - exclude = data["tool"]["uv"]["workspace"].get("exclude", []) - - all_projects: list[Path] = [] - for project in projects: - if "*" in project: - globbed = glob.glob(str(project), root_dir=workspace_pyproject_file.parent) - globbed_paths = [Path(p) for p in globbed] - all_projects.extend(globbed_paths) - else: - all_projects.append(Path(project)) - - for project in exclude: - if "*" in project: - globbed = glob.glob(str(project), root_dir=workspace_pyproject_file.parent) - globbed_paths = [Path(p) for p in globbed] - all_projects = [p for p in all_projects if p not in globbed_paths] - else: - all_projects = [p for p in all_projects if p != Path(project)] - - return all_projects - - -def extract_poe_tasks(file: Path) -> set[str]: - with file.open("rb") as f: - data = tomli.load(f) - - tasks = set(data.get("tool", {}).get("poe", {}).get("tasks", {}).keys()) - - # Check if there is an include too - include: str | None = data.get("tool", {}).get("poe", {}).get("include", None) - if include: - include_file = file.parent / include - if include_file.exists(): - tasks = tasks.union(extract_poe_tasks(include_file)) - - return tasks - - -def main() -> None: - pyproject_file = Path(__file__).parent / "pyproject.toml" - projects = discover_projects(pyproject_file) - - if len(sys.argv) < 2: - print("Please provide a task name") - sys.exit(1) - - task_name = sys.argv[1] - for project in projects: - tasks = extract_poe_tasks(project / "pyproject.toml") - if task_name in tasks: - print(f"Running task {task_name} in {project}") - app = PoeThePoet(cwd=project) - result = app(cli_args=sys.argv[1:]) - if result: - sys.exit(result) - else: - print(f"Task {task_name} not found in {project}") - - -if __name__ == "__main__": - main() diff --git a/python/samples/SAMPLE_GUIDELINES.md b/python/samples/SAMPLE_GUIDELINES.md index e8c1589ef0..2dfd2dbc4a 100644 --- a/python/samples/SAMPLE_GUIDELINES.md +++ b/python/samples/SAMPLE_GUIDELINES.md @@ -2,6 +2,45 @@ Samples are extremely important for developers to get started with Agent Framework. We strive to provide a wide range of samples that demonstrate the capabilities of Agent Framework with consistency and quality. This document outlines the guidelines for creating samples. +## File Structure + +Every sample file should follow this order: + +1. PEP 723 inline script metadata (if external dependencies are needed) +2. Copyright header: `# Copyright (c) Microsoft. All rights reserved.` +3. Required imports +4. Module docstring: `"""This sample demonstrates..."""` +5. Helper functions +6. Main function(s) demonstrating functionality +7. Entry point: `if __name__ == "__main__": asyncio.run(main())` + +When modifying samples, update associated README files in the same or parent folders. 
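A hypothetical minimal skeleton (not taken from any real sample) illustrating the structure above, laid out the way the samples touched in this change are, with the module docstring directly under the copyright header and no PEP 723 block because no external dependencies are needed:

```python
# Copyright (c) Microsoft. All rights reserved.

"""This sample demonstrates the minimal file layout described above."""

import asyncio


def helper() -> str:
    """Helper functions come before the main function(s)."""
    return "Hello from the sample"


async def main() -> None:
    print(helper())


if __name__ == "__main__":
    asyncio.run(main())
```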
+ +## External Dependencies + +When samples depend on external packages not included in the dev environment (e.g., `semantic-kernel`, `autogen-agentchat`, `pandas`), declare them using [PEP 723](https://peps.python.org/pep-0723/) inline script metadata at the top of the file, before the copyright header: + +```python +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "some-external-package", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/path/to/script.py + +# Copyright (c) Microsoft. All rights reserved. +``` + +This makes samples self-contained and runnable without installing extra packages into the dev environment. Do not add sample-only dependencies to the root `pyproject.toml` dev group. + +## Syntax Checking + +Run `uv run poe samples-syntax` to check samples for syntax errors and missing imports from `agent_framework`. This uses a relaxed pyright configuration that validates imports without strict type checking. + +Some samples depend on external packages (e.g., `azure.ai.agentserver.agentframework`, `microsoft_agents`) that are not installed in the dev environment. These are excluded in `pyrightconfig.samples.json`. When adding or modifying these excluded samples, add them to the exclude list and manually verify they have no import errors from `agent_framework` packages by temporarily removing them from the exclude list and running the check. + ## General Guidelines - **Clear and Concise**: Samples should be clear and concise. They should demonstrate a specific set of features or capabilities of Agent Framework. The less concepts a sample demonstrates, the better. @@ -49,7 +88,7 @@ For the getting started samples and the concept samples, we should have the foll ```python # 1. Create the instance of the Kernel to register the plugin and service. ... - + # 2. Create the agent with the kernel instance. ... ``` @@ -64,7 +103,7 @@ For the getting started samples and the concept samples, we should have the foll User:> Why is the sky blue in one sentence? Mosscap:> The sky is blue due to the scattering of sunlight by the molecules in the Earth's atmosphere, a phenomenon known as Rayleigh scattering, which causes shorter blue wavelengths to become more - prominent in our visual perception. + prominent in our visual perception. ''' ``` diff --git a/python/samples/autogen-migration/orchestrations/01_round_robin_group_chat.py b/python/samples/autogen-migration/orchestrations/01_round_robin_group_chat.py index 0f7827deae..cffaae428f 100644 --- a/python/samples/autogen-migration/orchestrations/01_round_robin_group_chat.py +++ b/python/samples/autogen-migration/orchestrations/01_round_robin_group_chat.py @@ -1,3 +1,13 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "autogen-agentchat", +# "autogen-ext[openai]", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/autogen-migration/orchestrations/01_round_robin_group_chat.py + # Copyright (c) Microsoft. All rights reserved. """AutoGen RoundRobinGroupChat vs Agent Framework GroupChatBuilder/SequentialBuilder. 
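The body of that migration sample is not part of this diff; as a rough, hypothetical fragment, the Agent Framework side of a round-robin migration can be sketched with the constructor-style builder shown in the orchestrations README above. The helper name and the loose `object` typing of the participants are placeholders, not APIs from the patch:

```python
from collections.abc import Sequence

from agent_framework.orchestrations import SequentialBuilder


def build_round_robin_workflow(participants: Sequence[object]):
    # `participants` are the chat agents created earlier in the sample;
    # the builder chains them so each one sees the accumulated conversation.
    return SequentialBuilder(participants=list(participants)).build()
```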
diff --git a/python/samples/autogen-migration/orchestrations/02_selector_group_chat.py b/python/samples/autogen-migration/orchestrations/02_selector_group_chat.py index 2cb34bb4d2..1d46406c0a 100644 --- a/python/samples/autogen-migration/orchestrations/02_selector_group_chat.py +++ b/python/samples/autogen-migration/orchestrations/02_selector_group_chat.py @@ -1,3 +1,13 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "autogen-agentchat", +# "autogen-ext[openai]", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/autogen-migration/orchestrations/02_selector_group_chat.py + # Copyright (c) Microsoft. All rights reserved. """AutoGen SelectorGroupChat vs Agent Framework GroupChatBuilder. diff --git a/python/samples/autogen-migration/orchestrations/03_swarm.py b/python/samples/autogen-migration/orchestrations/03_swarm.py index 7f221e8b48..d0f6c74fe1 100644 --- a/python/samples/autogen-migration/orchestrations/03_swarm.py +++ b/python/samples/autogen-migration/orchestrations/03_swarm.py @@ -1,3 +1,13 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "autogen-agentchat", +# "autogen-ext[openai]", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/autogen-migration/orchestrations/03_swarm.py + # Copyright (c) Microsoft. All rights reserved. """AutoGen Swarm pattern vs Agent Framework HandoffBuilder. diff --git a/python/samples/autogen-migration/orchestrations/04_magentic_one.py b/python/samples/autogen-migration/orchestrations/04_magentic_one.py index caddaa3b43..f14cee5a26 100644 --- a/python/samples/autogen-migration/orchestrations/04_magentic_one.py +++ b/python/samples/autogen-migration/orchestrations/04_magentic_one.py @@ -1,3 +1,13 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "autogen-agentchat", +# "autogen-ext[openai]", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/autogen-migration/orchestrations/04_magentic_one.py + # Copyright (c) Microsoft. All rights reserved. """AutoGen MagenticOneGroupChat vs Agent Framework MagenticBuilder. diff --git a/python/samples/autogen-migration/single_agent/01_basic_assistant_agent.py b/python/samples/autogen-migration/single_agent/01_basic_assistant_agent.py index 8aad79b2c4..711bd648c8 100644 --- a/python/samples/autogen-migration/single_agent/01_basic_assistant_agent.py +++ b/python/samples/autogen-migration/single_agent/01_basic_assistant_agent.py @@ -1,3 +1,13 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "autogen-agentchat", +# "autogen-ext[openai]", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/autogen-migration/single_agent/01_basic_assistant_agent.py + # Copyright (c) Microsoft. All rights reserved. """Basic AutoGen AssistantAgent vs Agent Framework ChatAgent. 
diff --git a/python/samples/autogen-migration/single_agent/02_assistant_agent_with_tool.py b/python/samples/autogen-migration/single_agent/02_assistant_agent_with_tool.py index be251a272e..ff56e694a0 100644 --- a/python/samples/autogen-migration/single_agent/02_assistant_agent_with_tool.py +++ b/python/samples/autogen-migration/single_agent/02_assistant_agent_with_tool.py @@ -1,3 +1,14 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "autogen-agentchat", +# "autogen-core", +# "autogen-ext[openai]", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/autogen-migration/single_agent/02_assistant_agent_with_tool.py + # Copyright (c) Microsoft. All rights reserved. """AutoGen AssistantAgent vs Agent Framework ChatAgent with function tools. diff --git a/python/samples/autogen-migration/single_agent/03_assistant_agent_thread_and_stream.py b/python/samples/autogen-migration/single_agent/03_assistant_agent_thread_and_stream.py index 8cb516fe85..73fb0f3c62 100644 --- a/python/samples/autogen-migration/single_agent/03_assistant_agent_thread_and_stream.py +++ b/python/samples/autogen-migration/single_agent/03_assistant_agent_thread_and_stream.py @@ -1,3 +1,13 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "autogen-agentchat", +# "autogen-ext[openai]", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/autogen-migration/single_agent/03_assistant_agent_thread_and_stream.py + # Copyright (c) Microsoft. All rights reserved. """AutoGen vs Agent Framework: Thread management and streaming responses. diff --git a/python/samples/autogen-migration/single_agent/04_agent_as_tool.py b/python/samples/autogen-migration/single_agent/04_agent_as_tool.py index 432e489d45..fadf4c64f4 100644 --- a/python/samples/autogen-migration/single_agent/04_agent_as_tool.py +++ b/python/samples/autogen-migration/single_agent/04_agent_as_tool.py @@ -1,3 +1,13 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "autogen-agentchat", +# "autogen-ext[openai]", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/autogen-migration/single_agent/04_agent_as_tool.py + # Copyright (c) Microsoft. All rights reserved. """AutoGen vs Agent Framework: Agent-as-a-Tool pattern. diff --git a/python/samples/demos/chatkit-integration/app.py b/python/samples/demos/chatkit-integration/app.py index 7ae37d28fc..44a2e125f6 100644 --- a/python/samples/demos/chatkit-integration/app.py +++ b/python/samples/demos/chatkit-integration/app.py @@ -1,3 +1,13 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "fastapi", +# "uvicorn", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/demos/chatkit-integration/app.py + # Copyright (c) Microsoft. All rights reserved. """ diff --git a/python/samples/demos/m365-agent/m365_agent_demo/app.py b/python/samples/demos/m365-agent/m365_agent_demo/app.py index 3aa7382811..212941efa7 100644 --- a/python/samples/demos/m365-agent/m365_agent_demo/app.py +++ b/python/samples/demos/m365-agent/m365_agent_demo/app.py @@ -1,4 +1,3 @@ -# Copyright (c) Microsoft. All rights reserved. # /// script # requires-python = ">=3.11" # dependencies = [ @@ -10,6 +9,9 @@ # "aiohttp" # ] # /// +# Copyright (c) Microsoft. All rights reserved. 
+# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/demos/m365-agent/m365_agent_demo/app.py import os from dataclasses import dataclass diff --git a/python/samples/getting_started/agents/azure_ai/azure_ai_with_image_generation.py b/python/samples/getting_started/agents/azure_ai/azure_ai_with_image_generation.py index 707a71f05c..a097d3f4c2 100644 --- a/python/samples/getting_started/agents/azure_ai/azure_ai_with_image_generation.py +++ b/python/samples/getting_started/agents/azure_ai/azure_ai_with_image_generation.py @@ -5,7 +5,6 @@ from pathlib import Path from urllib import request as urllib_request -import aiofiles from agent_framework import HostedImageGenerationTool from agent_framework.azure import AzureAIProjectAgentProvider from azure.identity.aio import AzureCliCredential @@ -89,8 +88,8 @@ async def main() -> None: if data_bytes is None: raise RuntimeError("Image output present but could not retrieve bytes.") - async with aiofiles.open(file_path, "wb") as f: - await f.write(data_bytes) + with open(file_path, "wb") as f: + f.write(data_bytes) print(f"Image downloaded and saved to: {file_path}") else: diff --git a/python/samples/getting_started/agents/azure_ai/azure_ai_with_openapi.py b/python/samples/getting_started/agents/azure_ai/azure_ai_with_openapi.py index 17a6d78f91..260a5a0206 100644 --- a/python/samples/getting_started/agents/azure_ai/azure_ai_with_openapi.py +++ b/python/samples/getting_started/agents/azure_ai/azure_ai_with_openapi.py @@ -3,7 +3,6 @@ import json from pathlib import Path -import aiofiles from agent_framework.azure import AzureAIProjectAgentProvider from azure.identity.aio import AzureCliCredential @@ -23,9 +22,8 @@ async def main() -> None: # Load the OpenAPI specification resources_path = Path(__file__).parent.parent / "resources" / "countries.json" - async with aiofiles.open(resources_path, "r") as f: - content = await f.read() - openapi_countries = json.loads(content) + with open(resources_path) as f: + openapi_countries = json.load(f) async with ( AzureCliCredential() as credential, diff --git a/python/samples/getting_started/agents/copilotstudio/copilotstudio_with_explicit_settings.py b/python/samples/getting_started/agents/copilotstudio/copilotstudio_with_explicit_settings.py index 85e120097e..7f26019550 100644 --- a/python/samples/getting_started/agents/copilotstudio/copilotstudio_with_explicit_settings.py +++ b/python/samples/getting_started/agents/copilotstudio/copilotstudio_with_explicit_settings.py @@ -1,3 +1,12 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "microsoft-agents", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/getting_started/agents/copilotstudio/copilotstudio_with_explicit_settings.py + # Copyright (c) Microsoft. All rights reserved. import asyncio diff --git a/python/samples/getting_started/azure_functions/README.md b/python/samples/getting_started/azure_functions/README.md index 3839d600a0..25e0f308d5 100644 --- a/python/samples/getting_started/azure_functions/README.md +++ b/python/samples/getting_started/azure_functions/README.md @@ -8,7 +8,7 @@ All of these samples are set up to run in Azure Functions. Azure Functions has a ### 1. 
Install dependencies and create appropriate services - Install [Azure Functions Core Tools 4.x](https://learn.microsoft.com/azure/azure-functions/functions-run-local?tabs=windows%2Cpython%2Cv2&pivots=programming-language-python#install-the-azure-functions-core-tools) - + - Install [Azurite storage emulator](https://learn.microsoft.com/en-us/azure/storage/common/storage-install-azurite?toc=%2Fazure%2Fstorage%2Fblobs%2Ftoc.json&bc=%2Fazure%2Fstorage%2Fblobs%2Fbreadcrumb%2Ftoc.json&tabs=visual-studio%2Cblob-storage) - Create an [Azure OpenAI](https://azure.microsoft.com/en-us/products/ai-foundry/models/openai) resource. Note the Azure OpenAI endpoint, deployment name, and the key (or ensure you can authenticate with `AzureCliCredential`). @@ -29,17 +29,17 @@ python -m venv .venv ```bash python -m venv .venv source .venv/bin/activate -``` +``` -### 3. Running the samples +### 3. Running the samples - [Start the Azurite emulator](https://learn.microsoft.com/en-us/azure/storage/common/storage-install-azurite?tabs=npm%2Cblob-storage#run-azurite) -- Inside each sample: +- Inside each sample: - Install Python dependencies – from the sample directory, run `pip install -r requirements.txt` (or the equivalent in your active virtual environment). - - - Copy `local.settings.json.template` to `local.settings.json`, then update `AZURE_OPENAI_ENDPOINT` and `AZURE_OPENAI_CHAT_DEPLOYMENT_NAME` for Azure OpenAI authentication. The samples use `AzureCliCredential` by default, so ensure you're logged in via `az login`. + + - Copy `local.settings.json.template` to `local.settings.json`, then update `AZURE_OPENAI_ENDPOINT` and `AZURE_OPENAI_CHAT_DEPLOYMENT_NAME` for Azure OpenAI authentication. The samples use `AzureCliCredential` by default, so ensure you're logged in via `az login`. - Alternatively, you can use API key authentication by setting `AZURE_OPENAI_API_KEY` and updating the code to use `AzureOpenAIChatClient()` without the credential parameter. - Keep `TASKHUB_NAME` set to `default` unless you plan to change the durable task hub name. diff --git a/python/samples/getting_started/evaluation/red_teaming/README.md b/python/samples/getting_started/evaluation/red_teaming/README.md index b31cd91044..39fda91ae4 100644 --- a/python/samples/getting_started/evaluation/red_teaming/README.md +++ b/python/samples/getting_started/evaluation/red_teaming/README.md @@ -31,7 +31,7 @@ A focused sample demonstrating Azure AI's RedTeam functionality to assess the sa ### Python Environment ```bash -pip install agent-framework azure-ai-evaluation pyrit duckdb azure-identity aiofiles +pip install agent-framework azure-ai-evaluation pyrit duckdb azure-identity ``` Note: The sample uses `python-dotenv` to load environment variables from a `.env` file. 
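As a hedged illustration of the two authentication modes the Azure Functions README describes, a sketch of the client construction; the `credential` parameter name is inferred from the README wording and the samples elsewhere in this change, and may differ slightly:

```python
from agent_framework.azure import AzureOpenAIChatClient
from azure.identity import AzureCliCredential

# Default path used by the samples: Azure CLI credential (requires `az login`),
# with AZURE_OPENAI_ENDPOINT and AZURE_OPENAI_CHAT_DEPLOYMENT_NAME set.
client = AzureOpenAIChatClient(credential=AzureCliCredential())

# Alternative path: API key auth via AZURE_OPENAI_API_KEY, in which case the
# client is constructed without the credential parameter.
client_with_key = AzureOpenAIChatClient()
```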
diff --git a/python/samples/getting_started/evaluation/red_teaming/red_team_agent_sample.py b/python/samples/getting_started/evaluation/red_teaming/red_team_agent_sample.py index 38a5dffaaf..6e240d66b4 100644 --- a/python/samples/getting_started/evaluation/red_teaming/red_team_agent_sample.py +++ b/python/samples/getting_started/evaluation/red_teaming/red_team_agent_sample.py @@ -23,7 +23,7 @@ - Environment variables set in .env file or environment Installation: - pip install agent-framework azure-ai-evaluation pyrit duckdb azure-identity aiofiles + pip install agent-framework azure-ai-evaluation pyrit duckdb azure-identity Reference: Azure AI Red Teaming: https://github.com/Azure-Samples/azureai-samples/blob/main/scenarios/evaluate/AI_RedTeaming/AI_RedTeaming.ipynb diff --git a/python/samples/getting_started/evaluation/self_reflection/self_reflection.py b/python/samples/getting_started/evaluation/self_reflection/self_reflection.py index 274fa901f3..931d292dd1 100644 --- a/python/samples/getting_started/evaluation/self_reflection/self_reflection.py +++ b/python/samples/getting_started/evaluation/self_reflection/self_reflection.py @@ -1,3 +1,12 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "pandas", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/getting_started/evaluation/self_reflection/self_reflection.py + # Copyright (c) Microsoft. All rights reserved. # type: ignore import argparse diff --git a/python/samples/getting_started/observability/README.md b/python/samples/getting_started/observability/README.md index 5f3dd238c2..d42162b23c 100644 --- a/python/samples/getting_started/observability/README.md +++ b/python/samples/getting_started/observability/README.md @@ -273,7 +273,7 @@ If you're updating from a previous version of the Agent Framework, here are the ### OTLP Configuration **Before (Deprecated):** -```python +``` from agent_framework.observability import setup_observability # Via parameter setup_observability(otlp_endpoint="http://localhost:4317") @@ -305,7 +305,7 @@ configure_otel_providers(exporters=[ ### Azure Monitor Configuration **Before (Deprecated):** -```python +``` from agent_framework.observability import setup_observability setup_observability( @@ -341,7 +341,7 @@ enable_instrumentation() ### Console Output **Before (Deprecated):** -```python +``` from agent_framework.observability import setup_observability # Console was used as automatic fallback diff --git a/python/samples/getting_started/observability/agent_with_foundry_tracing.py b/python/samples/getting_started/observability/agent_with_foundry_tracing.py index 0e84a171fa..431c5b7868 100644 --- a/python/samples/getting_started/observability/agent_with_foundry_tracing.py +++ b/python/samples/getting_started/observability/agent_with_foundry_tracing.py @@ -1,3 +1,12 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "azure-monitor-opentelemetry", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/getting_started/observability/agent_with_foundry_tracing.py + # Copyright (c) Microsoft. All rights reserved. import asyncio diff --git a/python/samples/getting_started/workflows/declarative/README.md b/python/samples/getting_started/workflows/declarative/README.md index b2ce6de198..290a297042 100644 --- a/python/samples/getting_started/workflows/declarative/README.md +++ b/python/samples/getting_started/workflows/declarative/README.md @@ -40,7 +40,7 @@ actions: - kind: SetValue path: turn.greeting value: Hello, World! 
- + - kind: SendActivity activity: text: =turn.greeting diff --git a/python/samples/getting_started/workflows/declarative/customer_support/workflow.yaml b/python/samples/getting_started/workflows/declarative/customer_support/workflow.yaml index 62ce67c651..81ece8f24b 100644 --- a/python/samples/getting_started/workflows/declarative/customer_support/workflow.yaml +++ b/python/samples/getting_started/workflows/declarative/customer_support/workflow.yaml @@ -2,7 +2,7 @@ # This workflow demonstrates using multiple agents to provide automated # troubleshooting steps to resolve common issues with escalation options. # -# Example input: +# Example input: # My PC keeps rebooting and I can't use it. # kind: Workflow @@ -12,7 +12,7 @@ trigger: id: workflow_demo actions: - # Interact with user until the issue has been resolved or + # Interact with user until the issue has been resolved or # a determination is made that a ticket is required. - kind: InvokeAzureAgent id: service_agent @@ -23,7 +23,7 @@ trigger: externalLoop: when: |- =Not(Local.ServiceParameters.IsResolved) - And + And Not(Local.ServiceParameters.NeedsTicket) output: responseObject: Local.ServiceParameters @@ -32,14 +32,14 @@ trigger: - kind: ConditionGroup id: check_if_resolved conditions: - + - condition: =Local.ServiceParameters.IsResolved id: test_if_resolved actions: - kind: GotoAction id: end_when_resolved actionId: all_done - + # Create the ticket. - kind: InvokeAzureAgent id: ticket_agent @@ -103,7 +103,7 @@ trigger: externalLoop: when: |- =Not(Local.SupportParameters.IsResolved) - And + And Not(Local.SupportParameters.NeedsEscalation) output: autoSend: true @@ -124,7 +124,7 @@ trigger: - condition: =Local.SupportParameters.IsResolved id: handle_if_resolved actions: - + - kind: InvokeAzureAgent id: resolution_agent agent: diff --git a/python/samples/getting_started/workflows/parallelism/map_reduce_and_visualization.py b/python/samples/getting_started/workflows/parallelism/map_reduce_and_visualization.py index 1450399952..d29fe14bfb 100644 --- a/python/samples/getting_started/workflows/parallelism/map_reduce_and_visualization.py +++ b/python/samples/getting_started/workflows/parallelism/map_reduce_and_visualization.py @@ -6,7 +6,6 @@ from collections import defaultdict from dataclasses import dataclass -import aiofiles from agent_framework import ( Executor, # Base class for custom workflow steps WorkflowBuilder, # Fluent builder for executors and edges @@ -33,13 +32,12 @@ Prerequisites: - Familiarity with WorkflowBuilder, executors, fan out and fan in edges, events, and streaming runs. -- aiofiles installed for async file I/O. - Write access to a tmp directory next to this script. - A source text at resources/long_text.txt. - Optional for SVG export: install graphviz. Installation: - pip install agent-framework aiofiles graphviz + pip install agent-framework graphviz """ # Define the temporary directory for storing intermediate results @@ -128,8 +126,8 @@ async def map(self, _: SplitCompleted, ctx: WorkflowContext[MapCompleted]) -> No # Write this mapper's results as simple text lines for easy debugging. 
file_path = os.path.join(TEMP_DIR, f"map_results_{self.id}.txt") - async with aiofiles.open(file_path, "w") as f: - await f.writelines([f"{item}: {count}\n" for item, count in results]) + with open(file_path, "w") as f: + f.writelines([f"{item}: {count}\n" for item, count in results]) await ctx.send_message(MapCompleted(file_path)) @@ -163,8 +161,8 @@ async def shuffle(self, data: list[MapCompleted], ctx: WorkflowContext[ShuffleCo async def _process_chunk(chunk: list[tuple[str, list[int]]], index: int) -> None: """Write one grouped partition for reducer index and notify that reducer.""" file_path = os.path.join(TEMP_DIR, f"shuffle_results_{index}.txt") - async with aiofiles.open(file_path, "w") as f: - await f.writelines([f"{key}: {value}\n" for key, value in chunk]) + with open(file_path, "w") as f: + f.writelines([f"{key}: {value}\n" for key, value in chunk]) await ctx.send_message(ShuffleCompleted(file_path, self._reducer_ids[index])) tasks = [asyncio.create_task(_process_chunk(chunk, i)) for i, chunk in enumerate(chunks)] @@ -179,9 +177,9 @@ async def _preprocess(self, data: list[MapCompleted]) -> list[list[tuple[str, li # Load all intermediate pairs. map_results: list[tuple[str, int]] = [] for result in data: - async with aiofiles.open(result.file_path, "r") as f: + with open(result.file_path) as f: map_results.extend([ - (line.strip().split(": ")[0], int(line.strip().split(": ")[1])) for line in await f.readlines() + (line.strip().split(": ")[0], int(line.strip().split(": ")[1])) for line in f.readlines() ]) # Group values by token. @@ -230,8 +228,8 @@ async def _execute(self, data: ShuffleCompleted, ctx: WorkflowContext[ReduceComp return # Read grouped values from the shuffle output. - async with aiofiles.open(data.file_path, "r") as f: - lines = await f.readlines() + with open(data.file_path) as f: + lines = f.readlines() # Sum values per key. Values are serialized Python lists like [1, 1, ...]. reduced_results: dict[str, int] = defaultdict(int) @@ -241,8 +239,8 @@ async def _execute(self, data: ShuffleCompleted, ctx: WorkflowContext[ReduceComp # Persist our partition totals. file_path = os.path.join(TEMP_DIR, f"reduced_results_{self.id}.txt") - async with aiofiles.open(file_path, "w") as f: - await f.writelines([f"{key}: {value}\n" for key, value in reduced_results.items()]) + with open(file_path, "w") as f: + f.writelines([f"{key}: {value}\n" for key, value in reduced_results.items()]) await ctx.send_message(ReduceCompleted(file_path)) @@ -324,8 +322,8 @@ async def main(): print("Tip: Install 'viz' extra to export workflow visualization: pip install agent-framework[viz] --pre") # Step 3: Open the text file and read its content. - async with aiofiles.open(os.path.join(DIR, "../resources", "long_text.txt"), "r") as f: - raw_text = await f.read() + with open(os.path.join(DIR, "../resources", "long_text.txt")) as f: + raw_text = f.read() # Step 4: Run the workflow with the raw text as input. 
async for event in workflow.run(raw_text, stream=True): diff --git a/python/samples/semantic-kernel-migration/azure_ai_agent/01_basic_azure_ai_agent.py b/python/samples/semantic-kernel-migration/azure_ai_agent/01_basic_azure_ai_agent.py index c54dae167d..5b85fc6722 100644 --- a/python/samples/semantic-kernel-migration/azure_ai_agent/01_basic_azure_ai_agent.py +++ b/python/samples/semantic-kernel-migration/azure_ai_agent/01_basic_azure_ai_agent.py @@ -1,3 +1,12 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "semantic-kernel", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/semantic-kernel-migration/azure_ai_agent/01_basic_azure_ai_agent.py + # Copyright (c) Microsoft. All rights reserved. """Create an Azure AI agent using both Semantic Kernel and Agent Framework. diff --git a/python/samples/semantic-kernel-migration/azure_ai_agent/02_azure_ai_agent_with_code_interpreter.py b/python/samples/semantic-kernel-migration/azure_ai_agent/02_azure_ai_agent_with_code_interpreter.py index acbd45481b..81c059fc90 100644 --- a/python/samples/semantic-kernel-migration/azure_ai_agent/02_azure_ai_agent_with_code_interpreter.py +++ b/python/samples/semantic-kernel-migration/azure_ai_agent/02_azure_ai_agent_with_code_interpreter.py @@ -1,3 +1,12 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "semantic-kernel", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/semantic-kernel-migration/azure_ai_agent/02_azure_ai_agent_with_code_interpreter.py + # Copyright (c) Microsoft. All rights reserved. """Enable the hosted code interpreter for Azure AI agents in SK and AF. diff --git a/python/samples/semantic-kernel-migration/azure_ai_agent/03_azure_ai_agent_threads_and_followups.py b/python/samples/semantic-kernel-migration/azure_ai_agent/03_azure_ai_agent_threads_and_followups.py index ad1386c23a..ae0b28e37d 100644 --- a/python/samples/semantic-kernel-migration/azure_ai_agent/03_azure_ai_agent_threads_and_followups.py +++ b/python/samples/semantic-kernel-migration/azure_ai_agent/03_azure_ai_agent_threads_and_followups.py @@ -1,3 +1,12 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "semantic-kernel", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/semantic-kernel-migration/azure_ai_agent/03_azure_ai_agent_threads_and_followups.py + # Copyright (c) Microsoft. All rights reserved. """Maintain Azure AI agent conversation state across turns in SK and AF.""" diff --git a/python/samples/semantic-kernel-migration/chat_completion/01_basic_chat_completion.py b/python/samples/semantic-kernel-migration/chat_completion/01_basic_chat_completion.py index 494c1f417f..74ecd1ecf5 100644 --- a/python/samples/semantic-kernel-migration/chat_completion/01_basic_chat_completion.py +++ b/python/samples/semantic-kernel-migration/chat_completion/01_basic_chat_completion.py @@ -1,3 +1,12 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "semantic-kernel", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/semantic-kernel-migration/chat_completion/01_basic_chat_completion.py + # Copyright (c) Microsoft. All rights reserved. """Basic SK ChatCompletionAgent vs Agent Framework ChatAgent. 
diff --git a/python/samples/semantic-kernel-migration/chat_completion/02_chat_completion_with_tool.py b/python/samples/semantic-kernel-migration/chat_completion/02_chat_completion_with_tool.py index 363cdaec53..2bf7266018 100644 --- a/python/samples/semantic-kernel-migration/chat_completion/02_chat_completion_with_tool.py +++ b/python/samples/semantic-kernel-migration/chat_completion/02_chat_completion_with_tool.py @@ -1,3 +1,12 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "semantic-kernel", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/semantic-kernel-migration/chat_completion/02_chat_completion_with_tool.py + # Copyright (c) Microsoft. All rights reserved. """Demonstrate SK plugins vs Agent Framework tools with a chat agent. diff --git a/python/samples/semantic-kernel-migration/chat_completion/03_chat_completion_thread_and_stream.py b/python/samples/semantic-kernel-migration/chat_completion/03_chat_completion_thread_and_stream.py index 5d802867b1..d357c2f957 100644 --- a/python/samples/semantic-kernel-migration/chat_completion/03_chat_completion_thread_and_stream.py +++ b/python/samples/semantic-kernel-migration/chat_completion/03_chat_completion_thread_and_stream.py @@ -1,3 +1,12 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "semantic-kernel", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/semantic-kernel-migration/chat_completion/03_chat_completion_thread_and_stream.py + # Copyright (c) Microsoft. All rights reserved. """Compare conversation threading and streaming responses for chat agents. diff --git a/python/samples/semantic-kernel-migration/copilot_studio/01_basic_copilot_studio_agent.py b/python/samples/semantic-kernel-migration/copilot_studio/01_basic_copilot_studio_agent.py index a1ffd95799..2c0d7261fb 100644 --- a/python/samples/semantic-kernel-migration/copilot_studio/01_basic_copilot_studio_agent.py +++ b/python/samples/semantic-kernel-migration/copilot_studio/01_basic_copilot_studio_agent.py @@ -1,3 +1,12 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "semantic-kernel", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/semantic-kernel-migration/copilot_studio/01_basic_copilot_studio_agent.py + # Copyright (c) Microsoft. All rights reserved. """Call a Copilot Studio agent with SK and Agent Framework.""" diff --git a/python/samples/semantic-kernel-migration/copilot_studio/02_copilot_studio_streaming.py b/python/samples/semantic-kernel-migration/copilot_studio/02_copilot_studio_streaming.py index e0f02f682c..a30aa58ff2 100644 --- a/python/samples/semantic-kernel-migration/copilot_studio/02_copilot_studio_streaming.py +++ b/python/samples/semantic-kernel-migration/copilot_studio/02_copilot_studio_streaming.py @@ -1,3 +1,12 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "semantic-kernel", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/semantic-kernel-migration/copilot_studio/02_copilot_studio_streaming.py + # Copyright (c) Microsoft. All rights reserved. 
"""Stream responses from Copilot Studio agents in SK and AF.""" diff --git a/python/samples/semantic-kernel-migration/openai_assistant/01_basic_openai_assistant.py b/python/samples/semantic-kernel-migration/openai_assistant/01_basic_openai_assistant.py index dda342c87f..34709fbaf1 100644 --- a/python/samples/semantic-kernel-migration/openai_assistant/01_basic_openai_assistant.py +++ b/python/samples/semantic-kernel-migration/openai_assistant/01_basic_openai_assistant.py @@ -1,3 +1,12 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "semantic-kernel", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/semantic-kernel-migration/openai_assistant/01_basic_openai_assistant.py + # Copyright (c) Microsoft. All rights reserved. """Create an OpenAI Assistant using SK and Agent Framework.""" diff --git a/python/samples/semantic-kernel-migration/openai_assistant/02_openai_assistant_with_code_interpreter.py b/python/samples/semantic-kernel-migration/openai_assistant/02_openai_assistant_with_code_interpreter.py index 3b0cd166f2..034404990d 100644 --- a/python/samples/semantic-kernel-migration/openai_assistant/02_openai_assistant_with_code_interpreter.py +++ b/python/samples/semantic-kernel-migration/openai_assistant/02_openai_assistant_with_code_interpreter.py @@ -1,3 +1,12 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "semantic-kernel", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/semantic-kernel-migration/openai_assistant/02_openai_assistant_with_code_interpreter.py + # Copyright (c) Microsoft. All rights reserved. """Enable the code interpreter tool for OpenAI Assistants in SK and AF.""" diff --git a/python/samples/semantic-kernel-migration/openai_assistant/03_openai_assistant_function_tool.py b/python/samples/semantic-kernel-migration/openai_assistant/03_openai_assistant_function_tool.py index e84bc1b171..6f88f29832 100644 --- a/python/samples/semantic-kernel-migration/openai_assistant/03_openai_assistant_function_tool.py +++ b/python/samples/semantic-kernel-migration/openai_assistant/03_openai_assistant_function_tool.py @@ -1,3 +1,12 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "semantic-kernel", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/semantic-kernel-migration/openai_assistant/03_openai_assistant_function_tool.py + # Copyright (c) Microsoft. All rights reserved. """Implement a function tool for OpenAI Assistants in SK and AF.""" diff --git a/python/samples/semantic-kernel-migration/openai_responses/01_basic_responses_agent.py b/python/samples/semantic-kernel-migration/openai_responses/01_basic_responses_agent.py index 7e39fb7a98..3402a2e1e3 100644 --- a/python/samples/semantic-kernel-migration/openai_responses/01_basic_responses_agent.py +++ b/python/samples/semantic-kernel-migration/openai_responses/01_basic_responses_agent.py @@ -1,3 +1,12 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "semantic-kernel", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/semantic-kernel-migration/openai_responses/01_basic_responses_agent.py + # Copyright (c) Microsoft. All rights reserved. 
"""Issue a basic Responses API call using SK and Agent Framework.""" diff --git a/python/samples/semantic-kernel-migration/openai_responses/02_responses_agent_with_tool.py b/python/samples/semantic-kernel-migration/openai_responses/02_responses_agent_with_tool.py index fb18708ddf..c770763bce 100644 --- a/python/samples/semantic-kernel-migration/openai_responses/02_responses_agent_with_tool.py +++ b/python/samples/semantic-kernel-migration/openai_responses/02_responses_agent_with_tool.py @@ -1,3 +1,12 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "semantic-kernel", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/semantic-kernel-migration/openai_responses/02_responses_agent_with_tool.py + # Copyright (c) Microsoft. All rights reserved. """Attach a lightweight function tool to the Responses API in SK and AF.""" diff --git a/python/samples/semantic-kernel-migration/openai_responses/03_responses_agent_structured_output.py b/python/samples/semantic-kernel-migration/openai_responses/03_responses_agent_structured_output.py index b124e5f0f1..bd37c3b33c 100644 --- a/python/samples/semantic-kernel-migration/openai_responses/03_responses_agent_structured_output.py +++ b/python/samples/semantic-kernel-migration/openai_responses/03_responses_agent_structured_output.py @@ -1,3 +1,12 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "semantic-kernel", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/semantic-kernel-migration/openai_responses/03_responses_agent_structured_output.py + # Copyright (c) Microsoft. All rights reserved. """Request structured JSON output from the Responses API in SK and AF.""" diff --git a/python/samples/semantic-kernel-migration/orchestrations/concurrent_basic.py b/python/samples/semantic-kernel-migration/orchestrations/concurrent_basic.py index a5b012da94..72f0c24252 100644 --- a/python/samples/semantic-kernel-migration/orchestrations/concurrent_basic.py +++ b/python/samples/semantic-kernel-migration/orchestrations/concurrent_basic.py @@ -1,3 +1,12 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "semantic-kernel", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/semantic-kernel-migration/orchestrations/concurrent_basic.py + # Copyright (c) Microsoft. All rights reserved. """Side-by-side concurrent orchestrations for Agent Framework and Semantic Kernel.""" @@ -6,7 +15,7 @@ from collections.abc import Sequence from typing import cast -from agent_framework import ChatMessage, ConcurrentBuilderWorkflowEvent +from agent_framework import ChatMessage from agent_framework.azure import AzureOpenAIChatClient from azure.identity import AzureCliCredential from semantic_kernel.agents import Agent, ChatCompletionAgent, ConcurrentOrchestration diff --git a/python/samples/semantic-kernel-migration/orchestrations/group_chat.py b/python/samples/semantic-kernel-migration/orchestrations/group_chat.py index dda7e7922c..539041a537 100644 --- a/python/samples/semantic-kernel-migration/orchestrations/group_chat.py +++ b/python/samples/semantic-kernel-migration/orchestrations/group_chat.py @@ -1,3 +1,12 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "semantic-kernel", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/semantic-kernel-migration/orchestrations/group_chat.py + # Copyright (c) Microsoft. All rights reserved. 
"""Side-by-side group chat orchestrations for Agent Framework and Semantic Kernel.""" diff --git a/python/samples/semantic-kernel-migration/orchestrations/handoff.py b/python/samples/semantic-kernel-migration/orchestrations/handoff.py index 5d848ac6ba..3fe024a9f4 100644 --- a/python/samples/semantic-kernel-migration/orchestrations/handoff.py +++ b/python/samples/semantic-kernel-migration/orchestrations/handoff.py @@ -1,3 +1,12 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "semantic-kernel", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/semantic-kernel-migration/orchestrations/handoff.py + # Copyright (c) Microsoft. All rights reserved. """Side-by-side handoff orchestrations for Semantic Kernel and Agent Framework.""" diff --git a/python/samples/semantic-kernel-migration/orchestrations/magentic.py b/python/samples/semantic-kernel-migration/orchestrations/magentic.py index 4eef2e9dec..d6509fb4d7 100644 --- a/python/samples/semantic-kernel-migration/orchestrations/magentic.py +++ b/python/samples/semantic-kernel-migration/orchestrations/magentic.py @@ -1,3 +1,12 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "semantic-kernel", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/semantic-kernel-migration/orchestrations/magentic.py + # Copyright (c) Microsoft. All rights reserved. """Side-by-side Magentic orchestrations for Agent Framework and Semantic Kernel.""" diff --git a/python/samples/semantic-kernel-migration/orchestrations/sequential.py b/python/samples/semantic-kernel-migration/orchestrations/sequential.py index a810b3178b..13bfdf82a0 100644 --- a/python/samples/semantic-kernel-migration/orchestrations/sequential.py +++ b/python/samples/semantic-kernel-migration/orchestrations/sequential.py @@ -1,3 +1,12 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "semantic-kernel", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/semantic-kernel-migration/orchestrations/sequential.py + # Copyright (c) Microsoft. All rights reserved. """Side-by-side sequential orchestrations for Agent Framework and Semantic Kernel.""" diff --git a/python/samples/semantic-kernel-migration/processes/fan_out_fan_in_process.py b/python/samples/semantic-kernel-migration/processes/fan_out_fan_in_process.py index efd2253323..afca864ea7 100644 --- a/python/samples/semantic-kernel-migration/processes/fan_out_fan_in_process.py +++ b/python/samples/semantic-kernel-migration/processes/fan_out_fan_in_process.py @@ -1,3 +1,12 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "semantic-kernel", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/semantic-kernel-migration/processes/fan_out_fan_in_process.py + # Copyright (c) Microsoft. All rights reserved. 
"""Side-by-side sample comparing Semantic Kernel Process Framework and Agent Framework workflows.""" diff --git a/python/samples/semantic-kernel-migration/processes/nested_process.py b/python/samples/semantic-kernel-migration/processes/nested_process.py index ab1b2bb64c..775647d992 100644 --- a/python/samples/semantic-kernel-migration/processes/nested_process.py +++ b/python/samples/semantic-kernel-migration/processes/nested_process.py @@ -1,3 +1,12 @@ +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "semantic-kernel", +# ] +# /// +# Run with any PEP 723 compatible runner, e.g.: +# uv run samples/semantic-kernel-migration/processes/nested_process.py + # Copyright (c) Microsoft. All rights reserved. """Nested process comparison between Semantic Kernel Process Framework and Agent Framework sub-workflows.""" @@ -17,7 +26,7 @@ WorkflowBuilder, WorkflowContext, WorkflowExecutor, - + handler, ) from pydantic import BaseModel, Field diff --git a/python/check_md_code_blocks.py b/python/scripts/check_md_code_blocks.py similarity index 80% rename from python/check_md_code_blocks.py rename to python/scripts/check_md_code_blocks.py index 7377a73038..7510f32fb9 100644 --- a/python/check_md_code_blocks.py +++ b/python/scripts/check_md_code_blocks.py @@ -6,6 +6,7 @@ from enum import Enum import glob import logging +import os import tempfile import subprocess # nosec @@ -96,14 +97,29 @@ def check_code_blocks(markdown_file_paths: list[str], exclude_patterns: list[str logger.info(f' {with_color("OK[ignored]", Colors.CGREENBG)}') continue - with tempfile.NamedTemporaryFile(suffix=".py", delete=False) as temp_file: - temp_file.write(code_block.encode("utf-8")) - temp_file.flush() - - # Run pyright on the temporary file using subprocess.run - - result = subprocess.run(["uv", "run", "pyright", temp_file.name], capture_output=True, text=True, cwd=".") # nosec - if result.returncode != 0: + with tempfile.TemporaryDirectory() as tmp_dir: + # Use the same rules as pyrightconfig.samples.json: + # typeCheckingMode=off, only reportMissingImports and reportAttributeAccessIssue enabled. + pyright_cfg = os.path.join(tmp_dir, "pyrightconfig.json") + with open(pyright_cfg, "w") as cfg: + cfg.write( + '{"include":["."],"typeCheckingMode":"off",' + '"reportMissingImports":"error","reportAttributeAccessIssue":"error"}' + ) + tmp_file = os.path.join(tmp_dir, "snippet.py") + with open(tmp_file, "w", encoding="utf-8") as f: + f.write(code_block) + + result = subprocess.run(["uv", "run", "pyright", "-p", tmp_dir], capture_output=True, text=True, cwd=".") # nosec + # Filter to only errors from our config rules; syntax-level errors + # (top-level await, etc.) are expected in README documentation snippets. + # Only flag reportMissingImports for agent_framework modules, not third-party packages. + relevant_errors = [ + line for line in result.stdout.splitlines() + if ("reportMissingImports" in line and "agent_framework" in line) + or "reportAttributeAccessIssue" in line + ] + if relevant_errors: highlighted_code = highlight(code_block, PythonLexer(), TerminalFormatter()) # type: ignore logger.info( f" {with_color('FAIL', Colors.CREDBG)}\n" diff --git a/python/scripts/run_tasks_in_changed_packages.py b/python/scripts/run_tasks_in_changed_packages.py new file mode 100644 index 0000000000..9773e278d4 --- /dev/null +++ b/python/scripts/run_tasks_in_changed_packages.py @@ -0,0 +1,81 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +"""Run task(s) only in packages that have changed files, in parallel by default.""" + +import argparse +from pathlib import Path + +from rich import print +from task_runner import build_work_items, discover_projects, run_tasks + + +def get_changed_packages(projects: list[Path], changed_files: list[str], workspace_root: Path) -> set[Path]: + """Determine which packages have changed files.""" + changed_packages: set[Path] = set() + core_package_changed = False + + for file_path in changed_files: + # Strip 'python/' prefix if present (when git diff is run from repo root) + file_path_str = str(file_path) + if file_path_str.startswith("python/"): + file_path_str = file_path_str[7:] # Remove 'python/' prefix + + # Convert to absolute path if relative + abs_path = Path(file_path_str) + if not abs_path.is_absolute(): + abs_path = workspace_root / file_path_str + + # Check which package this file belongs to + for project in projects: + project_abs = workspace_root / project + try: + # Check if the file is within this project directory + abs_path.relative_to(project_abs) + changed_packages.add(project) + # Check if the core package was changed + if project == Path("packages/core"): + core_package_changed = True + break + except ValueError: + # File is not in this project + continue + + # If core package changed, check all packages + if core_package_changed: + print("[yellow]Core package changed - checking all packages[/yellow]") + return set(projects) + + return changed_packages + + +def main() -> None: + parser = argparse.ArgumentParser(description="Run task(s) in changed packages, in parallel by default.") + parser.add_argument("tasks", nargs="+", help="Task name(s) to run") + parser.add_argument("--files", nargs="*", default=None, help="Changed files to determine which packages to run") + parser.add_argument("--seq", action="store_true", help="Run sequentially instead of in parallel") + args = parser.parse_args() + + pyproject_file = Path(__file__).parent.parent / "pyproject.toml" + workspace_root = pyproject_file.parent + projects = discover_projects(pyproject_file) + + # Determine which packages to check + if not args.files or args.files == ["."]: + task_list = ", ".join(args.tasks) + print(f"[yellow]No specific files provided, running {task_list} in all packages[/yellow]") + target_packages = sorted(set(projects)) + else: + changed_packages = get_changed_packages(projects, args.files, workspace_root) + if changed_packages: + print(f"[cyan]Detected changes in packages: {', '.join(str(p) for p in sorted(changed_packages))}[/cyan]") + else: + print(f"[yellow]No changes detected in any package, skipping[/yellow]") + return + target_packages = sorted(changed_packages) + + work_items = build_work_items(target_packages, args.tasks) + run_tasks(work_items, workspace_root, sequential=args.seq) + + +if __name__ == "__main__": + main() diff --git a/python/scripts/run_tasks_in_packages_if_exists.py b/python/scripts/run_tasks_in_packages_if_exists.py new file mode 100644 index 0000000000..d84e1ec2bb --- /dev/null +++ b/python/scripts/run_tasks_in_packages_if_exists.py @@ -0,0 +1,29 @@ +# Copyright (c) Microsoft. All rights reserved. + +"""Run poe task(s) across all workspace packages, in parallel by default.""" + +import argparse +import sys +from pathlib import Path + +from task_runner import build_work_items, discover_projects, run_tasks + + +def main() -> None: + parser = argparse.ArgumentParser( + description="Run poe task(s) across all workspace packages, in parallel by default." 
+ ) + parser.add_argument("tasks", nargs="+", help="Task name(s) to run across packages") + parser.add_argument("--seq", action="store_true", help="Run sequentially instead of in parallel") + args = parser.parse_args() + + pyproject_file = Path(__file__).parent.parent / "pyproject.toml" + workspace_root = pyproject_file.parent + projects = discover_projects(pyproject_file) + + work_items = build_work_items(projects, args.tasks) + run_tasks(work_items, workspace_root, sequential=args.seq) + + +if __name__ == "__main__": + main() diff --git a/python/scripts/task_runner.py b/python/scripts/task_runner.py new file mode 100644 index 0000000000..a6e14ccaaa --- /dev/null +++ b/python/scripts/task_runner.py @@ -0,0 +1,150 @@ +# Copyright (c) Microsoft. All rights reserved. + +"""Shared utilities for running poe tasks across workspace packages in parallel.""" + +import concurrent.futures +import glob +import os +import subprocess +import sys +import time +from pathlib import Path + +import tomli +from rich import print + + +def discover_projects(workspace_pyproject_file: Path) -> list[Path]: + """Discover all workspace projects from pyproject.toml.""" + with workspace_pyproject_file.open("rb") as f: + data = tomli.load(f) + + projects = data["tool"]["uv"]["workspace"]["members"] + exclude = data["tool"]["uv"]["workspace"].get("exclude", []) + + all_projects: list[Path] = [] + for project in projects: + if "*" in project: + globbed = glob.glob(str(project), root_dir=workspace_pyproject_file.parent) + globbed_paths = [Path(p) for p in globbed] + all_projects.extend(globbed_paths) + else: + all_projects.append(Path(project)) + + for project in exclude: + if "*" in project: + globbed = glob.glob(str(project), root_dir=workspace_pyproject_file.parent) + globbed_paths = [Path(p) for p in globbed] + all_projects = [p for p in all_projects if p not in globbed_paths] + else: + all_projects = [p for p in all_projects if p != Path(project)] + + return all_projects + + +def extract_poe_tasks(file: Path) -> set[str]: + """Extract poe task names from a pyproject.toml file.""" + with file.open("rb") as f: + data = tomli.load(f) + + tasks = set(data.get("tool", {}).get("poe", {}).get("tasks", {}).keys()) + + # Check if there is an include too + include: str | None = data.get("tool", {}).get("poe", {}).get("include", None) + if include: + include_file = file.parent / include + if include_file.exists(): + tasks = tasks.union(extract_poe_tasks(include_file)) + + return tasks + + +def build_work_items(projects: list[Path], task_names: list[str]) -> list[tuple[Path, str]]: + """Build cross-product of (package, task) for packages that define the task.""" + work_items: list[tuple[Path, str]] = [] + for project in projects: + available_tasks = extract_poe_tasks(project / "pyproject.toml") + for task in task_names: + if task in available_tasks: + work_items.append((project, task)) + return work_items + + +def _run_task_subprocess(project: Path, task: str, workspace_root: Path) -> tuple[Path, str, int, str, str, float]: + """Run a single poe task in a project directory via subprocess.""" + start = time.monotonic() + cwd = workspace_root / project + result = subprocess.run( + ["uv", "run", "poe", task], + cwd=cwd, + capture_output=True, + text=True, + ) + elapsed = time.monotonic() - start + return (project, task, result.returncode, result.stdout, result.stderr, elapsed) + + +def _run_sequential(work_items: list[tuple[Path, str]]) -> None: + """Run tasks sequentially using in-process PoeThePoet (streaming output).""" + from 
poethepoet.app import PoeThePoet + + for project, task in work_items: + print(f"Running task {task} in {project}") + app = PoeThePoet(cwd=project) + result = app(cli_args=[task]) + if result: + sys.exit(result) + + +def _run_parallel(work_items: list[tuple[Path, str]], workspace_root: Path) -> None: + """Run all (package × task) combinations in parallel via subprocesses.""" + max_workers = min(len(work_items), os.cpu_count() or 4) + failures: list[tuple[Path, str, str, str]] = [] + completed = 0 + total = len(work_items) + + print(f"[cyan]Running {total} task(s) in parallel (max {max_workers} workers)...[/cyan]") + + with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor: + futures = { + executor.submit(_run_task_subprocess, project, task, workspace_root): (project, task) + for project, task in work_items + } + for future in concurrent.futures.as_completed(futures): + project, task, returncode, stdout, stderr, elapsed = future.result() + completed += 1 + progress = f"[{completed}/{total}]" + if returncode == 0: + print(f" [green]✓[/green] {progress} {task} in {project} ({elapsed:.1f}s)") + else: + print(f" [red]✗[/red] {progress} {task} in {project} ({elapsed:.1f}s)") + failures.append((project, task, stdout, stderr)) + + if failures: + print(f"\n[red]{len(failures)} task(s) failed:[/red]") + for project, task, stdout, stderr in failures: + print(f"\n[red]{'='*60}[/red]") + print(f"[red]FAILED: {task} in {project}[/red]") + if stdout.strip(): + print(stdout) + if stderr.strip(): + sys.stderr.write(stderr) + sys.exit(1) + + print(f"\n[green]All {total} task(s) passed ✓[/green]") + + +def run_tasks(work_items: list[tuple[Path, str]], workspace_root: Path, *, sequential: bool = False) -> None: + """Run work items either in parallel or sequentially. + + Single items use in-process PoeThePoet for streaming output. + Multiple items use parallel subprocesses by default. 
+ """ + if not work_items: + print("[yellow]No matching tasks found in any package[/yellow]") + return + + if sequential or len(work_items) == 1: + _run_sequential(work_items) + else: + _run_parallel(work_items, workspace_root) diff --git a/python/uv.lock b/python/uv.lock index 4759a01f66..e762f40433 100644 --- a/python/uv.lock +++ b/python/uv.lock @@ -104,62 +104,44 @@ dependencies = [ [package.dev-dependencies] dev = [ - { name = "autogen-agentchat", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "autogen-ext", extra = ["openai"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "flit", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "mypy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "poethepoet", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "pre-commit", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "prek", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pyright", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pytest", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pytest-asyncio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pytest-cov", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "pytest-env", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pytest-retry", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pytest-timeout", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pytest-xdist", extra = ["psutil"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "rich", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "ruff", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "tomli", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "tomli-w", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "uv", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -docs = [ - { name = "debugpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "pip", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "py2docfx", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, -] [package.metadata] requires-dist = [{ name = "agent-framework-core", extras = ["all"], editable = "packages/core" }] [package.metadata.requires-dev] dev = [ - { name = "autogen-agentchat" }, - { name = "autogen-ext", extras = ["openai"] }, { name = "flit", specifier = ">=3.12.0" }, { name = "mypy", specifier = ">=1.16.1" }, { name = "poethepoet", specifier = ">=0.36.0" }, - { name = "pre-commit", specifier = ">=3.7" }, + { name = 
"prek", specifier = ">=0.3.2" }, { name = "pyright", specifier = ">=1.1.402" }, { name = "pytest", specifier = ">=8.4.1" }, { name = "pytest-asyncio", specifier = ">=1.0.0" }, { name = "pytest-cov", specifier = ">=6.2.1" }, - { name = "pytest-env", specifier = ">=1.1.5" }, { name = "pytest-retry", specifier = ">=1" }, { name = "pytest-timeout", specifier = ">=2.3.1" }, { name = "pytest-xdist", extras = ["psutil"], specifier = ">=3.8.0" }, { name = "rich" }, { name = "ruff", specifier = ">=0.11.8" }, { name = "tomli" }, - { name = "tomli-w" }, { name = "uv", specifier = ">=0.9,<1.0.0" }, ] -docs = [ - { name = "debugpy", specifier = ">=1.8.16" }, - { name = "pip" }, - { name = "py2docfx", specifier = ">=0.1.22.dev2259826" }, -] [[package]] name = "agent-framework-a2a" @@ -562,7 +544,7 @@ tau2 = [ dev = [ { name = "mypy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "poethepoet", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "pre-commit", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "prek", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pyright", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pytest", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "rich", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -595,7 +577,7 @@ provides-extras = ["gaia", "lightning", "tau2", "math"] dev = [ { name = "mypy", specifier = ">=1.16.1" }, { name = "poethepoet", specifier = ">=0.36.0" }, - { name = "pre-commit", specifier = ">=3.7" }, + { name = "prek", specifier = ">=0.3.2" }, { name = "pyright", specifier = ">=1.1.402" }, { name = "pytest", specifier = ">=8.4.1" }, { name = "rich" }, @@ -736,15 +718,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d9/63/3e48da56d5121ddcefef8645ad5a3446b0974154111a14bf75ea2b5b3cc3/agentops-0.4.21-py3-none-any.whl", hash = "sha256:93b098ea77bc5f64dcae5031a8292531cb446d9d66e6c7ef2f21a66d4e4fb2f0", size = 309579, upload-time = "2025-08-29T06:36:53.855Z" }, ] -[[package]] -name = "aiofiles" -version = "25.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/41/c3/534eac40372d8ee36ef40df62ec129bee4fdb5ad9706e58a29be53b2c970/aiofiles-25.1.0.tar.gz", hash = "sha256:a8d728f0a29de45dc521f18f07297428d56992a742f0cd2701ba86e44d23d5b2", size = 46354, upload-time = "2025-10-09T20:51:04.358Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/bc/8a/340a1555ae33d7354dbca4faa54948d76d89a27ceef032c8c3bc661d003e/aiofiles-25.1.0-py3-none-any.whl", hash = "sha256:abe311e527c862958650f9438e859c1fa7568a141b22abcd015e120e86a85695", size = 14668, upload-time = "2025-10-09T20:51:03.174Z" }, -] - [[package]] name = "aiohappyeyeballs" version = "2.6.1" @@ -887,15 +860,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, ] -[[package]] -name = "alabaster" -version = "0.7.16" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/c9/3e/13dd8e5ed9094e734ac430b5d0eb4f2bb001708a8b7856cbf8e084e001ba/alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65", size = 23776, upload-time = "2024-01-10T00:56:10.189Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/32/34/d4e1c02d3bee589efb5dfa17f88ea08bdb3e3eac12bc475462aec52ed223/alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92", size = 13511, upload-time = "2024-01-10T00:56:08.388Z" }, -] - [[package]] name = "annotated-doc" version = "0.0.4" @@ -916,7 +880,7 @@ wheels = [ [[package]] name = "anthropic" -version = "0.78.0" +version = "0.79.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -928,9 +892,9 @@ dependencies = [ { name = "sniffio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ec/51/32849a48f9b1cfe80a508fd269b20bd8f0b1357c70ba092890fde5a6a10b/anthropic-0.78.0.tar.gz", hash = "sha256:55fd978ab9b049c61857463f4c4e9e092b24f892519c6d8078cee1713d8af06e", size = 509136, upload-time = "2026-02-05T17:52:04.986Z" } +sdist = { url = "https://files.pythonhosted.org/packages/15/b1/91aea3f8fd180d01d133d931a167a78a3737b3fd39ccef2ae8d6619c24fd/anthropic-0.79.0.tar.gz", hash = "sha256:8707aafb3b1176ed6c13e2b1c9fb3efddce90d17aee5d8b83a86c70dcdcca871", size = 509825, upload-time = "2026-02-07T18:06:18.388Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3b/03/2f50931a942e5e13f80e24d83406714672c57964be593fc046d81369335b/anthropic-0.78.0-py3-none-any.whl", hash = "sha256:2a9887d2e99d1b0f9fe08857a1e9fe5d2d4030455dbf9ac65aab052e2efaeac4", size = 405485, upload-time = "2026-02-05T17:52:03.674Z" }, + { url = "https://files.pythonhosted.org/packages/95/b2/cc0b8e874a18d7da50b0fda8c99e4ac123f23bf47b471827c5f6f3e4a767/anthropic-0.79.0-py3-none-any.whl", hash = "sha256:04cbd473b6bbda4ca2e41dd670fe2f829a911530f01697d0a1e37321eb75f3cf", size = 405918, upload-time = "2026-02-07T18:06:20.246Z" }, ] [[package]] @@ -995,54 +959,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, ] -[[package]] -name = "autogen-agentchat" -version = "0.7.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "autogen-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e5/b6/df2f835ce3aaaa2716a3dfbbd4ab8855839184f08b35ce0baa23b26a1885/autogen_agentchat-0.7.5.tar.gz", hash = "sha256:8d9c718db52ef24a518806b3a0ef848f0e4c1902877675dc0abed73a8e6e7755", size = 147716, upload-time = "2025-09-30T06:16:14.413Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9e/82/23490a70837d77d691948863d393cef71a06d36903249f635b28f579292b/autogen_agentchat-0.7.5-py3-none-any.whl", hash = "sha256:d19ca8ec26cb15e071a56c4269140aea2bf3c718bdc7e06f6677af9a905815ba", size = 119302, upload-time = 
"2025-09-30T06:16:12.895Z" }, -] - -[[package]] -name = "autogen-core" -version = "0.7.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "jsonref", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "opentelemetry-api", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "pillow", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b1/11/fea52bf3541c5308bed1ee9b9b3596fa510b2c5db893d32b649d22f02b87/autogen_core-0.7.5.tar.gz", hash = "sha256:70c2871389f1d0a7f6db8ef78717a51b7ce877ff4a08a836b7758d604dece203", size = 101980, upload-time = "2025-09-30T06:16:25.957Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/33/83/8ad899fca9dd2d2b3e5e37be13dd9e6aee3e53a621041b0624d74b07e1ee/autogen_core-0.7.5-py3-none-any.whl", hash = "sha256:4f4a0d3b88a36da75b2ef0d40be2d5e3a207cae7f7d951511e498ad1d68f8ef4", size = 101874, upload-time = "2025-09-30T06:16:24.306Z" }, -] - -[[package]] -name = "autogen-ext" -version = "0.7.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "autogen-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e5/c8/f0651372f814c48eb64ffe921166995b7734bec0df7f0ba663383e831f58/autogen_ext-0.7.5.tar.gz", hash = "sha256:711ab9238ea66ff2abef163c331e538092bdea661620727a4a9b2ebce1c22df9", size = 417568, upload-time = "2025-09-30T06:16:24.278Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5c/10/9333ba6c532086cce7ec7fb39e36b9a08afdbc39e2d3519f00af712e403a/autogen_ext-0.7.5-py3-none-any.whl", hash = "sha256:18cecc8aab37c7c4861fbad038a1017f0ef25e35e273aa158066ccf9d93fea4f", size = 331380, upload-time = "2025-09-30T06:16:22.832Z" }, -] - -[package.optional-dependencies] -openai = [ - { name = "aiofiles", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "openai", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "tiktoken", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, -] - [[package]] name = "azure-ai-agents" version = "1.2.0b5" @@ -1171,15 +1087,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d8/3a/6ef2047a072e54e1142718d433d50e9514c999a58f51abfff7902f3a72f8/azure_storage_blob-12.28.0-py3-none-any.whl", hash = "sha256:00fb1db28bf6a7b7ecaa48e3b1d5c83bfadacc5a678b77826081304bd87d6461", size = 431499, upload-time = "2026-01-06T23:48:58.995Z" }, ] -[[package]] -name = "babel" -version = "2.18.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7d/b2/51899539b6ceeeb420d40ed3cd4b7a40519404f9baf3d4ac99dc413a834b/babel-2.18.0.tar.gz", hash = "sha256:b80b99a14bd085fcacfa15c9165f651fbb3406e66cc603abf11c5750937c992d", size = 9959554, upload-time = "2026-02-01T12:30:56.078Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/77/f5/21d2de20e8b8b0408f0681956ca2c69f1320a3848ac50e6e7f39c6159675/babel-2.18.0-py3-none-any.whl", hash = "sha256:e2b422b277c2b9a9630c1d7903c2a00d0830c409c59ac8cae9081c92f1aeba35", size = 10196845, upload-time = "2026-02-01T12:30:53.445Z" }, -] - [[package]] name = "backoff" version = "2.2.1" @@ -1326,15 +1233,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, ] -[[package]] -name = "cfgv" -version = "3.5.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4e/b5/721b8799b04bf9afe054a3899c6cf4e880fcf8563cc71c15610242490a0c/cfgv-3.5.0.tar.gz", hash = "sha256:d5b1034354820651caa73ede66a6294d6e95c1b00acc5e9b098e917404669132", size = 7334, upload-time = "2025-11-19T20:55:51.612Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/db/3c/33bac158f8ab7f89b2e59426d5fe2e4f63f7ed25df84c036890172b412b5/cfgv-3.5.0-py2.py3-none-any.whl", hash = "sha256:a8dc6b26ad22ff227d2634a65cb388215ce6cc96bbcc5cfde7641ae87e8dacc0", size = 7445, upload-time = "2025-11-19T20:55:50.744Z" }, -] - [[package]] name = "charset-normalizer" version = "3.4.4" @@ -1426,19 +1324,19 @@ wheels = [ [[package]] name = "claude-agent-sdk" -version = "0.1.31" +version = "0.1.33" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "mcp", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6d/df/071dce5803c4db8cd53708bcda3b6022c1c4b68fc00e9007593309515286/claude_agent_sdk-0.1.31.tar.gz", hash = "sha256:b68c681083d7cc985dd3e48f73aabf459f056c1a7e1c5b9c47033c6af94da1a1", size = 61191, upload-time = "2026-02-06T02:01:51.043Z" } +sdist = { url = "https://files.pythonhosted.org/packages/57/aa/5c417ef464d3fa712d830cd56a9a79aef8dfb5bc3414aae4bae136cf4e73/claude_agent_sdk-0.1.33.tar.gz", hash = "sha256:134bf403bb7553d829dadec42c30ecef340f5d4ad1595c1bdef933a9ca3129cf", size = 61196, upload-time = "2026-02-07T19:19:53.372Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0c/7c/e249a3b4215e28a9722b3d9ab6057bceeeaa2b948530f022065ef2154555/claude_agent_sdk-0.1.31-py3-none-macosx_11_0_arm64.whl", hash = "sha256:801bacfe4192782a7cc7b61b0d23a57f061c069993dd3dfa8109aa2e7050a530", size = 54284257, upload-time = "2026-02-06T02:01:35.61Z" }, - { url = "https://files.pythonhosted.org/packages/d6/a8/1a8288736aeafcc48e3dcb3326ec7f487dbf89ebba77d526e9464786a299/claude_agent_sdk-0.1.31-py3-none-manylinux_2_17_aarch64.whl", hash = "sha256:0b608e0cbfcedcb827427e6d16a73fe573d58e7f93e15f95435066feacbe6511", size = 68462461, upload-time = "2026-02-06T02:01:40.074Z" }, - { url = "https://files.pythonhosted.org/packages/26/7a/7dcd0b77263ed55b17554fa3a67a6772b788e7048a524fd06c9baa970564/claude_agent_sdk-0.1.31-py3-none-manylinux_2_17_x86_64.whl", hash = "sha256:d0cb30e026a22246e84d9237d23bb4df20be5146913a04d2802ddd37d4f8b8c9", 
size = 70173234, upload-time = "2026-02-06T02:01:44.486Z" }, - { url = "https://files.pythonhosted.org/packages/37/a5/4a8de7a9738f454b54aa97557f0fba9c74b0901ea418597008c668243fea/claude_agent_sdk-0.1.31-py3-none-win_amd64.whl", hash = "sha256:8ceca675c2770ad739bd1208362059a830e91c74efcf128045b5a7af14d36f2b", size = 72366975, upload-time = "2026-02-06T02:01:48.647Z" }, + { url = "https://files.pythonhosted.org/packages/72/31/1ac5d536013b1e38b37d71928a0db214cbd47e7bb815c21141dbc6dd93b6/claude_agent_sdk-0.1.33-py3-none-macosx_11_0_arm64.whl", hash = "sha256:57886a2dd124e5b3c9e12ec3e4841742ab3444d1e428b45ceaec8841c96698fa", size = 54323456, upload-time = "2026-02-07T19:19:39.407Z" }, + { url = "https://files.pythonhosted.org/packages/54/36/79c3feb3f2c95591b80de39a1d3097d30bc3a9a84fcff6422f5434f1187a/claude_agent_sdk-0.1.33-py3-none-manylinux_2_17_aarch64.whl", hash = "sha256:ea0f1e4fadeec766000122723c406a6f47c6210ea11bb5cc0c88af11ef7c940c", size = 69106772, upload-time = "2026-02-07T19:19:42.998Z" }, + { url = "https://files.pythonhosted.org/packages/03/a8/64d22ae767154da4629004a80e9f59f71b5070d55fcfade4efdfb06b1f7a/claude_agent_sdk-0.1.33-py3-none-manylinux_2_17_x86_64.whl", hash = "sha256:0ecd822c577b4ea2a52e51146a24dcea73eb69ff366bdb875785dadb116d593b", size = 69688592, upload-time = "2026-02-07T19:19:46.629Z" }, + { url = "https://files.pythonhosted.org/packages/b8/aa/83677a3d42b047bcacf4dbe730bf5189a106b5b6746ee83f6920e5d9729a/claude_agent_sdk-0.1.33-py3-none-win_amd64.whl", hash = "sha256:a9fbd09d8f947005e087340ecd0706ed35639c946b4bd49429d3132db4cb3751", size = 72211078, upload-time = "2026-02-07T19:19:50.528Z" }, ] [[package]] @@ -1828,35 +1726,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e7/05/c19819d5e3d95294a6f5947fb9b9629efb316b96de511b418c53d245aae6/cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30", size = 8321, upload-time = "2023-10-07T05:32:16.783Z" }, ] -[[package]] -name = "debugpy" -version = "1.8.20" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e0/b7/cd8080344452e4874aae67c40d8940e2b4d47b01601a8fd9f44786c757c7/debugpy-1.8.20.tar.gz", hash = "sha256:55bc8701714969f1ab89a6d5f2f3d40c36f91b2cbe2f65d98bf8196f6a6a2c33", size = 1645207, upload-time = "2026-01-29T23:03:28.199Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/71/be/8bd693a0b9d53d48c8978fa5d889e06f3b5b03e45fd1ea1e78267b4887cb/debugpy-1.8.20-cp310-cp310-macosx_15_0_x86_64.whl", hash = "sha256:157e96ffb7f80b3ad36d808646198c90acb46fdcfd8bb1999838f0b6f2b59c64", size = 2099192, upload-time = "2026-01-29T23:03:29.707Z" }, - { url = "https://files.pythonhosted.org/packages/77/1b/85326d07432086a06361d493d2743edd0c4fc2ef62162be7f8618441ac37/debugpy-1.8.20-cp310-cp310-manylinux_2_34_x86_64.whl", hash = "sha256:c1178ae571aff42e61801a38b007af504ec8e05fde1c5c12e5a7efef21009642", size = 3088568, upload-time = "2026-01-29T23:03:31.467Z" }, - { url = "https://files.pythonhosted.org/packages/e8/60/3e08462ee3eccd10998853eb35947c416e446bfe2bc37dbb886b9044586c/debugpy-1.8.20-cp310-cp310-win32.whl", hash = "sha256:c29dd9d656c0fbd77906a6e6a82ae4881514aa3294b94c903ff99303e789b4a2", size = 5284399, upload-time = "2026-01-29T23:03:33.678Z" }, - { url = "https://files.pythonhosted.org/packages/72/43/09d49106e770fe558ced5e80df2e3c2ebee10e576eda155dcc5670473663/debugpy-1.8.20-cp310-cp310-win_amd64.whl", hash = "sha256:3ca85463f63b5dd0aa7aaa933d97cbc47c174896dcae8431695872969f981893", 
size = 5316388, upload-time = "2026-01-29T23:03:35.095Z" }, - { url = "https://files.pythonhosted.org/packages/51/56/c3baf5cbe4dd77427fd9aef99fcdade259ad128feeb8a786c246adb838e5/debugpy-1.8.20-cp311-cp311-macosx_15_0_universal2.whl", hash = "sha256:eada6042ad88fa1571b74bd5402ee8b86eded7a8f7b827849761700aff171f1b", size = 2208318, upload-time = "2026-01-29T23:03:36.481Z" }, - { url = "https://files.pythonhosted.org/packages/9a/7d/4fa79a57a8e69fe0d9763e98d1110320f9ecd7f1f362572e3aafd7417c9d/debugpy-1.8.20-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:7de0b7dfeedc504421032afba845ae2a7bcc32ddfb07dae2c3ca5442f821c344", size = 3171493, upload-time = "2026-01-29T23:03:37.775Z" }, - { url = "https://files.pythonhosted.org/packages/7d/f2/1e8f8affe51e12a26f3a8a8a4277d6e60aa89d0a66512f63b1e799d424a4/debugpy-1.8.20-cp311-cp311-win32.whl", hash = "sha256:773e839380cf459caf73cc533ea45ec2737a5cc184cf1b3b796cd4fd98504fec", size = 5209240, upload-time = "2026-01-29T23:03:39.109Z" }, - { url = "https://files.pythonhosted.org/packages/d5/92/1cb532e88560cbee973396254b21bece8c5d7c2ece958a67afa08c9f10dc/debugpy-1.8.20-cp311-cp311-win_amd64.whl", hash = "sha256:1f7650546e0eded1902d0f6af28f787fa1f1dbdbc97ddabaf1cd963a405930cb", size = 5233481, upload-time = "2026-01-29T23:03:40.659Z" }, - { url = "https://files.pythonhosted.org/packages/14/57/7f34f4736bfb6e00f2e4c96351b07805d83c9a7b33d28580ae01374430f7/debugpy-1.8.20-cp312-cp312-macosx_15_0_universal2.whl", hash = "sha256:4ae3135e2089905a916909ef31922b2d733d756f66d87345b3e5e52b7a55f13d", size = 2550686, upload-time = "2026-01-29T23:03:42.023Z" }, - { url = "https://files.pythonhosted.org/packages/ab/78/b193a3975ca34458f6f0e24aaf5c3e3da72f5401f6054c0dfd004b41726f/debugpy-1.8.20-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:88f47850a4284b88bd2bfee1f26132147d5d504e4e86c22485dfa44b97e19b4b", size = 4310588, upload-time = "2026-01-29T23:03:43.314Z" }, - { url = "https://files.pythonhosted.org/packages/c1/55/f14deb95eaf4f30f07ef4b90a8590fc05d9e04df85ee379712f6fb6736d7/debugpy-1.8.20-cp312-cp312-win32.whl", hash = "sha256:4057ac68f892064e5f98209ab582abfee3b543fb55d2e87610ddc133a954d390", size = 5331372, upload-time = "2026-01-29T23:03:45.526Z" }, - { url = "https://files.pythonhosted.org/packages/a1/39/2bef246368bd42f9bd7cba99844542b74b84dacbdbea0833e610f384fee8/debugpy-1.8.20-cp312-cp312-win_amd64.whl", hash = "sha256:a1a8f851e7cf171330679ef6997e9c579ef6dd33c9098458bd9986a0f4ca52e3", size = 5372835, upload-time = "2026-01-29T23:03:47.245Z" }, - { url = "https://files.pythonhosted.org/packages/15/e2/fc500524cc6f104a9d049abc85a0a8b3f0d14c0a39b9c140511c61e5b40b/debugpy-1.8.20-cp313-cp313-macosx_15_0_universal2.whl", hash = "sha256:5dff4bb27027821fdfcc9e8f87309a28988231165147c31730128b1c983e282a", size = 2539560, upload-time = "2026-01-29T23:03:48.738Z" }, - { url = "https://files.pythonhosted.org/packages/90/83/fb33dcea789ed6018f8da20c5a9bc9d82adc65c0c990faed43f7c955da46/debugpy-1.8.20-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:84562982dd7cf5ebebfdea667ca20a064e096099997b175fe204e86817f64eaf", size = 4293272, upload-time = "2026-01-29T23:03:50.169Z" }, - { url = "https://files.pythonhosted.org/packages/a6/25/b1e4a01bfb824d79a6af24b99ef291e24189080c93576dfd9b1a2815cd0f/debugpy-1.8.20-cp313-cp313-win32.whl", hash = "sha256:da11dea6447b2cadbf8ce2bec59ecea87cc18d2c574980f643f2d2dfe4862393", size = 5331208, upload-time = "2026-01-29T23:03:51.547Z" }, - { url = 
"https://files.pythonhosted.org/packages/13/f7/a0b368ce54ffff9e9028c098bd2d28cfc5b54f9f6c186929083d4c60ba58/debugpy-1.8.20-cp313-cp313-win_amd64.whl", hash = "sha256:eb506e45943cab2efb7c6eafdd65b842f3ae779f020c82221f55aca9de135ed7", size = 5372930, upload-time = "2026-01-29T23:03:53.585Z" }, - { url = "https://files.pythonhosted.org/packages/33/2e/f6cb9a8a13f5058f0a20fe09711a7b726232cd5a78c6a7c05b2ec726cff9/debugpy-1.8.20-cp314-cp314-macosx_15_0_universal2.whl", hash = "sha256:9c74df62fc064cd5e5eaca1353a3ef5a5d50da5eb8058fcef63106f7bebe6173", size = 2538066, upload-time = "2026-01-29T23:03:54.999Z" }, - { url = "https://files.pythonhosted.org/packages/c5/56/6ddca50b53624e1ca3ce1d1e49ff22db46c47ea5fb4c0cc5c9b90a616364/debugpy-1.8.20-cp314-cp314-manylinux_2_34_x86_64.whl", hash = "sha256:077a7447589ee9bc1ff0cdf443566d0ecf540ac8aa7333b775ebcb8ce9f4ecad", size = 4269425, upload-time = "2026-01-29T23:03:56.518Z" }, - { url = "https://files.pythonhosted.org/packages/c5/d9/d64199c14a0d4c476df46c82470a3ce45c8d183a6796cfb5e66533b3663c/debugpy-1.8.20-cp314-cp314-win32.whl", hash = "sha256:352036a99dd35053b37b7803f748efc456076f929c6a895556932eaf2d23b07f", size = 5331407, upload-time = "2026-01-29T23:03:58.481Z" }, - { url = "https://files.pythonhosted.org/packages/e0/d9/1f07395b54413432624d61524dfd98c1a7c7827d2abfdb8829ac92638205/debugpy-1.8.20-cp314-cp314-win_amd64.whl", hash = "sha256:a98eec61135465b062846112e5ecf2eebb855305acc1dfbae43b72903b8ab5be", size = 5372521, upload-time = "2026-01-29T23:03:59.864Z" }, - { url = "https://files.pythonhosted.org/packages/e0/c3/7f67dea8ccf8fdcb9c99033bbe3e90b9e7395415843accb81428c441be2d/debugpy-1.8.20-py2.py3-none-any.whl", hash = "sha256:5be9bed9ae3be00665a06acaa48f8329d2b9632f15fd09f6a9a8c8d9907e54d7", size = 5337658, upload-time = "2026-01-29T23:04:17.404Z" }, -] - [[package]] name = "deepdiff" version = "8.6.1" @@ -1869,15 +1738,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f7/e6/efe534ef0952b531b630780e19cabd416e2032697019d5295defc6ef9bd9/deepdiff-8.6.1-py3-none-any.whl", hash = "sha256:ee8708a7f7d37fb273a541fa24ad010ed484192cd0c4ffc0fa0ed5e2d4b9e78b", size = 91378, upload-time = "2025-09-03T19:40:39.679Z" }, ] -[[package]] -name = "distlib" -version = "0.4.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/96/8e/709914eb2b5749865801041647dc7f4e6d00b549cfe88b65ca192995f07c/distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d", size = 614605, upload-time = "2025-07-17T16:52:00.465Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" }, -] - [[package]] name = "distro" version = "1.9.0" @@ -1907,11 +1767,11 @@ wheels = [ [[package]] name = "docutils" -version = "0.19" +version = "0.22.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/330ea8d383eb2ce973df34d1239b3b21e91cd8c865d21ff82902d952f91f/docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6", size = 2056383, upload-time = "2022-07-05T20:17:31.045Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/b6/03bb70946330e88ffec97aefd3ea75ba575cb2e762061e0e62a213befee8/docutils-0.22.4.tar.gz", hash = 
"sha256:4db53b1fde9abecbb74d91230d32ab626d94f6badfc575d6db9194a49df29968", size = 2291750, upload-time = "2025-12-18T19:00:26.443Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/93/69/e391bd51bc08ed9141ecd899a0ddb61ab6465309f1eb470905c0c8868081/docutils-0.19-py3-none-any.whl", hash = "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc", size = 570472, upload-time = "2022-07-05T20:17:26.388Z" }, + { url = "https://files.pythonhosted.org/packages/02/10/5da547df7a391dcde17f59520a231527b8571e6f46fc8efb02ccb370ab12/docutils-0.22.4-py3-none-any.whl", hash = "sha256:d0013f540772d1420576855455d050a2180186c91c15779301ac2ccb3eeb68de", size = 633196, upload-time = "2025-12-18T19:00:18.077Z" }, ] [[package]] @@ -1979,7 +1839,7 @@ wheels = [ [[package]] name = "fastapi" -version = "0.128.2" +version = "0.128.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-doc", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -1988,9 +1848,9 @@ dependencies = [ { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-inspection", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4f/6e/45fb5390d46d7918426ea1c1ec4b06c1d3fd70be4a47a690ccb4f1f9438a/fastapi-0.128.2.tar.gz", hash = "sha256:7db9eb891866ac3a08e03f844b99e343a2c1cc41247e68e006c90b38d2464ea1", size = 376129, upload-time = "2026-02-05T19:48:33.957Z" } +sdist = { url = "https://files.pythonhosted.org/packages/02/d4/811e7283aaaa84f1e7bd55fb642b58f8c01895e4884a9b7628cb55e00d63/fastapi-0.128.5.tar.gz", hash = "sha256:a7173579fc162d6471e3c6fbd9a4b7610c7a3b367bcacf6c4f90d5d022cab711", size = 374636, upload-time = "2026-02-08T10:22:30.493Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/f2/80df24108572630bb2adef3d97f1e774b18ec25bfbab5528f36cba6478c0/fastapi-0.128.2-py3-none-any.whl", hash = "sha256:55bfd9490ca0125707d80e785583c2dc57840bb66e3a0bbc087d20c364964dc0", size = 104032, upload-time = "2026-02-05T19:48:32.118Z" }, + { url = "https://files.pythonhosted.org/packages/e4/e0/511972dba23ee76c0e9d09d1ae95e916fc8ebce5322b2b8b65a481428b10/fastapi-0.128.5-py3-none-any.whl", hash = "sha256:bceec0de8aa6564599c5bcc0593b0d287703562c848271fca8546fd2c87bf4dd", size = 103677, upload-time = "2026-02-08T10:22:28.919Z" }, ] [[package]] @@ -2351,16 +2211,20 @@ wheels = [ [[package]] name = "github-copilot-sdk" -version = "0.1.22" +version = "0.1.23" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "python-dateutil", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cf/b7/ae720a503c9b329f8c95036a04fae8e023db8dcdce9d24382259865f0760/github_copilot_sdk-0.1.22.tar.gz", hash = "sha256:8ea4534f0c8ab0fa04e0fec4c3ebd42d737cf7772277e4f8eb58a9fadac6bdb5", size = 97324, upload-time = "2026-02-05T17:33:33.726Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/2e/68aa28018778fa86a8392b37c6a883d7a9a24b715ba5baa470ce018f1542/github_copilot_sdk-0.1.22-py3-none-any.whl", hash = 
"sha256:f75d84dd2633138834330597400b28fefbf8bd75541f78083831f58c9bdde81a", size = 44149, upload-time = "2026-02-05T17:33:31.948Z" }, + { url = "https://files.pythonhosted.org/packages/1a/69/08f478521739e3fbf6c7f7a24ba503c8f80f735be17ef0b08f42b12511c4/github_copilot_sdk-0.1.23-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:9b761445e47b757c63e3ad5596dbdc4fb84720612cad00a12425af056fbadb48", size = 57873198, upload-time = "2026-02-06T18:10:44.07Z" }, + { url = "https://files.pythonhosted.org/packages/d4/6a/c0262ea649a89518e3897d7c464e88aa623d7bb9a6861b7674fda5033c4c/github_copilot_sdk-0.1.23-py3-none-macosx_11_0_arm64.whl", hash = "sha256:27277aca84d767336590a426a48a00ded20533e6508be97c265eb3b64f6e921c", size = 54627888, upload-time = "2026-02-06T18:10:48.352Z" }, + { url = "https://files.pythonhosted.org/packages/aa/fb/63f147993c840c6b863250f10967dbc45095ab9d2a9ad1c86ca0588c65d5/github_copilot_sdk-0.1.23-py3-none-manylinux_2_17_aarch64.whl", hash = "sha256:b929027edeb147683c6625c8a6b90e7c6d64a72ea0567cc8e56c5c66bec7a37d", size = 60760946, upload-time = "2026-02-06T18:10:51.574Z" }, + { url = "https://files.pythonhosted.org/packages/07/2f/0fdeb797e26da3f57c4a84bf3bdd6db9ba4e8974450c8ea0f32fd81c48ba/github_copilot_sdk-0.1.23-py3-none-manylinux_2_17_x86_64.whl", hash = "sha256:31d1adb09f342c8a466f64e8b81e6470fee6013d31e516cec7a33a44b6b0a4b4", size = 58941430, upload-time = "2026-02-06T18:10:55.502Z" }, + { url = "https://files.pythonhosted.org/packages/45/9e/4e569de749066fb4c796954c5e01118d52e2cd05b42bf7a1451660851a8e/github_copilot_sdk-0.1.23-py3-none-win_amd64.whl", hash = "sha256:1e1c889aab857feadda546842c4c4730ddb0d63f04aa5ccaae2d83f4bc348eb7", size = 57636441, upload-time = "2026-02-06T18:10:59.359Z" }, + { url = "https://files.pythonhosted.org/packages/0e/65/15c94c7ea647b42123124e6f0daa7f93df630189188cf9e4ce36c5f799d9/github_copilot_sdk-0.1.23-py3-none-win_arm64.whl", hash = "sha256:d1ab5816b0ebd6507ddc6e11ccb5aac4eef2069f2b834b39fcceb909b0cf80bf", size = 55149715, upload-time = "2026-02-06T18:11:02.84Z" }, ] [[package]] @@ -2757,15 +2621,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/48/30/47d0bf6072f7252e6521f3447ccfa40b421b6824517f82854703d0f5a98b/hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5", size = 13007, upload-time = "2025-01-22T21:41:47.295Z" }, ] -[[package]] -name = "identify" -version = "2.6.16" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5b/8d/e8b97e6bd3fb6fb271346f7981362f1e04d6a7463abd0de79e1fda17c067/identify-2.6.16.tar.gz", hash = "sha256:846857203b5511bbe94d5a352a48ef2359532bc8f6727b5544077a0dcfb24980", size = 99360, upload-time = "2026-01-12T18:58:58.201Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b8/58/40fbbcefeda82364720eba5cf2270f98496bdfa19ea75b4cccae79c698e6/identify-2.6.16-py2.py3-none-any.whl", hash = "sha256:391ee4d77741d994189522896270b787aed8670389bfd60f326d677d64a6dfb0", size = 99202, upload-time = "2026-01-12T18:58:56.627Z" }, -] - [[package]] name = "idna" version = "3.11" @@ -2775,15 +2630,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, ] -[[package]] -name = "imagesize" -version = "1.4.1" -source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a7/84/62473fb57d61e31fef6e36d64a179c8781605429fd927b5dd608c997be31/imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a", size = 1280026, upload-time = "2022-07-01T12:21:05.687Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ff/62/85c4c919272577931d407be5ba5d71c20f0b616d31a0befe0ae45bb79abd/imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b", size = 8769, upload-time = "2022-07-01T12:21:02.467Z" }, -] - [[package]] name = "importlib-metadata" version = "8.7.1" @@ -2962,15 +2808,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/35/5a/73ecb3d82f8615f32ccdadeb9356726d6cae3a4bbc840b437ceb95708063/jsonpath_ng-1.7.0-py3-none-any.whl", hash = "sha256:f3d7f9e848cba1b6da28c55b1c26ff915dc9e0b1ba7e752a53d6da8d5cbd00b6", size = 30105, upload-time = "2024-11-20T17:58:30.418Z" }, ] -[[package]] -name = "jsonref" -version = "1.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/aa/0d/c1f3277e90ccdb50d33ed5ba1ec5b3f0a242ed8c1b1a85d3afeb68464dca/jsonref-1.1.0.tar.gz", hash = "sha256:32fe8e1d85af0fdefbebce950af85590b22b60f9e95443176adbde4e1ecea552", size = 8814, upload-time = "2023-01-16T16:10:04.455Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0c/ec/e1db9922bceb168197a558a2b8c03a7963f1afe93517ddd3cf99f202f996/jsonref-1.1.0-py3-none-any.whl", hash = "sha256:590dc7773df6c21cbf948b5dac07a72a251db28b0238ceecce0a2abfa8ec30a9", size = 9425, upload-time = "2023-01-16T16:10:02.255Z" }, -] - [[package]] name = "jsonschema" version = "4.26.0" @@ -3108,7 +2945,7 @@ wheels = [ [[package]] name = "langfuse" -version = "3.12.1" +version = "3.13.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "backoff", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -3122,9 +2959,9 @@ dependencies = [ { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "wrapt", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f3/87/c28a09b696a1b908cf59b201d01e69066aeab804163d8dba055811790ed5/langfuse-3.12.1.tar.gz", hash = "sha256:da3bf4c0469eab4305f88a63cbb5ef89cf7542abbbcc9136a35c1bc708810520", size = 232768, upload-time = "2026-01-27T06:11:24.648Z" } +sdist = { url = "https://files.pythonhosted.org/packages/24/d0/744e5613c728427330ac2049da0f54fc313e8bf84622f71b025bfba65496/langfuse-3.13.0.tar.gz", hash = "sha256:dacea8111ca4442e97dbfec4f8d676cf9709b35357a26e468f8887b95de0012f", size = 233420, upload-time = "2026-02-06T19:54:14.415Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/51/a5752417d704831f8c9fc4d7ec070342dee21d781d92e6fe937e60912e61/langfuse-3.12.1-py3-none-any.whl", hash = "sha256:ccf091ed6b6e0d9d4dbc95ad5cbb0f60c4452ce95b18c114ed5896f4546af38f", size = 416999, upload-time = "2026-01-27T06:11:22.657Z" }, + { url = "https://files.pythonhosted.org/packages/3d/63/148382e8e79948f7e5c9c137288e504bb88117574eb7e7c886b4fb470b4b/langfuse-3.13.0-py3-none-any.whl", hash = "sha256:71912ddac1cc831a65df895eae538a556f564c094ae51473e747426e9ded1a9d", size = 417626, upload-time = "2026-02-06T19:54:12.547Z" }, ] [[package]] @@ -3202,7 +3039,7 @@ wheels = [ [[package]] 
name = "litellm" -version = "1.81.8" +version = "1.81.9" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohttp", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -3218,9 +3055,9 @@ dependencies = [ { name = "tiktoken", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "tokenizers", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/eb/1d/e8f95dd1fc0eed36f2698ca82d8a0693d5388c6f2f1718f3f5ed472daaf4/litellm-1.81.8.tar.gz", hash = "sha256:5cc6547697748b8ca38d17d755662871da125df6e378cc987eaf2208a15626fb", size = 14066801, upload-time = "2026-02-05T05:56:03.37Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ff/8f/2a08f3d86fd008b4b02254649883032068378a8551baed93e8d9dcbbdb5d/litellm-1.81.9.tar.gz", hash = "sha256:a2cd9bc53a88696c21309ef37c55556f03c501392ed59d7f4250f9932917c13c", size = 16276983, upload-time = "2026-02-07T21:14:24.473Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d8/5a/6f391c2f251553dae98b6edca31c070d7e2291cef6153ae69e0688159093/litellm-1.81.8-py3-none-any.whl", hash = "sha256:78cca92f36bc6c267c191d1fe1e2630c812bff6daec32c58cade75748c2692f6", size = 12286316, upload-time = "2026-02-05T05:56:00.248Z" }, + { url = "https://files.pythonhosted.org/packages/0b/8b/672fc06c8a2803477e61e0de383d3c6e686e0f0fc62789c21f0317494076/litellm-1.81.9-py3-none-any.whl", hash = "sha256:24ee273bc8a62299fbb754035f83fb7d8d44329c383701a2bd034f4fd1c19084", size = 14433170, upload-time = "2026-02-07T21:14:21.469Z" }, ] [package.optional-dependencies] @@ -3253,20 +3090,20 @@ proxy = [ [[package]] name = "litellm-enterprise" -version = "0.1.27" +version = "0.1.31" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b6/b5/2304eed58f0142b3570c50580b451db9b7709012d5b436c2100783ae2220/litellm_enterprise-0.1.27.tar.gz", hash = "sha256:aa40c87f7c8df64beb79e75f71e1b5c0a458350efa68527e3491e6f27f2cbd57", size = 46829, upload-time = "2025-12-18T00:01:33.398Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5f/ef/4d7baae0503cbab015cb03238633887725b553a22adaf9a011b35cd7338f/litellm_enterprise-0.1.31.tar.gz", hash = "sha256:684d09daa3ededf1394df4ec1439aab606b884b68af2c92c478af0784a30e588", size = 50205, upload-time = "2026-02-06T05:31:55.311Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/23/ec61a6aa76b6938d3de8cad206875b0500e1df234fa3535b282b1a4850b5/litellm_enterprise-0.1.27-py3-none-any.whl", hash = "sha256:41b9d41d04123f492060a742091006dc1d182b54ce3a1c0e18ee75d623c63e91", size = 108107, upload-time = "2025-12-18T00:01:31.966Z" }, + { url = "https://files.pythonhosted.org/packages/be/8a/1e06af78b18d62e1dbb457f60cc78a82543217db81cb780af080b4dd985d/litellm_enterprise-0.1.31-py3-none-any.whl", hash = "sha256:7b0f750343e6f28c88e1557c656a6bea50fa6c8990a13e86ea20497cf666c79b", size = 112741, upload-time = "2026-02-06T05:31:54.311Z" }, ] [[package]] name = "litellm-proxy-extras" -version = "0.4.30" +version = "0.4.33" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/83/a1/00d2e91a7a91335a7d7f43dfb8316142879782c22ef59eca5d0ced055bf0/litellm_proxy_extras-0.4.30.tar.gz", hash = "sha256:5d32f8dc3d37d36fb15ab6995fea706dd8a453ff7f12e70b47cba35e5368da10", size = 23752, upload-time = "2026-02-05T03:54:00.351Z" } +sdist = { url 
= "https://files.pythonhosted.org/packages/51/4f/1e8644cdda2892d2dc8151153ca4d8a6fc44000363677a52f9988e56713a/litellm_proxy_extras-0.4.33.tar.gz", hash = "sha256:133dc5476b540d99e75d4baef622267e7344ced97737c174679baff429e7f212", size = 23973, upload-time = "2026-02-07T19:07:32.67Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bd/80/5b7ae7b39a79ca79722dd9049b3b4227b4540cb97006c8ef26c43af74db8/litellm_proxy_extras-0.4.30-py3-none-any.whl", hash = "sha256:0b7df68f0968eb817462b847eaee81bba23d935adb2e84d2e342a77711887051", size = 51217, upload-time = "2026-02-05T03:54:02.128Z" }, + { url = "https://files.pythonhosted.org/packages/b7/c0/b9960391b983306c39f1fa28e2eedf5d0e2048879fde8707a2d80896ed10/litellm_proxy_extras-0.4.33-py3-none-any.whl", hash = "sha256:bebea1b091490df19cfa773bd311f08254dee5bb53f92d282b7a5bdfba936334", size = 52533, upload-time = "2026-02-07T19:07:31.665Z" }, ] [[package]] @@ -4046,7 +3883,7 @@ wheels = [ [[package]] name = "openai-agents" -version = "0.8.0" +version = "0.8.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "griffe", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -4057,9 +3894,9 @@ dependencies = [ { name = "types-requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/97/57/724c73f158dec760a6e689e2415ab1b85bc5ff21508d82af91d23c9580e9/openai_agents-0.8.0.tar.gz", hash = "sha256:0ea66356ace1e158b09ab173534cacbc435d4a06e3203d04978dd69531729fc3", size = 2342265, upload-time = "2026-02-05T02:51:52.293Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1e/43/ccea6b70e3c4399eea24a7e0c0cde9e05727781e5b7dd2c00e2cebe09961/openai_agents-0.8.1.tar.gz", hash = "sha256:32dc6124359397e5775e936e621892576a0b2f5c88b3fc548a084334f6918541", size = 2373798, upload-time = "2026-02-06T22:44:24.24Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/61/7c590176c664845e75961a7755f58997b404fb633073a9ddba1151582033/openai_agents-0.8.0-py3-none-any.whl", hash = "sha256:1a8b63f10f8828fb5516fa4917ee26d03956893f8f09e38cfcf33ec60ffcd546", size = 373746, upload-time = "2026-02-05T02:51:50.501Z" }, + { url = "https://files.pythonhosted.org/packages/f6/3f/49ff704c933cf2a3467c040b13231258bb1f2fa66d995c3b62b3a13c2eb4/openai_agents-0.8.1-py3-none-any.whl", hash = "sha256:a29916690f4ca2d67c0d782abbff99350ce2a7cee0067b8dd2c2297e38a3714a", size = 376922, upload-time = "2026-02-06T22:44:21.977Z" }, ] [[package]] @@ -4601,15 +4438,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/de/f0/c81e05b613866b76d2d1066490adf1a3dbc4ee9d9c839961c3fc8a6997af/pip-26.0.1-py3-none-any.whl", hash = "sha256:bdb1b08f4274833d62c1aa29e20907365a2ceb950410df15fc9521bad440122b", size = 1787723, upload-time = "2026-02-05T02:20:16.416Z" }, ] -[[package]] -name = "platformdirs" -version = "4.5.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cf/86/0248f086a84f01b37aaec0fa567b397df1a119f73c16f6c7a9aac73ea309/platformdirs-4.5.1.tar.gz", hash = "sha256:61d5cdcc6065745cdd94f0f878977f8de9437be93de97c1c12f853c9c0cdcbda", size = 21715, upload-time = "2025-12-05T13:52:58.638Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/cb/28/3bfe2fa5a7b9c46fe7e13c97bda14c895fb10fa2ebf1d0abb90e0cea7ee1/platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31", size = 18731, upload-time = "2025-12-05T13:52:56.823Z" }, -] - [[package]] name = "plotly" version = "6.5.2" @@ -4643,44 +4471,44 @@ wheels = [ [[package]] name = "poethepoet" -version = "0.40.0" +version = "0.41.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pastel", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pyyaml", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "tomli", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/38/9d/054c8435b03324ed9abd5d5ab8c45065b1f42c23952cd23f13a5921d8465/poethepoet-0.40.0.tar.gz", hash = "sha256:91835f00d03d6c4f0e146f80fa510e298ad865e7edd27fe4cb9c94fdc090791b", size = 81114, upload-time = "2026-01-05T19:09:13.116Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a8/b9/fa92286560f70eaa40d473ea48376d20c6c21f63627d33c6bb1c5e385175/poethepoet-0.41.0.tar.gz", hash = "sha256:dcaad621dc061f6a90b17d091bebb9ca043d67bfe9bd6aa4185aea3ebf7ff3e6", size = 87780, upload-time = "2026-02-08T20:45:36.061Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/bc/73327d12b176abea7a3c6c7d760e1a953992f7b59d72c0354e39d7a353b5/poethepoet-0.40.0-py3-none-any.whl", hash = "sha256:afd276ae31d5c53573c0c14898118d4848ccee3709b6b0be6a1c6cbe522bbc8a", size = 106672, upload-time = "2026-01-05T19:09:11.536Z" }, + { url = "https://files.pythonhosted.org/packages/5d/5e/0b83e0222ce5921b3f9081eeca8c6fb3e1cfd5ca0d06338adf93b28ce061/poethepoet-0.41.0-py3-none-any.whl", hash = "sha256:4bab9fd8271664c5d21407e8f12827daeb6aa484dc6cc7620f0c3b4e62b42ee4", size = 113590, upload-time = "2026-02-08T20:45:34.697Z" }, ] [[package]] name = "polars" -version = "1.38.0" +version = "1.38.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "polars-runtime-32", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e1/56/bce1c1244431b0ebc4e5d413fdbcf7f85ec30fc98595fcfb7328a869d794/polars-1.38.0.tar.gz", hash = "sha256:4dee569944c613d8c621eb709e452354e1570bd3d47ccb2d3d36681fb1bd2cf6", size = 717801, upload-time = "2026-02-04T12:00:34.246Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/5e/208a24471a433bcd0e9a6889ac49025fd4daad2815c8220c5bd2576e5f1b/polars-1.38.1.tar.gz", hash = "sha256:803a2be5344ef880ad625addfb8f641995cfd777413b08a10de0897345778239", size = 717667, upload-time = "2026-02-06T18:13:23.013Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c3/47/61e7a47f77e321aa1cbf4141cc60df9d6e63b9f469c5525226535552a04c/polars-1.38.0-py3-none-any.whl", hash = "sha256:d7a31b47da8c9522aa38908c46ac72eab8eaf0c992e024f9c95fedba4cbe7759", size = 810116, upload-time = "2026-02-04T11:59:21.425Z" }, + { url = "https://files.pythonhosted.org/packages/0a/49/737c1a6273c585719858261753da0b688454d1b634438ccba8a9c4eb5aab/polars-1.38.1-py3-none-any.whl", hash = "sha256:a29479c48fed4984d88b656486d221f638cba45d3e961631a50ee5fdde38cb2c", size = 810368, upload-time = 
"2026-02-06T18:11:55.819Z" }, ] [[package]] name = "polars-runtime-32" -version = "1.38.0" +version = "1.38.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8c/8d/8f5764d722ad16ddb1b6db997aca7a41110dad446000ee2e3f8f48503f0e/polars_runtime_32-1.38.0.tar.gz", hash = "sha256:69ba986bff34f70d7eab931005e5d81dd4dc6c5c12e3532a4bd0fc7022671692", size = 2812354, upload-time = "2026-02-04T12:00:36.041Z" } +sdist = { url = "https://files.pythonhosted.org/packages/07/4b/04d6b3fb7cf336fbe12fbc4b43f36d1783e11bb0f2b1e3980ec44878df06/polars_runtime_32-1.38.1.tar.gz", hash = "sha256:04f20ed1f5c58771f34296a27029dc755a9e4b1390caeaef8f317e06fdfce2ec", size = 2812631, upload-time = "2026-02-06T18:13:25.206Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/51/eb/a8981ec070dd9bea9569292f38b0268159e39f63f5376ffae27a0c7d2ee7/polars_runtime_32-1.38.0-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:03f43c10a419837b89a493e946090cdaee08ce50a8d1933f2e8ac3a6874d7db4", size = 44106460, upload-time = "2026-02-04T11:59:23.546Z" }, - { url = "https://files.pythonhosted.org/packages/64/de/c2a2037b2d658b91067647b99be43bc91af3a7b4868e32efcc118f383add/polars_runtime_32-1.38.0-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:d664e53cba734e9fbed87d1c33078a13b5fc39b3e8790318fc65fa78954ea2d0", size = 40228076, upload-time = "2026-02-04T11:59:26.497Z" }, - { url = "https://files.pythonhosted.org/packages/4a/0f/9204210e7d05b3953813bb09627585c161221f512f2672b31065a02f4727/polars_runtime_32-1.38.0-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c073c7b7e6e559769e10cdadbafce86d32b0709d5790de920081c6129acae507", size = 41988273, upload-time = "2026-02-04T11:59:29.01Z" }, - { url = "https://files.pythonhosted.org/packages/89/64/4c5dbb1c2d2c025f8e7c7e433bd343c4fc955ceadd087a7ad456de8668f8/polars_runtime_32-1.38.0-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8806ddb684b17ae8b0bcb91d8d5ba361b04b0a31d77ce7f861d16b47734b3012", size = 45749469, upload-time = "2026-02-04T11:59:32.292Z" }, - { url = "https://files.pythonhosted.org/packages/d7/f8/da2d324d686b1fc438dfb721677fb44f7f5aab6ae0d1fa5b281e986fde82/polars_runtime_32-1.38.0-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c7b41163189bd3305fe2307e66fe478b35c4faa467777d74c32b70b52292039b", size = 42159740, upload-time = "2026-02-04T11:59:35.608Z" }, - { url = "https://files.pythonhosted.org/packages/37/88/fe02e4450e9b582ea6f1a7490921208a9c3a0a1efdf976aadbaa4cae73bb/polars_runtime_32-1.38.0-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:e944f924a99750909299fa701edb07a63a5988e5ee58d673993f3d9147a22276", size = 45327635, upload-time = "2026-02-04T11:59:38.28Z" }, - { url = "https://files.pythonhosted.org/packages/68/db/9bb8007a4bea76b476537740ed18c8bccd809faa390ca1443134e98f8b60/polars_runtime_32-1.38.0-cp310-abi3-win_amd64.whl", hash = "sha256:46fbfb4ee6f8e1914dc0babfb6a138ead552db05a2d9e531c1fb19411b1a6744", size = 45670197, upload-time = "2026-02-04T11:59:41.297Z" }, - { url = "https://files.pythonhosted.org/packages/58/78/28f793ec2e1cff72c0ced1bc9186c9b4dbfe44ca8316df11b2aa8039764c/polars_runtime_32-1.38.0-cp310-abi3-win_arm64.whl", hash = "sha256:ed0e6d7a546de9179e5715bffe9d3b94ba658d5655bbbf44943e138e061dcc90", size = 41637784, upload-time = "2026-02-04T11:59:44.396Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/a2/a00defbddadd8cf1042f52380dcba6b6592b03bac8e3b34c436b62d12d3b/polars_runtime_32-1.38.1-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:18154e96044724a0ac38ce155cf63aa03c02dd70500efbbf1a61b08cadd269ef", size = 44108001, upload-time = "2026-02-06T18:11:58.127Z" }, + { url = "https://files.pythonhosted.org/packages/a7/fb/599ff3709e6a303024efd7edfd08cf8de55c6ac39527d8f41cbc4399385f/polars_runtime_32-1.38.1-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:c49acac34cc4049ed188f1eb67d6ff3971a39b4af7f7b734b367119970f313ac", size = 40230140, upload-time = "2026-02-06T18:12:01.181Z" }, + { url = "https://files.pythonhosted.org/packages/dc/8c/3ac18d6f89dc05fe2c7c0ee1dc5b81f77a5c85ad59898232c2500fe2ebbf/polars_runtime_32-1.38.1-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fef2ef2626a954e010e006cc8e4de467ecf32d08008f130cea1c78911f545323", size = 41994039, upload-time = "2026-02-06T18:12:04.332Z" }, + { url = "https://files.pythonhosted.org/packages/f2/5a/61d60ec5cc0ab37cbd5a699edb2f9af2875b7fdfdfb2a4608ca3cc5f0448/polars_runtime_32-1.38.1-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8a5f7a8125e2d50e2e060296551c929aec09be23a9edcb2b12ca923f555a5ba", size = 45755804, upload-time = "2026-02-06T18:12:07.846Z" }, + { url = "https://files.pythonhosted.org/packages/91/54/02cd4074c98c361ccd3fec3bcb0bd68dbc639c0550c42a4436b0ff0f3ccf/polars_runtime_32-1.38.1-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:10d19cd9863e129273b18b7fcaab625b5c8143c2d22b3e549067b78efa32e4fa", size = 42159605, upload-time = "2026-02-06T18:12:10.919Z" }, + { url = "https://files.pythonhosted.org/packages/8e/f3/b2a5e720cc56eaa38b4518e63aa577b4bbd60e8b05a00fe43ca051be5879/polars_runtime_32-1.38.1-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:61e8d73c614b46a00d2f853625a7569a2e4a0999333e876354ac81d1bf1bb5e2", size = 45336615, upload-time = "2026-02-06T18:12:14.074Z" }, + { url = "https://files.pythonhosted.org/packages/f1/8d/ee2e4b7de948090cfb3df37d401c521233daf97bfc54ddec5d61d1d31618/polars_runtime_32-1.38.1-cp310-abi3-win_amd64.whl", hash = "sha256:08c2b3b93509c1141ac97891294ff5c5b0c548a373f583eaaea873a4bf506437", size = 45680732, upload-time = "2026-02-06T18:12:19.097Z" }, + { url = "https://files.pythonhosted.org/packages/bf/18/72c216f4ab0c82b907009668f79183ae029116ff0dd245d56ef58aac48e7/polars_runtime_32-1.38.1-cp310-abi3-win_arm64.whl", hash = "sha256:6d07d0cc832bfe4fb54b6e04218c2c27afcfa6b9498f9f6bbf262a00d58cc7c4", size = 41639413, upload-time = "2026-02-06T18:12:22.044Z" }, ] [[package]] @@ -4726,19 +4554,27 @@ wheels = [ ] [[package]] -name = "pre-commit" -version = "4.5.1" +name = "prek" +version = "0.3.2" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cfgv", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "identify", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "nodeenv", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "pyyaml", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "virtualenv", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/40/f1/6d86a29246dfd2e9b6237f0b5823717f60cad94d47ddc26afa916d21f525/pre_commit-4.5.1.tar.gz", hash = 
"sha256:eb545fcff725875197837263e977ea257a402056661f09dae08e4b149b030a61", size = 198232, upload-time = "2025-12-16T21:14:33.552Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d3/f5/ee52def928dd1355c20bcfcf765e1e61434635c33f3075e848e7b83a157b/prek-0.3.2.tar.gz", hash = "sha256:dce0074ff1a21290748ca567b4bda7553ee305a8c7b14d737e6c58364a499364", size = 334229, upload-time = "2026-02-06T13:49:47.539Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5d/19/fd3ef348460c80af7bb4669ea7926651d1f95c23ff2df18b9d24bab4f3fa/pre_commit-4.5.1-py2.py3-none-any.whl", hash = "sha256:3b3afd891e97337708c1674210f8eba659b52a38ea5f822ff142d10786221f77", size = 226437, upload-time = "2025-12-16T21:14:32.409Z" }, + { url = "https://files.pythonhosted.org/packages/76/69/70a5fc881290a63910494df2677c0fb241d27cfaa435bbcd0de5cd2e2443/prek-0.3.2-py3-none-linux_armv6l.whl", hash = "sha256:4f352f9c3fc98aeed4c8b2ec4dbf16fc386e45eea163c44d67e5571489bd8e6f", size = 4614960, upload-time = "2026-02-06T13:50:05.818Z" }, + { url = "https://files.pythonhosted.org/packages/c0/15/a82d5d32a2207ccae5d86ea9e44f2b93531ed000faf83a253e8d1108e026/prek-0.3.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:4a000cfbc3a6ec7d424f8be3c3e69ccd595448197f92daac8652382d0acc2593", size = 4622889, upload-time = "2026-02-06T13:49:53.662Z" }, + { url = "https://files.pythonhosted.org/packages/89/75/ea833b58a12741397017baef9b66a6e443bfa8286ecbd645d14111446280/prek-0.3.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5436bdc2702cbd7bcf9e355564ae66f8131211e65fefae54665a94a07c3d450a", size = 4239653, upload-time = "2026-02-06T13:50:02.88Z" }, + { url = "https://files.pythonhosted.org/packages/10/b4/d9c3885987afac6e20df4cb7db14e3b0d5a08a77ae4916488254ebac4d0b/prek-0.3.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:0161b5f584f9e7f416d6cf40a17b98f17953050ff8d8350ec60f20fe966b86b6", size = 4595101, upload-time = "2026-02-06T13:49:49.813Z" }, + { url = "https://files.pythonhosted.org/packages/21/a6/1a06473ed83dbc898de22838abdb13954e2583ce229f857f61828384634c/prek-0.3.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4e641e8533bca38797eebb49aa89ed0e8db0e61225943b27008c257e3af4d631", size = 4521978, upload-time = "2026-02-06T13:49:41.266Z" }, + { url = "https://files.pythonhosted.org/packages/0c/5e/c38390d5612e6d86b32151c1d2fdab74a57913473193591f0eb00c894c21/prek-0.3.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfca1810d49d3f9ef37599c958c4e716bc19a1d78a7e88cbdcb332e0b008994f", size = 4829108, upload-time = "2026-02-06T13:49:44.598Z" }, + { url = "https://files.pythonhosted.org/packages/80/a6/cecce2ab623747ff65ed990bb0d95fa38449ee19b348234862acf9392fff/prek-0.3.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d69d754299a95a85dc20196f633232f306bee7e7c8cba61791f49ce70404ec", size = 5357520, upload-time = "2026-02-06T13:49:48.512Z" }, + { url = "https://files.pythonhosted.org/packages/a5/18/d6bcb29501514023c76d55d5cd03bdbc037737c8de8b6bc41cdebfb1682c/prek-0.3.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:539dcb90ad9b20837968539855df6a29493b328a1ae87641560768eed4f313b0", size = 4852635, upload-time = "2026-02-06T13:49:58.347Z" }, + { url = "https://files.pythonhosted.org/packages/1b/0a/ae46f34ba27ba87aea5c9ad4ac9cd3e07e014fd5079ae079c84198f62118/prek-0.3.2-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:1998db3d0cbe243984736c82232be51318f9192e2433919a6b1c5790f600b5fd", size = 
4599484, upload-time = "2026-02-06T13:49:43.296Z" }, + { url = "https://files.pythonhosted.org/packages/1a/a9/73bfb5b3f7c3583f9b0d431924873928705cdef6abb3d0461c37254a681b/prek-0.3.2-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:07ab237a5415a3e8c0db54de9d63899bcd947624bdd8820d26f12e65f8d19eb7", size = 4657694, upload-time = "2026-02-06T13:50:01.074Z" }, + { url = "https://files.pythonhosted.org/packages/a7/bc/0994bc176e1a80110fad3babce2c98b0ac4007630774c9e18fc200a34781/prek-0.3.2-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:0ced19701d69c14a08125f14a5dd03945982edf59e793c73a95caf4697a7ac30", size = 4509337, upload-time = "2026-02-06T13:49:54.891Z" }, + { url = "https://files.pythonhosted.org/packages/f9/13/e73f85f65ba8f626468e5d1694ab3763111513da08e0074517f40238c061/prek-0.3.2-py3-none-musllinux_1_1_i686.whl", hash = "sha256:ffb28189f976fa111e770ee94e4f298add307714568fb7d610c8a7095cb1ce59", size = 4697350, upload-time = "2026-02-06T13:50:04.526Z" }, + { url = "https://files.pythonhosted.org/packages/14/47/98c46dcd580305b9960252a4eb966f1a7b1035c55c363f378d85662ba400/prek-0.3.2-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:f63134b3eea14421789a7335d86f99aee277cb520427196f2923b9260c60e5c5", size = 4955860, upload-time = "2026-02-06T13:49:56.581Z" }, + { url = "https://files.pythonhosted.org/packages/73/42/1bb4bba3ff47897df11e9dfd774027cdfa135482c961a54e079af0faf45a/prek-0.3.2-py3-none-win32.whl", hash = "sha256:58c806bd1344becd480ef5a5ba348846cc000af0e1fbe854fef91181a2e06461", size = 4267619, upload-time = "2026-02-06T13:49:39.503Z" }, + { url = "https://files.pythonhosted.org/packages/97/11/6665f47a7c350d83de17403c90bbf7a762ef50876ece456a86f64f46fbfb/prek-0.3.2-py3-none-win_amd64.whl", hash = "sha256:70114b48e9eb8048b2c11b4c7715ce618529c6af71acc84dd8877871a2ef71a6", size = 4624324, upload-time = "2026-02-06T13:49:45.922Z" }, + { url = "https://files.pythonhosted.org/packages/22/e7/740997ca82574d03426f897fd88afe3fc8a7306b8c7ea342a8bc1c538488/prek-0.3.2-py3-none-win_arm64.whl", hash = "sha256:9144d176d0daa2469a25c303ef6f6fa95a8df015eb275232f5cb53551ecefef0", size = 4336008, upload-time = "2026-02-06T13:49:52.27Z" }, ] [[package]] @@ -4896,19 +4732,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/50/1b/6921afe68c74868b4c9fa424dad3be35b095e16687989ebbb50ce4fceb7c/psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553", size = 244885, upload-time = "2025-02-13T21:54:37.486Z" }, ] -[[package]] -name = "py2docfx" -version = "0.1.23" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pyyaml", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "sphinx", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "wheel", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/51/1f/9190016955e5ecdd87053d0609e72cf75eb6fe6002e06f1840ceb60eb68e/py2docfx-0.1.23-py3-none-any.whl", hash = "sha256:92eec60f8abb0426722644c1a636d1ab9ea7144a69d5cd0464f944dd03b5e5b2", size = 11339155, upload-time = "2026-01-20T10:34:49.458Z" }, -] - [[package]] name = "pyarrow" version = "23.0.0" @@ -5286,19 +5109,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = 
"sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" }, ] -[[package]] -name = "pytest-env" -version = "1.2.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pytest", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "tomli", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/13/12/9c87d0ca45d5992473208bcef2828169fa7d39b8d7fc6e3401f5c08b8bf7/pytest_env-1.2.0.tar.gz", hash = "sha256:475e2ebe8626cee01f491f304a74b12137742397d6c784ea4bc258f069232b80", size = 8973, upload-time = "2025-10-09T19:15:47.42Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/27/98/822b924a4a3eb58aacba84444c7439fce32680592f394de26af9c76e2569/pytest_env-1.2.0-py3-none-any.whl", hash = "sha256:d7e5b7198f9b83c795377c09feefa45d56083834e60d04767efd64819fc9da00", size = 6251, upload-time = "2025-10-09T19:15:46.077Z" }, -] - [[package]] name = "pytest-retry" version = "1.7.0" @@ -5521,7 +5331,7 @@ wheels = [ [[package]] name = "redisvl" -version = "0.13.2" +version = "0.14.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jsonpath-ng", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -5534,9 +5344,9 @@ dependencies = [ { name = "redis", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "tenacity", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/81/d6/8f3235b272e3a2370698d7524aad2dec15f53c5be5d6726ba41056844f69/redisvl-0.13.2.tar.gz", hash = "sha256:f34c4350922ac469c45d90b5db65c49950e6aa8706331931b000f631ff9a0f4a", size = 737736, upload-time = "2025-12-19T09:22:07.787Z" } +sdist = { url = "https://files.pythonhosted.org/packages/21/45/1c5b308f68b01c4e33590a8e1445f43c51292917b28c2def8deaa5b3dc5b/redisvl-0.14.0.tar.gz", hash = "sha256:7a84c46858dbc86943e64ffe8590013684d03d79b72a634d10c02ce5d1c02335", size = 759829, upload-time = "2026-02-06T15:48:19.384Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b2/93/81ea5c45637ce7fe2fdaf214d5e1b91afe96a472edeb9b659e24d3710dfb/redisvl-0.13.2-py3-none-any.whl", hash = "sha256:dd998c6acc54f13526d464ad6b6e6f0c4cf6985fb2c7a1655bdf8ed8e57a4c01", size = 192760, upload-time = "2025-12-19T09:22:06.301Z" }, + { url = "https://files.pythonhosted.org/packages/24/e9/264455caf42501b2b0747ac4819c7d0a2b458fad5e4e1f7610b6383d6d74/redisvl-0.14.0-py3-none-any.whl", hash = "sha256:85ec38f414427260da82ef20653a62d4c2626b97672c5c950616e5dde3cf0d0b", size = 196705, upload-time = "2026-02-06T15:48:17.636Z" }, ] [[package]] @@ -6248,11 +6058,11 @@ wheels = [ [[package]] name = "setuptools" -version = "80.10.2" +version = "82.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/76/95/faf61eb8363f26aa7e1d762267a8d602a1b26d4f3a1e758e92cb3cb8b054/setuptools-80.10.2.tar.gz", hash = "sha256:8b0e9d10c784bf7d262c4e5ec5d4ec94127ce206e8738f29a437945fbc219b70", size = 1200343, upload-time = "2026-01-25T22:38:17.252Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/82/f3/748f4d6f65d1756b9ae577f329c951cda23fb900e4de9f70900ced962085/setuptools-82.0.0.tar.gz", hash = "sha256:22e0a2d69474c6ae4feb01951cb69d515ed23728cf96d05513d36e42b62b37cb", size = 1144893, upload-time = "2026-02-08T15:08:40.206Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/94/b8/f1f62a5e3c0ad2ff1d189590bfa4c46b4f3b6e49cef6f26c6ee4e575394d/setuptools-80.10.2-py3-none-any.whl", hash = "sha256:95b30ddfb717250edb492926c92b5221f7ef3fbcc2b07579bcd4a27da21d0173", size = 1064234, upload-time = "2026-01-25T22:38:15.216Z" }, + { url = "https://files.pythonhosted.org/packages/e1/c6/76dc613121b793286a3f91621d7b75a2b493e0390ddca50f11993eadf192/setuptools-82.0.0-py3-none-any.whl", hash = "sha256:70b18734b607bd1da571d097d236cfcfacaf01de45717d59e6e04b96877532e0", size = 1003468, upload-time = "2026-02-08T15:08:38.723Z" }, ] [[package]] @@ -6282,15 +6092,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, ] -[[package]] -name = "snowballstemmer" -version = "3.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/75/a7/9810d872919697c9d01295633f5d574fb416d47e535f258272ca1f01f447/snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895", size = 105575, upload-time = "2025-05-09T16:34:51.843Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/78/3565d011c61f5a43488987ee32b6f3f656e7f107ac2782dd57bdd7d91d9a/snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064", size = 103274, upload-time = "2025-05-09T16:34:50.371Z" }, -] - [[package]] name = "soundfile" version = "0.12.1" @@ -6309,87 +6110,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/50/ff/26a4ee48d0b66625a4e4028a055b9f25bc9d7c7b2d17d21a45137621a50d/soundfile-0.12.1-py2.py3-none-win_amd64.whl", hash = "sha256:0d86924c00b62552b650ddd28af426e3ff2d4dc2e9047dae5b3d8452e0a49a77", size = 1009109, upload-time = "2023-02-15T15:37:29.41Z" }, ] -[[package]] -name = "sphinx" -version = "6.1.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "alabaster", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "babel", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "colorama", marker = "sys_platform == 'win32'" }, - { name = "docutils", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "imagesize", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "jinja2", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "packaging", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "pygments", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "snowballstemmer", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 
'win32'" }, - { name = "sphinxcontrib-applehelp", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "sphinxcontrib-devhelp", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "sphinxcontrib-htmlhelp", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "sphinxcontrib-jsmath", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "sphinxcontrib-qthelp", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "sphinxcontrib-serializinghtml", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/db/0b/a0f60c4abd8a69bd5b0d20edde8a8d8d9d4ca825bbd920d328d248fd0290/Sphinx-6.1.3.tar.gz", hash = "sha256:0dac3b698538ffef41716cf97ba26c1c7788dba73ce6f150c1ff5b4720786dd2", size = 6663266, upload-time = "2023-01-10T15:58:38.349Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2e/2c/22a20486cad91a66f4f70bd88c20c8bb306ae719cbba93d7debae7efa80d/sphinx-6.1.3-py3-none-any.whl", hash = "sha256:807d1cb3d6be87eb78a381c3e70ebd8d346b9a25f3753e9947e866b2786865fc", size = 3027954, upload-time = "2023-01-10T15:58:34.907Z" }, -] - -[[package]] -name = "sphinxcontrib-applehelp" -version = "2.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ba/6e/b837e84a1a704953c62ef8776d45c3e8d759876b4a84fe14eba2859106fe/sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1", size = 20053, upload-time = "2024-07-29T01:09:00.465Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5d/85/9ebeae2f76e9e77b952f4b274c27238156eae7979c5421fba91a28f4970d/sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5", size = 119300, upload-time = "2024-07-29T01:08:58.99Z" }, -] - -[[package]] -name = "sphinxcontrib-devhelp" -version = "2.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f6/d2/5beee64d3e4e747f316bae86b55943f51e82bb86ecd325883ef65741e7da/sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad", size = 12967, upload-time = "2024-07-29T01:09:23.417Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/35/7a/987e583882f985fe4d7323774889ec58049171828b58c2217e7f79cdf44e/sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2", size = 82530, upload-time = "2024-07-29T01:09:21.945Z" }, -] - -[[package]] -name = "sphinxcontrib-htmlhelp" -version = "2.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/43/93/983afd9aa001e5201eab16b5a444ed5b9b0a7a010541e0ddfbbfd0b2470c/sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9", size = 22617, upload-time = "2024-07-29T01:09:37.889Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0a/7b/18a8c0bcec9182c05a0b3ec2a776bba4ead82750a55ff798e8d406dae604/sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8", 
size = 98705, upload-time = "2024-07-29T01:09:36.407Z" }, -] - -[[package]] -name = "sphinxcontrib-jsmath" -version = "1.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/e8/9ed3830aeed71f17c026a07a5097edcf44b692850ef215b161b8ad875729/sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8", size = 5787, upload-time = "2019-01-21T16:10:16.347Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/42/4c8646762ee83602e3fb3fbe774c2fac12f317deb0b5dbeeedd2d3ba4b77/sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", size = 5071, upload-time = "2019-01-21T16:10:14.333Z" }, -] - -[[package]] -name = "sphinxcontrib-qthelp" -version = "2.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/68/bc/9104308fc285eb3e0b31b67688235db556cd5b0ef31d96f30e45f2e51cae/sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab", size = 17165, upload-time = "2024-07-29T01:09:56.435Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/27/83/859ecdd180cacc13b1f7e857abf8582a64552ea7a061057a6c716e790fce/sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb", size = 88743, upload-time = "2024-07-29T01:09:54.885Z" }, -] - -[[package]] -name = "sphinxcontrib-serializinghtml" -version = "2.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3b/44/6716b257b0aa6bfd51a1b31665d1c205fb12cb5ad56de752dfa15657de2f/sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d", size = 16080, upload-time = "2024-07-29T01:10:09.332Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/52/a7/d2782e4e3f77c8450f727ba74a8f12756d5ba823d81b941f1b04da9d033a/sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331", size = 92072, upload-time = "2024-07-29T01:10:08.203Z" }, -] - [[package]] name = "sqlalchemy" version = "2.0.46" @@ -6461,15 +6181,15 @@ wheels = [ [[package]] name = "starlette" -version = "0.50.0" +version = "0.52.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ba/b8/73a0e6a6e079a9d9cfa64113d771e421640b6f679a52eeb9b32f72d871a1/starlette-0.50.0.tar.gz", hash = "sha256:a2a17b22203254bcbc2e1f926d2d55f3f9497f769416b3190768befe598fa3ca", size = 2646985, upload-time = "2025-11-01T15:25:27.516Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c4/68/79977123bb7be889ad680d79a40f339082c1978b5cfcf62c2d8d196873ac/starlette-0.52.1.tar.gz", hash = "sha256:834edd1b0a23167694292e94f597773bc3f89f362be6effee198165a35d62933", size = 2653702, upload-time = "2026-01-18T13:34:11.062Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/d9/52/1064f510b141bd54025f9b55105e26d1fa970b9be67ad766380a3c9b74b0/starlette-0.50.0-py3-none-any.whl", hash = "sha256:9e5391843ec9b6e472eed1365a78c8098cfceb7a74bfd4d6b1c0c0095efb3bca", size = 74033, upload-time = "2025-11-01T15:25:25.461Z" }, + { url = "https://files.pythonhosted.org/packages/81/0d/13d1d239a25cbfb19e740db83143e95c772a1fe10202dda4b76792b114dd/starlette-0.52.1-py3-none-any.whl", hash = "sha256:0029d43eb3d273bc4f83a08720b4912ea4b071087a3b48db01b7c839f7954d74", size = 74272, upload-time = "2026-01-18T13:34:09.188Z" }, ] [[package]] @@ -6529,11 +6249,11 @@ dependencies = [ [[package]] name = "tenacity" -version = "9.1.3" +version = "9.1.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1e/4a/c3357c8742f361785e3702bb4c9c68c4cb37a80aa657640b820669be5af1/tenacity-9.1.3.tar.gz", hash = "sha256:a6724c947aa717087e2531f883bde5c9188f603f6669a9b8d54eb998e604c12a", size = 49002, upload-time = "2026-02-05T06:33:12.866Z" } +sdist = { url = "https://files.pythonhosted.org/packages/47/c6/ee486fd809e357697ee8a44d3d69222b344920433d3b6666ccd9b374630c/tenacity-9.1.4.tar.gz", hash = "sha256:adb31d4c263f2bd041081ab33b498309a57c77f9acf2db65aadf0898179cf93a", size = 49413, upload-time = "2026-02-07T10:45:33.841Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/64/6b/cdc85edb15e384d8e934aad89638cc8646e118c80de94c60125d0fc0a185/tenacity-9.1.3-py3-none-any.whl", hash = "sha256:51171cfc6b8a7826551e2f029426b10a6af189c5ac6986adcd7eb36d42f17954", size = 28858, upload-time = "2026-02-05T06:33:11.219Z" }, + { url = "https://files.pythonhosted.org/packages/d7/c1/eb8f9debc45d3b7918a32ab756658a0904732f75e555402972246b0b8e71/tenacity-9.1.4-py3-none-any.whl", hash = "sha256:6095a360c919085f28c6527de529e76a06ad89b23659fa881ae0649b867a9d55", size = 28926, upload-time = "2026-02-07T10:45:32.24Z" }, ] [[package]] @@ -6894,21 +6614,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/63/9a/0962b05b308494e3202d3f794a6e85abe471fe3cafdbcf95c2e8c713aabd/uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553", size = 4660018, upload-time = "2024-10-14T23:38:10.888Z" }, ] -[[package]] -name = "virtualenv" -version = "20.36.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "distlib", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "filelock", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "platformdirs", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "typing-extensions", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/aa/a3/4d310fa5f00863544e1d0f4de93bddec248499ccf97d4791bc3122c9d4f3/virtualenv-20.36.1.tar.gz", hash = "sha256:8befb5c81842c641f8ee658481e42641c68b5eab3521d8e092d18320902466ba", size = 6032239, upload-time = "2026-01-09T18:21:01.296Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/2a/dc2228b2888f51192c7dc766106cd475f1b768c10caaf9727659726f7391/virtualenv-20.36.1-py3-none-any.whl", hash = "sha256:575a8d6b124ef88f6f51d56d656132389f961062a9177016a50e4f507bbcc19f", size = 6008258, 
upload-time = "2026-01-09T18:20:59.425Z" }, -] - [[package]] name = "watchdog" version = "6.0.0" @@ -7012,18 +6717,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ad/e4/8d97cca767bcc1be76d16fb76951608305561c6e056811587f36cb1316a8/werkzeug-3.1.5-py3-none-any.whl", hash = "sha256:5111e36e91086ece91f93268bb39b4a35c1e6f1feac762c9c822ded0a4e322dc", size = 225025, upload-time = "2026-01-08T17:49:21.859Z" }, ] -[[package]] -name = "wheel" -version = "0.46.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "packaging", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/89/24/a2eb353a6edac9a0303977c4cb048134959dd2a51b48a269dfc9dde00c8a/wheel-0.46.3.tar.gz", hash = "sha256:e3e79874b07d776c40bd6033f8ddf76a7dad46a7b8aa1b2787a83083519a1803", size = 60605, upload-time = "2026-01-22T12:39:49.136Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/87/22/b76d483683216dde3d67cba61fb2444be8d5be289bf628c13fc0fd90e5f9/wheel-0.46.3-py3-none-any.whl", hash = "sha256:4b399d56c9d9338230118d705d9737a2a468ccca63d5e813e2a4fc7815d8bc4d", size = 30557, upload-time = "2026-01-22T12:39:48.099Z" }, -] - [[package]] name = "win32-setctime" version = "1.2.0"