From 65fc9ae0ac07e2e376408a3b7cc396299ee51c70 Mon Sep 17 00:00:00 2001 From: Sergio Herrera Date: Fri, 10 Apr 2026 15:13:02 +0200 Subject: [PATCH 01/20] Remove Mechanical Markdown Signed-off-by: Sergio Herrera <627709+seherv@users.noreply.github.com> --- AGENTS.md | 24 ++- CLAUDE.md | 10 ++ README.md | 8 +- examples/AGENTS.md | 136 ++------------ examples/validate.sh | 4 - pyproject.toml | 5 + tests/integration/conftest.py | 166 ++++++++++++++++++ tests/integration/test_configuration.py | 51 ++++++ tests/integration/test_conversation.py | 28 +++ tests/integration/test_crypto.py | 64 +++++++ tests/integration/test_demo_actor.py | 45 +++++ tests/integration/test_distributed_lock.py | 21 +++ tests/integration/test_error_handling.py | 22 +++ tests/integration/test_grpc_proxying.py | 22 +++ tests/integration/test_invoke_binding.py | 66 +++++++ tests/integration/test_invoke_custom_data.py | 29 +++ tests/integration/test_invoke_simple.py | 36 ++++ tests/integration/test_jobs.py | 50 ++++++ .../test_langgraph_checkpointer.py | 21 +++ tests/integration/test_metadata.py | 22 +++ tests/integration/test_pubsub_simple.py | 43 +++++ tests/integration/test_pubsub_streaming.py | 69 ++++++++ .../test_pubsub_streaming_async.py | 69 ++++++++ tests/integration/test_secret_store.py | 33 ++++ tests/integration/test_state_store.py | 26 +++ tests/integration/test_state_store_query.py | 55 ++++++ tests/integration/test_w3c_tracing.py | 25 +++ tests/integration/test_workflow.py | 46 +++++ tox.ini | 63 ++----- 29 files changed, 1066 insertions(+), 193 deletions(-) delete mode 100755 examples/validate.sh create mode 100644 tests/integration/conftest.py create mode 100644 tests/integration/test_configuration.py create mode 100644 tests/integration/test_conversation.py create mode 100644 tests/integration/test_crypto.py create mode 100644 tests/integration/test_demo_actor.py create mode 100644 tests/integration/test_distributed_lock.py create mode 100644 tests/integration/test_error_handling.py 
create mode 100644 tests/integration/test_grpc_proxying.py create mode 100644 tests/integration/test_invoke_binding.py create mode 100644 tests/integration/test_invoke_custom_data.py create mode 100644 tests/integration/test_invoke_simple.py create mode 100644 tests/integration/test_jobs.py create mode 100644 tests/integration/test_langgraph_checkpointer.py create mode 100644 tests/integration/test_metadata.py create mode 100644 tests/integration/test_pubsub_simple.py create mode 100644 tests/integration/test_pubsub_streaming.py create mode 100644 tests/integration/test_pubsub_streaming_async.py create mode 100644 tests/integration/test_secret_store.py create mode 100644 tests/integration/test_state_store.py create mode 100644 tests/integration/test_state_store_query.py create mode 100644 tests/integration/test_w3c_tracing.py create mode 100644 tests/integration/test_workflow.py diff --git a/AGENTS.md b/AGENTS.md index db0790606..27eed72a5 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -61,15 +61,14 @@ Each extension is a **separate PyPI package** with its own `setup.cfg`, `setup.p ## Examples (integration test suite) -The `examples/` directory serves as both user-facing documentation and the project's integration test suite. Examples are validated in CI using [mechanical-markdown](https://pypi.org/project/mechanical-markdown/), which executes bash code blocks from README files and asserts expected output. +The `examples/` directory serves as both user-facing documentation and the project's integration test suite. Examples are validated by pytest-based integration tests in `tests/integration/`. -**See `examples/AGENTS.md`** for the full guide on example structure, validation, mechanical-markdown STEP blocks, and how to add new examples. +**See `examples/AGENTS.md`** for the full guide on example structure and how to add new examples. 
Quick reference: ```bash -tox -e examples # Run all examples (needs Dapr runtime) -tox -e example-component -- state_store # Run a single example -cd examples && ./validate.sh state_store # Run directly +tox -e integration # Run all examples (needs Dapr runtime) +tox -e integration -- test_state_store.py # Run a single example ``` ## Python version support @@ -107,8 +106,8 @@ tox -e ruff # Run type checking tox -e type -# Validate examples (requires Dapr runtime) -tox -e examples +# Run integration tests / validate examples (requires Dapr runtime) +tox -e integration ``` To run tests directly without tox: @@ -190,9 +189,8 @@ When completing any task on this project, work through this checklist. Not every ### Examples (integration tests) - [ ] If you added a new user-facing feature or building block, add or update an example in `examples/` -- [ ] Ensure the example README has `` blocks with `expected_stdout_lines` so it is validated in CI -- [ ] If you added a new example, register it in `tox.ini` under `[testenv:examples]` -- [ ] If you changed output format of existing functionality, update `expected_stdout_lines` in affected example READMEs +- [ ] Add a corresponding pytest integration test in `tests/integration/` +- [ ] If you changed output format of existing functionality, update expected output in the affected integration tests - [ ] See `examples/AGENTS.md` for full details on writing examples ### Documentation @@ -204,7 +202,7 @@ When completing any task on this project, work through this checklist. 
Not every - [ ] Run `tox -e ruff` — linting must be clean -- [ ] Run `tox -e py311` (or your Python version) — all unit tests must pass -- [ ] If you touched examples: `tox -e example-component -- ` to validate locally +- [ ] If you touched examples: `tox -e integration -- test_<example>.py` to validate locally - [ ] Commits must be signed off for DCO: `git commit -s` ## Important files @@ -219,7 +217,7 @@ When completing any task on this project, work through this checklist. Not every | `dev-requirements.txt` | Development/test dependencies | | `dapr/version/__init__.py` | SDK version string | | `ext/*/setup.cfg` | Extension package metadata and dependencies | -| `examples/validate.sh` | Entry point for mechanical-markdown example validation | +| `tests/integration/` | Pytest-based integration tests that validate examples | ## Gotchas @@ -228,6 +226,6 @@ When completing any task on this project, work through this checklist. Not every - **Extension independence**: Each extension is a separate PyPI package. Core SDK changes should not break extensions; extension changes should not require core SDK changes unless intentional. - **DCO signoff**: PRs will be blocked by the DCO bot if commits lack `Signed-off-by`. Always use `git commit -s`. - **Ruff version pinned**: Dev requirements pin `ruff === 0.14.1`. Use this exact version to match CI. -- **Examples are integration tests**: Changing output format (log messages, print statements) can break example validation. Always check `expected_stdout_lines` in example READMEs when modifying user-visible output. +- **Examples are integration tests**: Changing output format (log messages, print statements) can break integration tests. Always check expected output in `tests/integration/` when modifying user-visible output. - **Background processes in examples**: Examples that start background services (servers, subscribers) must include a cleanup step to stop them, or CI will hang. 
- **Workflow is the most active area**: See `ext/dapr-ext-workflow/AGENTS.md` for workflow-specific architecture and constraints. diff --git a/CLAUDE.md b/CLAUDE.md index 43c994c2d..84e2b1166 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -1 +1,11 @@ @AGENTS.md + +Use pathlib instead of os.path. +Use modern Python (3.10+) features. +Make all code strongly typed. +Keep conditional nesting to a minimum, and use guard clauses when possible. +Aim for medium "visual complexity": use intermediate variables to store results of nested/complex function calls, but don't create a new variable for everything. +Avoid comments unless there is an unusual gotcha, a complex algorithm or anything an experienced code reviewer needs to be aware of. Focus on making better Google-style docstrings instead. + +The user is not always right. Be skeptical and do not blindly comply if something doesn't make sense. +Code should be production-ready. \ No newline at end of file diff --git a/README.md b/README.md index e212cbd43..333be6933 100644 --- a/README.md +++ b/README.md @@ -121,19 +121,17 @@ tox -e py311 tox -e type ``` -8. Run examples +8. Run integration tests (validates the examples) ```bash -tox -e examples +tox -e integration ``` -[Dapr Mechanical Markdown](https://github.com/dapr/mechanical-markdown) is used to test the examples. - If you need to run the examples against a pre-released version of the runtime, you can use the following command: - Get your daprd runtime binary from [here](https://github.com/dapr/dapr/releases) for your platform. - Copy the binary to your dapr home folder at $HOME/.dapr/bin/daprd. Or using dapr cli directly: `dapr init --runtime-version ` -- Now you can run the example with `tox -e examples`. +- Now you can run the examples with `tox -e integration`. 
## Documentation diff --git a/examples/AGENTS.md b/examples/AGENTS.md index 677470d60..5dcbdd4ec 100644 --- a/examples/AGENTS.md +++ b/examples/AGENTS.md @@ -1,26 +1,22 @@ # AGENTS.md — Dapr Python SDK Examples -The `examples/` directory serves as both **user-facing documentation** and the project's **integration test suite**. Each example is a self-contained application validated automatically in CI using [mechanical-markdown](https://pypi.org/project/mechanical-markdown/), which executes bash code blocks embedded in README files and asserts expected output. +The `examples/` directory serves as both **user-facing documentation** and the project's **integration test suite**. Each example is a self-contained application validated by pytest-based integration tests in `tests/integration/`. ## How validation works -1. `examples/validate.sh` is the entry point — it `cd`s into an example directory and runs `mm.py -l README.md` -2. `mm.py` (mechanical-markdown) parses `` HTML comment blocks in the README -3. Each STEP block wraps a fenced bash code block that gets executed -4. stdout/stderr is captured and checked against `expected_stdout_lines` / `expected_stderr_lines` -5. Validation fails if any expected output line is missing +1. Each example has a corresponding test file in `tests/integration/` (e.g., `test_state_store.py`) +2. Tests use a `DaprRunner` helper (defined in `conftest.py`) that wraps `dapr run` commands +3. `DaprRunner.run()` executes a command and captures stdout; `DaprRunner.start()`/`stop()` manage background services +4. 
Tests assert that expected output lines appear in the captured output Run examples locally (requires a running Dapr runtime via `dapr init`): ```bash # All examples -tox -e examples +tox -e integration # Single example -tox -e example-component -- state_store - -# Or directly -cd examples && ./validate.sh state_store +tox -e integration -- test_state_store.py ``` In CI (`validate_examples.yaml`), examples run on all supported Python versions (3.10-3.14) on Ubuntu with a full Dapr runtime including Docker, Redis, and (for LLM examples) Ollama. @@ -31,7 +27,7 @@ Each example follows this pattern: ``` examples// -├── README.md # Documentation + mechanical-markdown STEP blocks (REQUIRED) +├── README.md # Documentation (REQUIRED) ├── *.py # Python application files ├── requirements.txt # Dependencies (optional — many examples rely on the installed SDK) ├── components/ # Dapr component YAML configs (if needed) @@ -46,53 +42,6 @@ Common Python file naming conventions: - Client/caller side: `*-caller.py`, `publisher.py`, `*_client.py` - Standalone: `state_store.py`, `crypto.py`, etc. 
-## Mechanical-markdown STEP block format - -STEP blocks are HTML comments wrapping fenced bash code in the README: - -````markdown - - -```bash -dapr run --app-id myapp --resources-path ./components/ python3 example.py -``` - - -```` - -### STEP block attributes - -| Attribute | Description | -|-----------|-------------| -| `name` | Descriptive name for the step | -| `expected_stdout_lines` | List of strings that must appear in stdout | -| `expected_stderr_lines` | List of strings that must appear in stderr | -| `background` | `true` to run in background (for long-running services) | -| `sleep` | Seconds to wait after starting before moving to the next step | -| `timeout_seconds` | Max seconds before the step is killed | -| `output_match_mode` | `substring` for partial matching (default is exact) | -| `match_order` | `none` if output lines can appear in any order | - -### Tips for writing STEP blocks - -- Use `background: true` with `sleep:` for services that need to stay running (servers, subscribers) -- Use `timeout_seconds:` to prevent CI hangs on broken examples -- Use `output_match_mode: substring` when output contains timestamps or dynamic content -- Use `match_order: none` when multiple concurrent operations produce unpredictable ordering -- Always include a cleanup step (e.g., `dapr stop --app-id ...`) when using background processes -- Make `expected_stdout_lines` specific enough to validate correctness, but not so brittle they break on cosmetic changes -- Dapr prefixes app output with `== APP ==` — use this in expected lines - ## Dapr component YAML format Components in `components/` directories follow the standard Dapr resource format: @@ -182,69 +131,18 @@ The `workflow` example includes: `simple.py`, `task_chaining.py`, `fan_out_fan_i 1. Create a directory under `examples/` with a descriptive kebab-case name 2. Add Python source files and a `requirements.txt` referencing the needed SDK packages 3. 
Add Dapr component YAMLs in a `components/` subdirectory if the example uses state, pubsub, etc. -4. Write a `README.md` with: - Introduction explaining what the example demonstrates - Pre-requisites section (Dapr CLI, Python 3.10+, any special tools) - Install instructions (`pip3 install dapr dapr-ext-grpc` etc.) - Running instructions with `` blocks wrapping `dapr run` commands - Expected output section - Cleanup step to stop background processes -5. Register the example in `tox.ini` under `[testenv:examples]` commands: - ``` - ./validate.sh your-example-name - ``` -6. Test locally: `cd examples && ./validate.sh your-example-name` - -## Common README template - -```markdown -# Dapr [Building Block] Example - -This example demonstrates how to use the Dapr [building block] API with the Python SDK. - -## Pre-requisites - -- [Dapr CLI and initialized environment](https://docs.dapr.io/getting-started) -- Python 3.10+ - -## Install Dapr python-SDK - -\`\`\`bash -pip3 install dapr dapr-ext-grpc -\`\`\` - -## Run the example - - - -\`\`\`bash -dapr run --app-id myapp --resources-path ./components/ python3 example.py -\`\`\` - - - -## Cleanup - - - -\`\`\`bash -dapr stop --app-id myapp -\`\`\` - - -``` +4. Write a `README.md` with introduction, pre-requisites, install instructions, and running instructions +5. Add a corresponding test in `tests/integration/test_<example>.py`: + - Use the `@pytest.mark.example_dir('<example>')` marker to set the working directory + - Use `dapr.run()` for scripts that exit on their own, `dapr.start()`/`dapr.stop()` for long-running services + - Assert expected output lines appear in the captured output +6. Test locally: `tox -e integration -- test_<example>.py` ## Gotchas -- **Output format changes break CI**: If you modify print statements or log output in SDK code, check whether any example's `expected_stdout_lines` depend on that output. -- **Background processes must be cleaned up**: Missing cleanup steps cause CI to hang. 
+- **Output format changes break tests**: If you modify print statements or log output in SDK code, check whether any integration test's expected lines depend on that output. +- **Background processes must be cleaned up**: The `DaprRunner` fixture handles cleanup on teardown, but tests should still call `dapr.stop()` to capture output. - **Dapr prefixes output**: Application stdout appears as `== APP == ` when run via `dapr run`. - **Redis is available in CI**: The CI environment has Redis running on `localhost:6379` — most component YAMLs use this. - **Some examples need special setup**: `crypto` generates keys, `configuration` seeds Redis, `conversation` needs LLM config — check individual READMEs. +- **Infinite-loop example scripts**: Some example scripts (e.g., `invoke-caller.py`) have `while True` loops for demo purposes. Integration tests must either bypass these with HTTP API calls or use `dapr.run(until=...)` for early termination. \ No newline at end of file diff --git a/examples/validate.sh b/examples/validate.sh deleted file mode 100755 index 202fcaedd..000000000 --- a/examples/validate.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/sh -echo "Home: $HOME" - -cd $1 && mm.py -l README.md diff --git a/pyproject.toml b/pyproject.toml index 2186f5074..f2dc537f0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,3 +22,8 @@ ignore = ["E501","E203", "E712", "E722", "E713"] [tool.ruff.format] quote-style = 'single' + +[tool.pytest.ini_options] +markers = [ + 'example_dir(name): set the example directory for the dapr fixture', +] diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py new file mode 100644 index 000000000..2cc7f33e9 --- /dev/null +++ b/tests/integration/conftest.py @@ -0,0 +1,166 @@ +import signal +import subprocess +import threading +import time +from pathlib import Path +from typing import Any, Generator + +import pytest + +REPO_ROOT = Path(__file__).resolve().parent.parent.parent +EXAMPLES_DIR = REPO_ROOT / 'examples' + + 
+class DaprRunner: + """Helper to run `dapr run` commands and capture output.""" + + def __init__(self, cwd: Path) -> None: + self._cwd = cwd + self._bg: subprocess.Popen[str] | None = None + self._bg_lines: list[str] = [] + self._bg_reader: threading.Thread | None = None + + def _spawn(self, args: str) -> subprocess.Popen[str]: + return subprocess.Popen( + f'dapr run {args}', + shell=True, + cwd=self._cwd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + text=True, + ) + + @staticmethod + def _terminate(proc: subprocess.Popen[str]) -> None: + if proc.poll() is not None: + return + proc.send_signal(signal.SIGTERM) + try: + proc.wait(timeout=10) + except subprocess.TimeoutExpired: + proc.kill() + proc.wait() + + def run(self, args: str, *, timeout: int = 30, until: list[str] | None = None) -> str: + """Run a `dapr run` command, stream output, and return it. + + Args: + args: Arguments passed to ``dapr run``. + timeout: Maximum seconds to wait before killing the process. + until: If provided, the process is terminated as soon as every + string in this list has appeared in the accumulated output. + """ + proc = self._spawn(args) + lines: list[str] = [] + remaining = set(until) if until else set() + assert proc.stdout is not None + + # Kill the process if it exceeds the timeout. A background timer is + # needed because `for line in proc.stdout` blocks indefinitely when + # the child never exits. + timer = threading.Timer(timeout, proc.kill) + timer.start() + + try: + for line in proc.stdout: + print(line, end='', flush=True) + lines.append(line) + if remaining: + output_so_far = ''.join(lines) + remaining = {exp for exp in remaining if exp not in output_so_far} + if not remaining: + break + finally: + timer.cancel() + self._terminate(proc) + + return ''.join(lines) + + def start(self, args: str, *, wait: int = 5) -> subprocess.Popen[str]: + """Start a `dapr run` command in the background and return the handle. 
+ + A reader thread continuously drains stdout so the pipe buffer never + fills up (which would block the child process). + """ + proc = self._spawn(args) + self._bg = proc + self._bg_lines = [] + + def drain() -> None: + assert proc.stdout is not None + for line in proc.stdout: + print(line, end='', flush=True) + self._bg_lines.append(line) + + self._bg_reader = threading.Thread(target=drain, daemon=True) + self._bg_reader.start() + time.sleep(wait) + return proc + + def stop(self, proc: subprocess.Popen[str]) -> str: + """Stop a background process and return its captured output.""" + self._terminate(proc) + self._bg = None + if self._bg_reader is not None: + self._bg_reader.join(timeout=5) + self._bg_reader = None + output = ''.join(self._bg_lines) + self._bg_lines = [] + return output + + def cleanup(self) -> None: + """Stop the background process if still running (teardown safety net).""" + if self._bg is not None: + self._terminate(self._bg) + self._bg = None + if self._bg_reader is not None: + self._bg_reader.join(timeout=5) + self._bg_reader = None + self._bg_lines = [] + + +def assert_lines_in_output(output: str, expected_lines: list[str], *, ordered: bool = True) -> None: + """Assert that each expected line appears as a substring in the output. + + Args: + output: The combined stdout/stderr string. + expected_lines: List of strings that must appear in the output. + ordered: If True, the expected lines must appear in order. 
+ """ + missing = [line for line in expected_lines if line not in output] + assert not missing, ( + f'Missing expected lines in output:\n Missing: {missing}\n Output:\n{output}' + ) + + if not ordered: + return + + positions = [output.index(line) for line in expected_lines] + out_of_order = [ + (expected_lines[i], expected_lines[i + 1]) + for i in range(len(positions) - 1) + if positions[i] > positions[i + 1] + ] + assert not out_of_order, ( + f'Lines appeared out of order:\n Out of order pairs: {out_of_order}\n Output:\n{output}' + ) + + +@pytest.fixture +def dapr(request: pytest.FixtureRequest) -> Generator[DaprRunner, Any, None]: + """Provides a DaprRunner scoped to an example directory. + + Use the ``example_dir`` marker to select which example: + + @pytest.mark.example_dir('state_store') + def test_something(dapr): + ... + + Defaults to the examples root if no marker is set. + """ + marker = request.node.get_closest_marker('example_dir') + cwd = EXAMPLES_DIR / marker.args[0] if marker else EXAMPLES_DIR + + runner = DaprRunner(cwd) + yield runner + runner.cleanup() diff --git a/tests/integration/test_configuration.py b/tests/integration/test_configuration.py new file mode 100644 index 000000000..9660e142d --- /dev/null +++ b/tests/integration/test_configuration.py @@ -0,0 +1,51 @@ +import subprocess +import time + +import pytest + + +EXPECTED_LINES = [ + 'Got key=orderId1 value=100 version=1 metadata={}', + 'Got key=orderId2 value=200 version=1 metadata={}', + 'Subscribe key=orderId2 value=210 version=2 metadata={}', + 'Unsubscribed successfully? 
True', +] + + +@pytest.fixture() +def redis_config(): + """Seed configuration values in Redis before the test.""" + subprocess.run( + 'docker exec dapr_redis redis-cli SET orderId1 "100||1"', + shell=True, + check=True, + capture_output=True, + ) + subprocess.run( + 'docker exec dapr_redis redis-cli SET orderId2 "200||1"', + shell=True, + check=True, + capture_output=True, + ) + + +@pytest.mark.example_dir('configuration') +def test_configuration(dapr, redis_config): + proc = dapr.start( + '--app-id configexample --resources-path components/ -- python3 configuration.py', + wait=5, + ) + # Update Redis to trigger the subscription notification + subprocess.run( + 'docker exec dapr_redis redis-cli SET orderId2 "210||2"', + shell=True, + check=True, + capture_output=True, + ) + # configuration.py sleeps 10s after subscribing before it unsubscribes. + # Wait long enough for the full script to finish. + time.sleep(10) + + output = dapr.stop(proc) + for line in EXPECTED_LINES: + assert line in output, f'Missing in output: {line}' diff --git a/tests/integration/test_conversation.py b/tests/integration/test_conversation.py new file mode 100644 index 000000000..76ec5ade0 --- /dev/null +++ b/tests/integration/test_conversation.py @@ -0,0 +1,28 @@ +import pytest + +EXPECTED_LINES = [ + "Result: What's Dapr?", + 'Give a brief overview.', +] + + +@pytest.mark.example_dir('conversation') +def test_conversation_alpha1(dapr): + output = dapr.run( + '--app-id conversation-alpha1 --log-level debug --resources-path ./config ' + '-- python3 conversation_alpha1.py', + timeout=60, + ) + for line in EXPECTED_LINES: + assert line in output, f'Missing in output: {line}' + + +@pytest.mark.example_dir('conversation') +def test_conversation_alpha2(dapr): + output = dapr.run( + '--app-id conversation-alpha2 --log-level debug --resources-path ./config ' + '-- python3 conversation_alpha2.py', + timeout=60, + ) + for line in EXPECTED_LINES: + assert line in output, f'Missing in output: {line}' 
diff --git a/tests/integration/test_crypto.py b/tests/integration/test_crypto.py new file mode 100644 index 000000000..1b7a4c527 --- /dev/null +++ b/tests/integration/test_crypto.py @@ -0,0 +1,64 @@ +import shutil +import subprocess +from pathlib import Path + +import pytest + +REPO_ROOT = Path(__file__).resolve().parent.parent.parent +CRYPTO_DIR = REPO_ROOT / 'examples' / 'crypto' + +EXPECTED_COMMON = [ + 'Running encrypt/decrypt operation on string', + 'Decrypted the message, got 24 bytes', + 'The secret is "passw0rd"', + 'Running encrypt/decrypt operation on file', + 'Wrote encrypted data to encrypted.out', + 'Wrote decrypted data to decrypted.out.jpg', +] + + +@pytest.fixture() +def crypto_keys(): + keys_dir = CRYPTO_DIR / 'keys' + keys_dir.mkdir(exist_ok=True) + subprocess.run( + 'openssl genpkey -algorithm RSA -pkeyopt rsa_keygen_bits:4096 ' + '-out keys/rsa-private-key.pem', + shell=True, + cwd=CRYPTO_DIR, + check=True, + capture_output=True, + ) + subprocess.run( + 'openssl rand -out keys/symmetric-key-256 32', + shell=True, + cwd=CRYPTO_DIR, + check=True, + capture_output=True, + ) + yield + shutil.rmtree(keys_dir, ignore_errors=True) + (CRYPTO_DIR / 'encrypted.out').unlink(missing_ok=True) + (CRYPTO_DIR / 'decrypted.out.jpg').unlink(missing_ok=True) + + +@pytest.mark.example_dir('crypto') +def test_crypto(dapr, crypto_keys): + output = dapr.run( + '--app-id crypto --resources-path ./components/ -- python3 crypto.py', + timeout=30, + ) + assert 'Running gRPC client synchronous API' in output + for line in EXPECTED_COMMON: + assert line in output, f'Missing in output: {line}' + + +@pytest.mark.example_dir('crypto') +def test_crypto_async(dapr, crypto_keys): + output = dapr.run( + '--app-id crypto-async --resources-path ./components/ -- python3 crypto-async.py', + timeout=30, + ) + assert 'Running gRPC client asynchronous API' in output + for line in EXPECTED_COMMON: + assert line in output, f'Missing in output: {line}' diff --git 
a/tests/integration/test_demo_actor.py b/tests/integration/test_demo_actor.py new file mode 100644 index 000000000..bef8e5476 --- /dev/null +++ b/tests/integration/test_demo_actor.py @@ -0,0 +1,45 @@ +import pytest + +EXPECTED_SERVICE = [ + 'Activate DemoActor actor!', + 'has_value: False', + "set_my_data: {'data': 'new_data'}", + 'has_value: True', + 'set reminder to True', + 'set reminder is done', + 'set_timer to True', + 'set_timer is done', + 'clear_my_data', +] + +EXPECTED_CLIENT = [ + 'call actor method via proxy.invoke_method()', + "b'null'", + 'call actor method using rpc style', + 'None', + 'call SetMyData actor method to save the state', + 'call GetMyData actor method to get the state', + 'Register reminder', + 'Register timer', + 'stop reminder', + 'stop timer', + 'clear actor state', +] + + +@pytest.mark.example_dir('demo_actor/demo_actor') +def test_demo_actor(dapr): + service = dapr.start( + '--app-id demo-actor --app-port 3000 -- uvicorn --port 3000 demo_actor_service:app', + wait=10, + ) + client_output = dapr.run( + '--app-id demo-client python3 demo_actor_client.py', + timeout=60, + ) + for line in EXPECTED_CLIENT: + assert line in client_output, f'Missing in client output: {line}' + + service_output = dapr.stop(service) + for line in EXPECTED_SERVICE: + assert line in service_output, f'Missing in service output: {line}' diff --git a/tests/integration/test_distributed_lock.py b/tests/integration/test_distributed_lock.py new file mode 100644 index 000000000..1d353484d --- /dev/null +++ b/tests/integration/test_distributed_lock.py @@ -0,0 +1,21 @@ +import pytest + +EXPECTED_LINES = [ + 'Will try to acquire a lock from lock store named [lockstore]', + 'The lock is for a resource named [example-lock-resource]', + 'The client identifier is [example-client-id]', + 'The lock will will expire in 60 seconds.', + 'Lock acquired successfully!!!', + 'We already released the lock so unlocking will not work.', + 'We tried to unlock it anyway and got back 
[UnlockResponseStatus.lock_does_not_exist]', +] + + +@pytest.mark.example_dir('distributed_lock') +def test_distributed_lock(dapr): + output = dapr.run( + '--app-id=locksapp --app-protocol grpc --resources-path components/ python3 lock.py', + timeout=10, + ) + for line in EXPECTED_LINES: + assert line in output, f'Missing in output: {line}' diff --git a/tests/integration/test_error_handling.py b/tests/integration/test_error_handling.py new file mode 100644 index 000000000..68c46af0e --- /dev/null +++ b/tests/integration/test_error_handling.py @@ -0,0 +1,22 @@ +import pytest + +EXPECTED_LINES = [ + 'Status code: StatusCode.INVALID_ARGUMENT', + "Message: input key/keyPrefix 'key||' can't contain '||'", + 'Error code: DAPR_STATE_ILLEGAL_KEY', + 'Error info(reason): DAPR_STATE_ILLEGAL_KEY', + 'Resource info (resource type): state', + 'Resource info (resource name): statestore', + 'Bad request (field): key||', + "Bad request (description): input key/keyPrefix 'key||' can't contain '||'", +] + + +@pytest.mark.example_dir('error_handling') +def test_error_handling(dapr): + output = dapr.run( + '--resources-path components -- python3 error_handling.py', + timeout=10, + ) + for line in EXPECTED_LINES: + assert line in output, f'Missing in output: {line}' diff --git a/tests/integration/test_grpc_proxying.py b/tests/integration/test_grpc_proxying.py new file mode 100644 index 000000000..a59f03685 --- /dev/null +++ b/tests/integration/test_grpc_proxying.py @@ -0,0 +1,22 @@ +import pytest + +EXPECTED_CALLER = [ + 'Greeter client received: Hello, you!', +] + + +@pytest.mark.example_dir('grpc_proxying') +def test_grpc_proxying(dapr): + receiver = dapr.start( + '--app-id invoke-receiver --app-protocol grpc --app-port 50051 ' + '--config config.yaml -- python invoke-receiver.py', + wait=5, + ) + caller_output = dapr.run( + '--app-id invoke-caller --dapr-grpc-port 50007 --config config.yaml -- python invoke-caller.py', + timeout=30, + ) + for line in EXPECTED_CALLER: + assert line 
in caller_output, f'Missing in caller output: {line}' + + dapr.stop(receiver) diff --git a/tests/integration/test_invoke_binding.py b/tests/integration/test_invoke_binding.py new file mode 100644 index 000000000..e2a39b2c7 --- /dev/null +++ b/tests/integration/test_invoke_binding.py @@ -0,0 +1,66 @@ +import json +import subprocess +import time +import urllib.request +from pathlib import Path + +import pytest + +REPO_ROOT = Path(__file__).resolve().parent.parent.parent +BINDING_DIR = REPO_ROOT / 'examples' / 'invoke-binding' + +EXPECTED_MESSAGES = [ + '{"id":1,"message":"hello world"}', + '{"id":2,"message":"hello world"}', + '{"id":3,"message":"hello world"}', +] + + +@pytest.fixture() +def kafka(): + subprocess.run( + 'docker compose -f ./docker-compose-single-kafka.yml up -d', + shell=True, + cwd=BINDING_DIR, + check=True, + capture_output=True, + ) + time.sleep(30) + yield + subprocess.run( + 'docker compose -f ./docker-compose-single-kafka.yml down', + shell=True, + cwd=BINDING_DIR, + capture_output=True, + ) + + +@pytest.mark.example_dir('invoke-binding') +def test_invoke_binding(dapr, kafka): + receiver = dapr.start( + '--app-id receiver --app-protocol grpc --app-port 50051 ' + '--dapr-http-port 3500 --resources-path ./components python3 invoke-input-binding.py', + wait=5, + ) + + # Publish through the receiver's sidecar (both scripts are infinite, + # so we reimplement the publisher here with a bounded loop). 
+ for n in range(1, 4): + body = json.dumps( + { + 'operation': 'create', + 'data': {'id': n, 'message': 'hello world'}, + } + ).encode() + req = urllib.request.Request( + 'http://localhost:3500/v1.0/bindings/kafkaBinding', + data=body, + headers={'Content-Type': 'application/json'}, + ) + urllib.request.urlopen(req) + time.sleep(1) + + time.sleep(5) + receiver_output = dapr.stop(receiver) + for line in EXPECTED_MESSAGES: + assert line in receiver_output, f'Missing in receiver output: {line}' diff --git a/tests/integration/test_invoke_custom_data.py b/tests/integration/test_invoke_custom_data.py new file mode 100644 index 000000000..11acdc106 --- /dev/null +++ b/tests/integration/test_invoke_custom_data.py @@ -0,0 +1,29 @@ +import pytest + +EXPECTED_RECEIVER = [ + 'SOME_DATA', +] + +EXPECTED_CALLER = [ + 'isSuccess: true', + 'code: 200', + 'message: "Hello World - Success!"', +] + + +@pytest.mark.example_dir('invoke-custom-data') +def test_invoke_custom_data(dapr): + receiver = dapr.start( + '--app-id invoke-receiver --app-protocol grpc --app-port 50051 python3 invoke-receiver.py', + wait=5, + ) + caller_output = dapr.run( + '--app-id invoke-caller --app-protocol grpc python3 invoke-caller.py', + timeout=30, + ) + for line in EXPECTED_CALLER: + assert line in caller_output, f'Missing in caller output: {line}' + + receiver_output = dapr.stop(receiver) + for line in EXPECTED_RECEIVER: + assert line in receiver_output, f'Missing in receiver output: {line}' diff --git a/tests/integration/test_invoke_simple.py b/tests/integration/test_invoke_simple.py new file mode 100644 index 000000000..6852f29d1 --- /dev/null +++ b/tests/integration/test_invoke_simple.py @@ -0,0 +1,36 @@ +import json +import urllib.request + +import pytest + +EXPECTED_RECEIVER = [ + '{"id": 1, "message": "hello world"}', +] + + +@pytest.mark.example_dir('invoke-simple') +def test_invoke_simple(dapr): + receiver = dapr.start( + '--app-id invoke-receiver --app-protocol grpc --app-port 50051 ' + 
'--dapr-http-port 3500 python3 invoke-receiver.py', + wait=5, + ) + + # invoke-caller.py runs an infinite loop, so we invoke the method + # directly through the sidecar HTTP API with a single call. + req_data = json.dumps({'id': 1, 'message': 'hello world'}).encode() + req = urllib.request.Request( + 'http://localhost:3500/v1.0/invoke/invoke-receiver/method/my-method', + data=req_data, + headers={'Content-Type': 'application/json'}, + ) + with urllib.request.urlopen(req) as resp: + content_type = resp.headers.get('Content-Type', '') + body = resp.read().decode() + + assert 'text/plain' in content_type + assert 'INVOKE_RECEIVED' in body + + receiver_output = dapr.stop(receiver) + for line in EXPECTED_RECEIVER: + assert line in receiver_output, f'Missing in receiver output: {line}' diff --git a/tests/integration/test_jobs.py b/tests/integration/test_jobs.py new file mode 100644 index 000000000..61eb67560 --- /dev/null +++ b/tests/integration/test_jobs.py @@ -0,0 +1,50 @@ +import pytest + +EXPECTED_MANAGEMENT = [ + '0. Scheduling a simple job without data...', + 'Simple job scheduled successfully', + '1. Scheduling a recurring job with cron schedule...', + 'Recurring job scheduled successfully', + '2. Scheduling a one-time job with due_time...', + 'One-time job scheduled successfully', + '3. Scheduling jobs with failure policies...', + 'Job with drop failure policy scheduled successfully', + 'Job with constant retry policy scheduled successfully', + '4. Getting job details...', + 'Retrieved job details:', + '5. Cleaning up - deleting jobs...', + 'Deleted job: simple-job', + 'Deleted job: recurring-hello-job', + 'Deleted job: one-time-hello-job', + 'Deleted job: drop-policy-job', + 'Deleted job: retry-policy-job', +] + +EXPECTED_PROCESSING = [ + 'Dapr Jobs Example', + 'Starting gRPC server on port 50051...', + 'Scheduling jobs...', + 'hello-job scheduled', + 'data-job scheduled', + 'Jobs scheduled! 
Waiting for execution...', + 'Job event received: hello-job', + 'Data job event received: data-job', +] + + +@pytest.mark.example_dir('jobs') +def test_job_management(dapr): + output = dapr.run('--app-id jobs-example -- python3 job_management.py', timeout=30) + for line in EXPECTED_MANAGEMENT: + assert line in output, f'Missing in output: {line}' + + +@pytest.mark.example_dir('jobs') +def test_job_processing(dapr): + proc = dapr.start( + '--app-id jobs-workflow --app-protocol grpc --app-port 50051 python3 job_processing.py', + wait=15, + ) + output = dapr.stop(proc) + for line in EXPECTED_PROCESSING: + assert line in output, f'Missing in output: {line}' diff --git a/tests/integration/test_langgraph_checkpointer.py b/tests/integration/test_langgraph_checkpointer.py new file mode 100644 index 000000000..93f758472 --- /dev/null +++ b/tests/integration/test_langgraph_checkpointer.py @@ -0,0 +1,21 @@ +import pytest + +langchain_ollama = pytest.importorskip('langchain_ollama', reason='langchain-ollama not installed') + +EXPECTED_LINES = [ + 'Add 3 and 4.', + '7', + '14', +] + + +@pytest.mark.example_dir('langgraph-checkpointer') +def test_langgraph_checkpointer(dapr): + proc = dapr.start( + '--app-id langgraph-checkpointer --app-port 5001 --dapr-grpc-port 5002 ' + '--resources-path ./components -- python3 agent.py', + wait=15, + ) + output = dapr.stop(proc) + for line in EXPECTED_LINES: + assert line in output, f'Missing in output: {line}' diff --git a/tests/integration/test_metadata.py b/tests/integration/test_metadata.py new file mode 100644 index 000000000..5beef49da --- /dev/null +++ b/tests/integration/test_metadata.py @@ -0,0 +1,22 @@ +import pytest + +EXPECTED_LINES = [ + 'First, we will assign a new custom label to Dapr sidecar', + "Now, we will fetch the sidecar's metadata", + 'And this is what we got:', + 'application_id: my-metadata-app', + 'active_actors_count: {}', + 'registered_components:', + 'We will update our custom label value and check it was 
persisted', + 'We added a custom label named [is-this-our-metadata-example]', +] + + +@pytest.mark.example_dir('metadata') +def test_metadata(dapr): + output = dapr.run( + '--app-id=my-metadata-app --app-protocol grpc --resources-path components/ python3 app.py', + timeout=10, + ) + for line in EXPECTED_LINES: + assert line in output, f'Missing in output: {line}' diff --git a/tests/integration/test_pubsub_simple.py b/tests/integration/test_pubsub_simple.py new file mode 100644 index 000000000..1b4f84981 --- /dev/null +++ b/tests/integration/test_pubsub_simple.py @@ -0,0 +1,43 @@ +import pytest + +EXPECTED_SUBSCRIBER = [ + 'Subscriber received: id=1, message="hello world", content_type="application/json"', + 'Subscriber received: id=2, message="hello world", content_type="application/json"', + 'Subscriber received: id=3, message="hello world", content_type="application/json"', + 'Wildcard-Subscriber received: id=4, message="hello world", content_type="application/json"', + 'Wildcard-Subscriber received: id=5, message="hello world", content_type="application/json"', + 'Wildcard-Subscriber received: id=6, message="hello world", content_type="application/json"', + 'Dead-Letter Subscriber received: id=7, message="hello world", content_type="application/json"', + 'Dead-Letter Subscriber. Received via deadletter topic: TOPIC_D_DEAD', + 'Dead-Letter Subscriber. Originally intended topic: TOPIC_D', + 'Subscriber received: TOPIC_CE', +] + +EXPECTED_PUBLISHER = [ + "{'id': 1, 'message': 'hello world'}", + "{'id': 2, 'message': 'hello world'}", + "{'id': 3, 'message': 'hello world'}", + 'Bulk published 3 events. 
Failed entries: 0', +] + + +@pytest.mark.example_dir('pubsub-simple') +def test_pubsub_simple(dapr): + subscriber = dapr.start( + '--app-id python-subscriber --app-protocol grpc --app-port 50051 -- python3 subscriber.py', + wait=5, + ) + publisher_output = dapr.run( + '--app-id python-publisher --app-protocol grpc --dapr-grpc-port=3500 ' + '--enable-app-health-check -- python3 publisher.py', + timeout=30, + ) + for line in EXPECTED_PUBLISHER: + assert line in publisher_output, f'Missing in publisher output: {line}' + + import time + + time.sleep(5) + subscriber_output = dapr.stop(subscriber) + for line in EXPECTED_SUBSCRIBER: + assert line in subscriber_output, f'Missing in subscriber output: {line}' diff --git a/tests/integration/test_pubsub_streaming.py b/tests/integration/test_pubsub_streaming.py new file mode 100644 index 000000000..81b3055bd --- /dev/null +++ b/tests/integration/test_pubsub_streaming.py @@ -0,0 +1,69 @@ +import time + +import pytest + +EXPECTED_SUBSCRIBER = [ + "Processing message: {'id': 1, 'message': 'hello world'} from TOPIC_A1...", + "Processing message: {'id': 2, 'message': 'hello world'} from TOPIC_A1...", + "Processing message: {'id': 3, 'message': 'hello world'} from TOPIC_A1...", + "Processing message: {'id': 4, 'message': 'hello world'} from TOPIC_A1...", + "Processing message: {'id': 5, 'message': 'hello world'} from TOPIC_A1...", + 'Closing subscription...', +] + +EXPECTED_HANDLER_SUBSCRIBER = [ + "Processing message: {'id': 1, 'message': 'hello world'} from TOPIC_A2...", + "Processing message: {'id': 2, 'message': 'hello world'} from TOPIC_A2...", + "Processing message: {'id': 3, 'message': 'hello world'} from TOPIC_A2...", + "Processing message: {'id': 4, 'message': 'hello world'} from TOPIC_A2...", + "Processing message: {'id': 5, 'message': 'hello world'} from TOPIC_A2...", + 'Closing subscription...', +] + +EXPECTED_PUBLISHER = [ + "{'id': 1, 'message': 'hello world'}", + "{'id': 2, 'message': 'hello world'}", + "{'id': 3, 
'message': 'hello world'}", + "{'id': 4, 'message': 'hello world'}", + "{'id': 5, 'message': 'hello world'}", +] + + +@pytest.mark.example_dir('pubsub-streaming') +def test_pubsub_streaming(dapr): + subscriber = dapr.start( + '--app-id python-subscriber --app-protocol grpc -- python3 subscriber.py --topic=TOPIC_A1', + wait=5, + ) + publisher_output = dapr.run( + '--app-id python-publisher --app-protocol grpc --dapr-grpc-port=3500 ' + '--enable-app-health-check -- python3 publisher.py --topic=TOPIC_A1', + timeout=30, + ) + for line in EXPECTED_PUBLISHER: + assert line in publisher_output, f'Missing in publisher output: {line}' + + time.sleep(5) + subscriber_output = dapr.stop(subscriber) + for line in EXPECTED_SUBSCRIBER: + assert line in subscriber_output, f'Missing in subscriber output: {line}' + + +@pytest.mark.example_dir('pubsub-streaming') +def test_pubsub_streaming_handler(dapr): + subscriber = dapr.start( + '--app-id python-subscriber --app-protocol grpc -- python3 subscriber-handler.py --topic=TOPIC_A2', + wait=5, + ) + publisher_output = dapr.run( + '--app-id python-publisher --app-protocol grpc --dapr-grpc-port=3500 ' + '--enable-app-health-check -- python3 publisher.py --topic=TOPIC_A2', + timeout=30, + ) + for line in EXPECTED_PUBLISHER: + assert line in publisher_output, f'Missing in publisher output: {line}' + + time.sleep(5) + subscriber_output = dapr.stop(subscriber) + for line in EXPECTED_HANDLER_SUBSCRIBER: + assert line in subscriber_output, f'Missing in subscriber output: {line}' diff --git a/tests/integration/test_pubsub_streaming_async.py b/tests/integration/test_pubsub_streaming_async.py new file mode 100644 index 000000000..d98a9a670 --- /dev/null +++ b/tests/integration/test_pubsub_streaming_async.py @@ -0,0 +1,69 @@ +import time + +import pytest + +EXPECTED_SUBSCRIBER = [ + "Processing message: {'id': 1, 'message': 'hello world'} from TOPIC_B1...", + "Processing message: {'id': 2, 'message': 'hello world'} from TOPIC_B1...", + "Processing 
message: {'id': 3, 'message': 'hello world'} from TOPIC_B1...", + "Processing message: {'id': 4, 'message': 'hello world'} from TOPIC_B1...", + "Processing message: {'id': 5, 'message': 'hello world'} from TOPIC_B1...", + 'Closing subscription...', +] + +EXPECTED_HANDLER_SUBSCRIBER = [ + "Processing message: {'id': 1, 'message': 'hello world'} from TOPIC_B2...", + "Processing message: {'id': 2, 'message': 'hello world'} from TOPIC_B2...", + "Processing message: {'id': 3, 'message': 'hello world'} from TOPIC_B2...", + "Processing message: {'id': 4, 'message': 'hello world'} from TOPIC_B2...", + "Processing message: {'id': 5, 'message': 'hello world'} from TOPIC_B2...", + 'Closing subscription...', +] + +EXPECTED_PUBLISHER = [ + "{'id': 1, 'message': 'hello world'}", + "{'id': 2, 'message': 'hello world'}", + "{'id': 3, 'message': 'hello world'}", + "{'id': 4, 'message': 'hello world'}", + "{'id': 5, 'message': 'hello world'}", +] + + +@pytest.mark.example_dir('pubsub-streaming-async') +def test_pubsub_streaming_async(dapr): + subscriber = dapr.start( + '--app-id python-subscriber --app-protocol grpc -- python3 subscriber.py --topic=TOPIC_B1', + wait=5, + ) + publisher_output = dapr.run( + '--app-id python-publisher --app-protocol grpc --dapr-grpc-port=3500 ' + '--enable-app-health-check -- python3 publisher.py --topic=TOPIC_B1', + timeout=30, + ) + for line in EXPECTED_PUBLISHER: + assert line in publisher_output, f'Missing in publisher output: {line}' + + time.sleep(5) + subscriber_output = dapr.stop(subscriber) + for line in EXPECTED_SUBSCRIBER: + assert line in subscriber_output, f'Missing in subscriber output: {line}' + + +@pytest.mark.example_dir('pubsub-streaming-async') +def test_pubsub_streaming_async_handler(dapr): + subscriber = dapr.start( + '--app-id python-subscriber --app-protocol grpc -- python3 subscriber-handler.py --topic=TOPIC_B2', + wait=5, + ) + publisher_output = dapr.run( + '--app-id python-publisher --app-protocol grpc --dapr-grpc-port=3500 ' 
+ '--enable-app-health-check -- python3 publisher.py --topic=TOPIC_B2', + timeout=30, + ) + for line in EXPECTED_PUBLISHER: + assert line in publisher_output, f'Missing in publisher output: {line}' + + time.sleep(5) + subscriber_output = dapr.stop(subscriber) + for line in EXPECTED_HANDLER_SUBSCRIBER: + assert line in subscriber_output, f'Missing in subscriber output: {line}' diff --git a/tests/integration/test_secret_store.py b/tests/integration/test_secret_store.py new file mode 100644 index 000000000..f14baf0eb --- /dev/null +++ b/tests/integration/test_secret_store.py @@ -0,0 +1,33 @@ +import pytest + +EXPECTED_LINES = [ + "{'secretKey': 'secretValue'}", + "[('random', {'random': 'randomValue'}), ('secretKey', {'secretKey': 'secretValue'})]", + "{'random': 'randomValue'}", +] + +EXPECTED_LINES_WITH_ACL = [ + "{'secretKey': 'secretValue'}", + "[('secretKey', {'secretKey': 'secretValue'})]", + 'Got expected error for accessing random key', +] + + +@pytest.mark.example_dir('secret_store') +def test_secret_store(dapr): + output = dapr.run( + '--app-id=secretsapp --app-protocol grpc --resources-path components/ -- python3 example.py', + timeout=30, + ) + for line in EXPECTED_LINES: + assert line in output, f'Missing in output: {line}' + + +@pytest.mark.example_dir('secret_store') +def test_secret_store_with_access_control(dapr): + output = dapr.run( + '--app-id=secretsapp --app-protocol grpc --config config.yaml --resources-path components/ -- python3 example.py', + timeout=30, + ) + for line in EXPECTED_LINES_WITH_ACL: + assert line in output, f'Missing in output: {line}' diff --git a/tests/integration/test_state_store.py b/tests/integration/test_state_store.py new file mode 100644 index 000000000..1f22f6541 --- /dev/null +++ b/tests/integration/test_state_store.py @@ -0,0 +1,26 @@ +import pytest + +from conftest import assert_lines_in_output + +EXPECTED_LINES = [ + 'State store has successfully saved value_1 with key_1 as key', + 'Cannot save due to bad etag. 
ErrorCode=StatusCode.ABORTED', + 'State store has successfully saved value_2 with key_2 as key', + 'State store has successfully saved value_3 with key_3 as key', + 'Cannot save bulk due to bad etags. ErrorCode=StatusCode.ABORTED', + "Got value=b'value_1' eTag=1", + "Got items with etags: [(b'value_1_updated', '2'), (b'value_2', '2')]", + 'Transaction with outbox pattern executed successfully!', + "Got value after outbox pattern: b'val1'", + "Got values after transaction delete: [b'', b'']", + "Got value after delete: b''", +] + + +@pytest.mark.example_dir('state_store') +def test_state_store(dapr): + output = dapr.run( + '--resources-path components/ -- python3 state_store.py', + timeout=30, + ) + assert_lines_in_output(output, EXPECTED_LINES, ordered=True) diff --git a/tests/integration/test_state_store_query.py b/tests/integration/test_state_store_query.py new file mode 100644 index 000000000..02319cca6 --- /dev/null +++ b/tests/integration/test_state_store_query.py @@ -0,0 +1,55 @@ +import subprocess +from pathlib import Path + +import pytest + +REPO_ROOT = Path(__file__).resolve().parent.parent.parent +EXAMPLE_DIR = REPO_ROOT / 'examples' / 'state_store_query' + +EXPECTED_LINES = [ + '1 {"city": "Seattle", "person": {"id": 1036.0, "org": "Dev Ops"}, "state": "WA"}', + '4 {"city": "Spokane", "person": {"id": 1042.0, "org": "Dev Ops"}, "state": "WA"}', + '10 {"city": "New York", "person": {"id": 1054.0, "org": "Dev Ops"}, "state": "NY"}', + 'Token: 3', + '9 {"city": "San Diego", "person": {"id": 1002.0, "org": "Finance"}, "state": "CA"}', + '7 {"city": "San Francisco", "person": {"id": 1015.0, "org": "Dev Ops"}, "state": "CA"}', + '3 {"city": "Sacramento", "person": {"id": 1071.0, "org": "Finance"}, "state": "CA"}', + 'Token: 6', +] + + +@pytest.fixture() +def mongodb(): + subprocess.run( + 'docker run -d --rm -p 27017:27017 --name mongodb mongo:5', + shell=True, + check=True, + capture_output=True, + ) + yield + subprocess.run( + 'docker kill mongodb', + 
shell=True, + capture_output=True, + ) + + +@pytest.fixture() +def import_data(mongodb, dapr): + """Import the test dataset into the state store via Dapr.""" + dapr.run( + '--app-id demo --dapr-http-port 3500 --resources-path components ' + '-- curl -X POST -H "Content-Type: application/json" ' + '-d @dataset.json http://localhost:3500/v1.0/state/statestore', + timeout=15, + ) + + +@pytest.mark.example_dir('state_store_query') +def test_state_store_query(dapr, import_data): + output = dapr.run( + '--app-id queryexample --resources-path components/ -- python3 state_store_query.py', + timeout=10, + ) + for line in EXPECTED_LINES: + assert line in output, f'Missing in output: {line}' diff --git a/tests/integration/test_w3c_tracing.py b/tests/integration/test_w3c_tracing.py new file mode 100644 index 000000000..4cd53780a --- /dev/null +++ b/tests/integration/test_w3c_tracing.py @@ -0,0 +1,25 @@ +import pytest + +EXPECTED_CALLER = [ + 'application/json', + 'SAY', + 'text/plain', + 'SLEEP', + 'Trace ID matches after forwarding', +] + + +@pytest.mark.example_dir('w3c-tracing') +def test_w3c_tracing(dapr): + receiver = dapr.start( + '--app-id invoke-receiver --app-protocol grpc --app-port 3001 python3 invoke-receiver.py', + wait=5, + ) + caller_output = dapr.run( + '--app-id invoke-caller --app-protocol grpc python3 invoke-caller.py', + timeout=30, + ) + for line in EXPECTED_CALLER: + assert line in caller_output, f'Missing in caller output: {line}' + + dapr.stop(receiver) diff --git a/tests/integration/test_workflow.py b/tests/integration/test_workflow.py new file mode 100644 index 000000000..b05b3a299 --- /dev/null +++ b/tests/integration/test_workflow.py @@ -0,0 +1,46 @@ +import pytest + +EXPECTED_TASK_CHAINING = [ + 'Step 1: Received input: 42.', + 'Step 2: Received input: 43.', + 'Step 3: Received input: 86.', + 'Workflow completed! 
Status: WorkflowStatus.COMPLETED', +] + +EXPECTED_FAN_OUT_FAN_IN = [ + 'Final result: 110.', +] + +EXPECTED_SIMPLE = [ + 'Hi Counter!', + 'New counter value is: 1!', + 'New counter value is: 11!', + 'Retry count value is: 0!', + 'Retry count value is: 1! This print statement verifies retry', + 'Get response from hello_world_wf after pause call: SUSPENDED', + 'Get response from hello_world_wf after resume call: RUNNING', + 'New counter value is: 111!', + 'New counter value is: 1111!', + 'Workflow completed! Result: Completed', +] + + +@pytest.mark.example_dir('workflow') +def test_task_chaining(dapr): + output = dapr.run('-- python3 task_chaining.py', timeout=30) + for line in EXPECTED_TASK_CHAINING: + assert line in output, f'Missing in output: {line}' + + +@pytest.mark.example_dir('workflow') +def test_fan_out_fan_in(dapr): + output = dapr.run('-- python3 fan_out_fan_in.py', timeout=60) + for line in EXPECTED_FAN_OUT_FAN_IN: + assert line in output, f'Missing in output: {line}' + + +@pytest.mark.example_dir('workflow') +def test_simple_workflow(dapr): + output = dapr.run('-- python3 simple.py', timeout=60) + for line in EXPECTED_SIMPLE: + assert line in output, f'Missing in output: {line}' diff --git a/tox.ini b/tox.ini index 698e383e9..93cfea2b2 100644 --- a/tox.ini +++ b/tox.ini @@ -38,71 +38,30 @@ commands = ruff check --fix ruff format -[testenv:examples] +[testenv:integration] +; Pytest-based integration tests that validate the examples/ directory. 
+; Usage: tox -e integration # run all +; tox -e integration -- test_state_store.py # run one passenv = HOME basepython = python3 -changedir = ./examples/ -deps = - mechanical-markdown - +changedir = ./tests/integration/ commands = - ./validate.sh conversation - ./validate.sh crypto - ./validate.sh metadata - ./validate.sh error_handling - ./validate.sh pubsub-simple - ./validate.sh pubsub-streaming - ./validate.sh pubsub-streaming-async - ./validate.sh state_store - ./validate.sh state_store_query - ./validate.sh secret_store - ./validate.sh invoke-simple - ./validate.sh invoke-custom-data - ./validate.sh demo_actor - ./validate.sh invoke-binding - ./validate.sh grpc_proxying - ./validate.sh w3c-tracing - ./validate.sh distributed_lock - ./validate.sh configuration - ./validate.sh workflow - ./validate.sh jobs - ./validate.sh langgraph-checkpointer - ./validate.sh ../ -allowlist_externals=* - -commands_pre = - pip uninstall -y dapr dapr-ext-grpc dapr-ext-fastapi dapr-ext-langgraph dapr-ext-strands dapr-ext-flask dapr-ext-langgraph dapr-ext-strands - pip install -e {toxinidir}/ \ - -e {toxinidir}/ext/dapr-ext-workflow/ \ - -e {toxinidir}/ext/dapr-ext-grpc/ \ - -e {toxinidir}/ext/dapr-ext-fastapi/ \ - -e {toxinidir}/ext/dapr-ext-langgraph/ \ - -e {toxinidir}/ext/dapr-ext-strands/ \ - -e {toxinidir}/ext/flask_dapr/ - -[testenv:example-component] -; This environment is used to validate a specific example component. 
-; Usage: tox -e example-component -- component_name -; Example: tox -e example-component -- conversation -passenv = HOME -basepython = python3 -changedir = ./examples/ -deps = - mechanical-markdown -commands = - ./validate.sh {posargs} + pytest {posargs} -v --tb=short allowlist_externals=* commands_pre = pip uninstall -y dapr dapr-ext-grpc dapr-ext-fastapi dapr-ext-langgraph dapr-ext-strands dapr-ext-flask dapr-ext-langgraph dapr-ext-strands - pip install -e {toxinidir}/ \ + pip install -r {toxinidir}/dev-requirements.txt \ + -e {toxinidir}/ \ -e {toxinidir}/ext/dapr-ext-workflow/ \ -e {toxinidir}/ext/dapr-ext-grpc/ \ -e {toxinidir}/ext/dapr-ext-fastapi/ \ -e {toxinidir}/ext/dapr-ext-langgraph/ \ -e {toxinidir}/ext/dapr-ext-strands/ \ - -e {toxinidir}/ext/flask_dapr/ + -e {toxinidir}/ext/flask_dapr/ \ + opentelemetry-exporter-zipkin \ + uvicorn [testenv:type] basepython = python3 From f2cb1894d225e8ac1615627afe888a33f90cfd2a Mon Sep 17 00:00:00 2001 From: Sergio Herrera Date: Mon, 13 Apr 2026 14:13:22 +0200 Subject: [PATCH 02/20] Address Copilot comments Signed-off-by: Sergio Herrera <627709+seherv@users.noreply.github.com> --- .github/workflows/validate_examples.yaml | 4 +- tests/integration/conftest.py | 105 +++++++++-------------- tests/integration/test_invoke_binding.py | 20 ++--- tests/integration/test_state_store.py | 5 +- 4 files changed, 52 insertions(+), 82 deletions(-) diff --git a/.github/workflows/validate_examples.yaml b/.github/workflows/validate_examples.yaml index e6d8e0485..7f109a868 100644 --- a/.github/workflows/validate_examples.yaml +++ b/.github/workflows/validate_examples.yaml @@ -151,6 +151,6 @@ jobs: cd dapr_runtime ./dist/linux_amd64/release/placement --healthz-port 9091 & cd .. 
- - name: Check Examples + - name: Check examples run: | - tox -e examples + tox -e integration diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 2cc7f33e9..ade6b2286 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -1,9 +1,10 @@ import signal import subprocess +import tempfile import threading import time from pathlib import Path -from typing import Any, Generator +from typing import IO, Any, Generator import pytest @@ -16,9 +17,8 @@ class DaprRunner: def __init__(self, cwd: Path) -> None: self._cwd = cwd - self._bg: subprocess.Popen[str] | None = None - self._bg_lines: list[str] = [] - self._bg_reader: threading.Thread | None = None + self._bg_process: subprocess.Popen[str] | None = None + self._bg_output_file: IO[str] | None = None def _spawn(self, args: str) -> subprocess.Popen[str]: return subprocess.Popen( @@ -42,7 +42,10 @@ def _terminate(proc: subprocess.Popen[str]) -> None: proc.wait() def run(self, args: str, *, timeout: int = 30, until: list[str] | None = None) -> str: - """Run a `dapr run` command, stream output, and return it. + """Run a foreground command, block until it finishes, and return output. + + Use this for short-lived processes (e.g. a publisher that exits on its + own). For long-lived background services, use ``start()``/``stop()``. Args: args: Arguments passed to ``dapr run``. @@ -58,7 +61,7 @@ def run(self, args: str, *, timeout: int = 30, until: list[str] | None = None) - # Kill the process if it exceeds the timeout. A background timer is # needed because `for line in proc.stdout` blocks indefinitely when # the child never exits. 
- timer = threading.Timer(timeout, proc.kill) + timer = threading.Timer(interval=timeout, function=proc.kill) timer.start() try: @@ -77,73 +80,49 @@ def run(self, args: str, *, timeout: int = 30, until: list[str] | None = None) - return ''.join(lines) def start(self, args: str, *, wait: int = 5) -> subprocess.Popen[str]: - """Start a `dapr run` command in the background and return the handle. + """Start a long-lived background service and return the process handle. - A reader thread continuously drains stdout so the pipe buffer never - fills up (which would block the child process). + Use this for servers/subscribers that must stay alive while a second + process runs via ``run()``. Call ``stop()`` to terminate and collect + output. Stdout is written to a temp file to avoid pipe-buffer deadlocks. """ - proc = self._spawn(args) - self._bg = proc - self._bg_lines = [] - - def drain() -> None: - assert proc.stdout is not None - for line in proc.stdout: - print(line, end='', flush=True) - self._bg_lines.append(line) - - self._bg_reader = threading.Thread(target=drain, daemon=True) - self._bg_reader.start() + output_file = tempfile.NamedTemporaryFile(mode='w+', suffix='.log', delete=False) + proc = subprocess.Popen( + f'dapr run {args}', + shell=True, + cwd=self._cwd, + stdout=output_file, + stderr=subprocess.STDOUT, + text=True, + ) + self._bg_process = proc + self._bg_output_file = output_file time.sleep(wait) return proc def stop(self, proc: subprocess.Popen[str]) -> str: """Stop a background process and return its captured output.""" self._terminate(proc) - self._bg = None - if self._bg_reader is not None: - self._bg_reader.join(timeout=5) - self._bg_reader = None - output = ''.join(self._bg_lines) - self._bg_lines = [] - return output + self._bg_process = None + return self._read_and_close_output() def cleanup(self) -> None: - """Stop the background process if still running (teardown safety net).""" - if self._bg is not None: - self._terminate(self._bg) - self._bg = 
None - if self._bg_reader is not None: - self._bg_reader.join(timeout=5) - self._bg_reader = None - self._bg_lines = [] - - -def assert_lines_in_output(output: str, expected_lines: list[str], *, ordered: bool = True) -> None: - """Assert that each expected line appears as a substring in the output. - - Args: - output: The combined stdout/stderr string. - expected_lines: List of strings that must appear in the output. - ordered: If True, the expected lines must appear in order. - """ - missing = [line for line in expected_lines if line not in output] - assert not missing, ( - f'Missing expected lines in output:\n Missing: {missing}\n Output:\n{output}' - ) - - if not ordered: - return - - positions = [output.index(line) for line in expected_lines] - out_of_order = [ - (expected_lines[i], expected_lines[i + 1]) - for i in range(len(positions) - 1) - if positions[i] > positions[i + 1] - ] - assert not out_of_order, ( - f'Lines appeared out of order:\n Out of order pairs: {out_of_order}\n Output:\n{output}' - ) + """Stop the background process if still running.""" + if self._bg_process is not None: + self._terminate(self._bg_process) + self._bg_process = None + self._read_and_close_output() + + def _read_and_close_output(self) -> str: + if self._bg_output_file is None: + return '' + output_path = Path(self._bg_output_file.name) + self._bg_output_file.close() + self._bg_output_file = None + output = output_path.read_text() + output_path.unlink(missing_ok=True) + print(output, end='', flush=True) + return output @pytest.fixture diff --git a/tests/integration/test_invoke_binding.py b/tests/integration/test_invoke_binding.py index e2a39b2c7..db89aa1bc 100644 --- a/tests/integration/test_invoke_binding.py +++ b/tests/integration/test_invoke_binding.py @@ -1,10 +1,9 @@ -import json import subprocess import time -import urllib.request from pathlib import Path import pytest +import httpx REPO_ROOT = Path(__file__).resolve().parent.parent.parent BINDING_DIR = REPO_ROOT / 
'examples' / 'invoke-binding' @@ -46,18 +45,11 @@ def test_invoke_binding(dapr, kafka): # Publish through the receiver's sidecar (both scripts are infinite, # so we reimplement the publisher here with a bounded loop). for n in range(1, 4): - body = json.dumps( - { - 'operation': 'create', - 'data': {'id': n, 'message': 'hello world'}, - } - ).encode() - req = urllib.request.Request( - 'http://localhost:3500/v1.0/bindings/kafkaBinding', - data=body, - headers={'Content-Type': 'application/json'}, - ) - urllib.request.urlopen(req) + payload = { + 'operation': 'create', + 'data': {'id': n, 'message': 'hello world'}, + } + httpx.post('http://localhost:3500/v1.0/bindings/kafkaBinding', json=payload) time.sleep(1) time.sleep(5) diff --git a/tests/integration/test_state_store.py b/tests/integration/test_state_store.py index 1f22f6541..05d67d032 100644 --- a/tests/integration/test_state_store.py +++ b/tests/integration/test_state_store.py @@ -1,7 +1,5 @@ import pytest -from conftest import assert_lines_in_output - EXPECTED_LINES = [ 'State store has successfully saved value_1 with key_1 as key', 'Cannot save due to bad etag. 
ErrorCode=StatusCode.ABORTED', @@ -23,4 +21,5 @@ def test_state_store(dapr): '--resources-path components/ -- python3 state_store.py', timeout=30, ) - assert_lines_in_output(output, EXPECTED_LINES, ordered=True) + for line in EXPECTED_LINES: + assert line in output, f'Missing in output: {line}' From a7989221fba2197a3ec852c222bf0dd21d5211eb Mon Sep 17 00:00:00 2001 From: Sergio Herrera <627709+seherv@users.noreply.github.com> Date: Mon, 13 Apr 2026 19:06:11 +0200 Subject: [PATCH 03/20] Address Copilot comments (2) Signed-off-by: Sergio Herrera <627709+seherv@users.noreply.github.com> --- tests/integration/test_invoke_binding.py | 1 + tests/integration/test_langgraph_checkpointer.py | 2 -- tests/integration/test_state_store_query.py | 4 ---- 3 files changed, 1 insertion(+), 6 deletions(-) diff --git a/tests/integration/test_invoke_binding.py b/tests/integration/test_invoke_binding.py index db89aa1bc..5e2503fa6 100644 --- a/tests/integration/test_invoke_binding.py +++ b/tests/integration/test_invoke_binding.py @@ -30,6 +30,7 @@ def kafka(): 'docker compose -f ./docker-compose-single-kafka.yml down', shell=True, cwd=BINDING_DIR, + check=True, capture_output=True, ) diff --git a/tests/integration/test_langgraph_checkpointer.py b/tests/integration/test_langgraph_checkpointer.py index 93f758472..fd4d88424 100644 --- a/tests/integration/test_langgraph_checkpointer.py +++ b/tests/integration/test_langgraph_checkpointer.py @@ -1,7 +1,5 @@ import pytest -langchain_ollama = pytest.importorskip('langchain_ollama', reason='langchain-ollama not installed') - EXPECTED_LINES = [ 'Add 3 and 4.', '7', diff --git a/tests/integration/test_state_store_query.py b/tests/integration/test_state_store_query.py index 02319cca6..1af276660 100644 --- a/tests/integration/test_state_store_query.py +++ b/tests/integration/test_state_store_query.py @@ -1,11 +1,7 @@ import subprocess -from pathlib import Path import pytest -REPO_ROOT = Path(__file__).resolve().parent.parent.parent -EXAMPLE_DIR = 
REPO_ROOT / 'examples' / 'state_store_query' - EXPECTED_LINES = [ '1 {"city": "Seattle", "person": {"id": 1036.0, "org": "Dev Ops"}, "state": "WA"}', '4 {"city": "Spokane", "person": {"id": 1042.0, "org": "Dev Ops"}, "state": "WA"}', From 93cbe59b5b9d974d90c58175f4fdc52b41aa10e7 Mon Sep 17 00:00:00 2001 From: Sergio Herrera <627709+seherv@users.noreply.github.com> Date: Tue, 14 Apr 2026 16:10:32 +0200 Subject: [PATCH 04/20] Make Langgraph test runnable in local without API keys Signed-off-by: Sergio Herrera <627709+seherv@users.noreply.github.com> --- .../test_langgraph_checkpointer.py | 63 +++++++++++++++++-- tox.ini | 1 + 2 files changed, 59 insertions(+), 5 deletions(-) diff --git a/tests/integration/test_langgraph_checkpointer.py b/tests/integration/test_langgraph_checkpointer.py index fd4d88424..54282398c 100644 --- a/tests/integration/test_langgraph_checkpointer.py +++ b/tests/integration/test_langgraph_checkpointer.py @@ -1,5 +1,12 @@ +import subprocess +import time + +import httpx import pytest +OLLAMA_URL = 'http://localhost:11434' +MODEL = 'llama3.2:3b' + EXPECTED_LINES = [ 'Add 3 and 4.', '7', @@ -7,13 +14,59 @@ ] +def _ollama_ready() -> bool: + try: + return httpx.get(f'{OLLAMA_URL}/api/tags', timeout=2).is_success + except httpx.ConnectError: + return False + + +def _model_available() -> bool: + resp = httpx.get(f'{OLLAMA_URL}/api/tags', timeout=5) + return any(m['name'] == MODEL for m in resp.json().get('models', [])) + + +@pytest.fixture() +def ollama(): + """Ensure Ollama is running and the required model is pulled. + + Reuses a running instance if available, otherwise starts one for + the duration of the test. Skips if the ollama CLI is not installed. 
+ """ + started: subprocess.Popen[bytes] | None = None + if not _ollama_ready(): + try: + started = subprocess.Popen( + ['ollama', 'serve'], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + except FileNotFoundError: + pytest.skip('ollama is not installed') + time.sleep(10) + + if not _model_available(): + subprocess.run(['ollama', 'pull', MODEL], check=True, capture_output=True) + + yield + + if started: + started.terminate() + started.wait() + + +@pytest.fixture() +def flush_redis(): + """This test is not replayable if the checkpointer state store is not clean""" + subprocess.run(['redis-cli', 'FLUSHDB'], capture_output=True) + + @pytest.mark.example_dir('langgraph-checkpointer') -def test_langgraph_checkpointer(dapr): - proc = dapr.start( - '--app-id langgraph-checkpointer --app-port 5001 --dapr-grpc-port 5002 ' +def test_langgraph_checkpointer(dapr, ollama, flush_redis): + output = dapr.run( + '--app-id langgraph-checkpointer --dapr-grpc-port 5002 ' '--resources-path ./components -- python3 agent.py', - wait=15, + timeout=120, ) - output = dapr.stop(proc) for line in EXPECTED_LINES: assert line in output, f'Missing in output: {line}' diff --git a/tox.ini b/tox.ini index 93cfea2b2..809d8cc77 100644 --- a/tox.ini +++ b/tox.ini @@ -61,6 +61,7 @@ commands_pre = -e {toxinidir}/ext/dapr-ext-strands/ \ -e {toxinidir}/ext/flask_dapr/ \ opentelemetry-exporter-zipkin \ + langchain-ollama \ uvicorn [testenv:type] From f7b93180833f401d8678531a0e6944f46dbf248e Mon Sep 17 00:00:00 2001 From: Sergio Herrera <627709+seherv@users.noreply.github.com> Date: Tue, 14 Apr 2026 17:39:34 +0200 Subject: [PATCH 05/20] Address Copilot comments (3) Signed-off-by: Sergio Herrera <627709+seherv@users.noreply.github.com> --- tests/integration/conftest.py | 10 ++++------ tests/integration/test_invoke_binding.py | 4 +++- tests/integration/test_invoke_simple.py | 21 ++++++++------------- tests/integration/test_state_store_query.py | 10 ++++------ tox.ini | 1 - 5 files changed, 19 
insertions(+), 27 deletions(-) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index ade6b2286..343c1146b 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -1,4 +1,4 @@ -import signal +import shlex import subprocess import tempfile import threading @@ -22,8 +22,7 @@ def __init__(self, cwd: Path) -> None: def _spawn(self, args: str) -> subprocess.Popen[str]: return subprocess.Popen( - f'dapr run {args}', - shell=True, + args=('dapr', 'run', *shlex.split(args)), cwd=self._cwd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, @@ -34,7 +33,7 @@ def _spawn(self, args: str) -> subprocess.Popen[str]: def _terminate(proc: subprocess.Popen[str]) -> None: if proc.poll() is not None: return - proc.send_signal(signal.SIGTERM) + proc.terminate() try: proc.wait(timeout=10) except subprocess.TimeoutExpired: @@ -88,8 +87,7 @@ def start(self, args: str, *, wait: int = 5) -> subprocess.Popen[str]: """ output_file = tempfile.NamedTemporaryFile(mode='w+', suffix='.log', delete=False) proc = subprocess.Popen( - f'dapr run {args}', - shell=True, + args=('dapr', 'run', *shlex.split(args)), cwd=self._cwd, stdout=output_file, stderr=subprocess.STDOUT, diff --git a/tests/integration/test_invoke_binding.py b/tests/integration/test_invoke_binding.py index 5e2503fa6..97ce92ae0 100644 --- a/tests/integration/test_invoke_binding.py +++ b/tests/integration/test_invoke_binding.py @@ -50,7 +50,9 @@ def test_invoke_binding(dapr, kafka): 'operation': 'create', 'data': {'id': n, 'message': 'hello world'}, } - httpx.post('http://localhost:3500/v1.0/bindings/kafkaBinding', json=payload) + response = httpx.post('http://localhost:3500/v1.0/bindings/kafkaBinding', json=payload, timeout=5) + response.raise_for_status() + time.sleep(1) time.sleep(5) diff --git a/tests/integration/test_invoke_simple.py b/tests/integration/test_invoke_simple.py index 6852f29d1..57e809fdf 100644 --- a/tests/integration/test_invoke_simple.py +++ 
b/tests/integration/test_invoke_simple.py @@ -1,10 +1,8 @@ -import json -import urllib.request - +import httpx import pytest EXPECTED_RECEIVER = [ - '{"id": 1, "message": "hello world"}', + '{"id":1,"message":"hello world"}', ] @@ -18,18 +16,15 @@ def test_invoke_simple(dapr): # invoke-caller.py runs an infinite loop, so we invoke the method # directly through the sidecar HTTP API with a single call. - req_data = json.dumps({'id': 1, 'message': 'hello world'}).encode() - req = urllib.request.Request( + resp = httpx.post( 'http://localhost:3500/v1.0/invoke/invoke-receiver/method/my-method', - data=req_data, - headers={'Content-Type': 'application/json'}, + json={'id': 1, 'message': 'hello world'}, + timeout=5, ) - with urllib.request.urlopen(req) as resp: - content_type = resp.headers.get('Content-Type', '') - body = resp.read().decode() + resp.raise_for_status() - assert 'text/plain' in content_type - assert 'INVOKE_RECEIVED' in body + assert 'text/plain' in resp.headers.get('content-type', '') + assert 'INVOKE_RECEIVED' in resp.text receiver_output = dapr.stop(receiver) for line in EXPECTED_RECEIVER: diff --git a/tests/integration/test_state_store_query.py b/tests/integration/test_state_store_query.py index 1af276660..31ab3b1c9 100644 --- a/tests/integration/test_state_store_query.py +++ b/tests/integration/test_state_store_query.py @@ -16,18 +16,16 @@ @pytest.fixture() def mongodb(): + # Remove leftover container from a previous crashed run + subprocess.run('docker rm -f pytest-mongodb', shell=True, capture_output=True) subprocess.run( - 'docker run -d --rm -p 27017:27017 --name mongodb mongo:5', + 'docker run -d --rm -p 27017:27017 --name pytest-mongodb mongo:5', shell=True, check=True, capture_output=True, ) yield - subprocess.run( - 'docker kill mongodb', - shell=True, - capture_output=True, - ) + subprocess.run('docker rm -f pytest-mongodb', shell=True, capture_output=True) @pytest.fixture() diff --git a/tox.ini b/tox.ini index 809d8cc77..92c974819 100644 --- 
a/tox.ini +++ b/tox.ini @@ -53,7 +53,6 @@ allowlist_externals=* commands_pre = pip uninstall -y dapr dapr-ext-grpc dapr-ext-fastapi dapr-ext-langgraph dapr-ext-strands dapr-ext-flask dapr-ext-langgraph dapr-ext-strands pip install -r {toxinidir}/dev-requirements.txt \ - -e {toxinidir}/ \ -e {toxinidir}/ext/dapr-ext-workflow/ \ -e {toxinidir}/ext/dapr-ext-grpc/ \ -e {toxinidir}/ext/dapr-ext-fastapi/ \ From b1a74f423e77fde63411f3ae8f32db0dd21f0ca8 Mon Sep 17 00:00:00 2001 From: Sergio Herrera <627709+seherv@users.noreply.github.com> Date: Wed, 15 Apr 2026 10:32:35 +0200 Subject: [PATCH 06/20] Run ruff format Signed-off-by: Sergio Herrera <627709+seherv@users.noreply.github.com> --- tests/integration/test_configuration.py | 1 - tests/integration/test_invoke_binding.py | 6 ++++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/integration/test_configuration.py b/tests/integration/test_configuration.py index 9660e142d..982fc6a52 100644 --- a/tests/integration/test_configuration.py +++ b/tests/integration/test_configuration.py @@ -3,7 +3,6 @@ import pytest - EXPECTED_LINES = [ 'Got key=orderId1 value=100 version=1 metadata={}', 'Got key=orderId2 value=200 version=1 metadata={}', diff --git a/tests/integration/test_invoke_binding.py b/tests/integration/test_invoke_binding.py index 97ce92ae0..fece88487 100644 --- a/tests/integration/test_invoke_binding.py +++ b/tests/integration/test_invoke_binding.py @@ -2,8 +2,8 @@ import time from pathlib import Path -import pytest import httpx +import pytest REPO_ROOT = Path(__file__).resolve().parent.parent.parent BINDING_DIR = REPO_ROOT / 'examples' / 'invoke-binding' @@ -50,7 +50,9 @@ def test_invoke_binding(dapr, kafka): 'operation': 'create', 'data': {'id': n, 'message': 'hello world'}, } - response = httpx.post('http://localhost:3500/v1.0/bindings/kafkaBinding', json=payload, timeout=5) + response = httpx.post( + 'http://localhost:3500/v1.0/bindings/kafkaBinding', json=payload, timeout=5 + ) 
response.raise_for_status() time.sleep(1) From 71e4311c6ce1e3af24d344e8ca92fd4c20d6f790 Mon Sep 17 00:00:00 2001 From: Sergio Herrera <627709+seherv@users.noreply.github.com> Date: Wed, 15 Apr 2026 11:00:20 +0200 Subject: [PATCH 07/20] Run redis-cli flushdb from container Signed-off-by: Sergio Herrera <627709+seherv@users.noreply.github.com> --- tests/integration/test_langgraph_checkpointer.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/integration/test_langgraph_checkpointer.py b/tests/integration/test_langgraph_checkpointer.py index 54282398c..89e4ab77b 100644 --- a/tests/integration/test_langgraph_checkpointer.py +++ b/tests/integration/test_langgraph_checkpointer.py @@ -57,8 +57,11 @@ def ollama(): @pytest.fixture() def flush_redis(): - """This test is not replayable if the checkpointer state store is not clean""" - subprocess.run(['redis-cli', 'FLUSHDB'], capture_output=True) + """This test is not replayable if the checkpointer state store is not clean.""" + subprocess.run( + ['docker', 'exec', 'dapr_redis', 'redis-cli', 'FLUSHDB'], + capture_output=True, + ) @pytest.mark.example_dir('langgraph-checkpointer') From 0a2f7a22da9498dda3c021439e08289805709a62 Mon Sep 17 00:00:00 2001 From: Sergio Herrera Date: Wed, 15 Apr 2026 13:05:18 +0200 Subject: [PATCH 08/20] Add missing test_invoke_http.py Signed-off-by: Sergio Herrera <627709+seherv@users.noreply.github.com> --- tests/integration/test_invoke_http.py | 34 +++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) create mode 100644 tests/integration/test_invoke_http.py diff --git a/tests/integration/test_invoke_http.py b/tests/integration/test_invoke_http.py new file mode 100644 index 000000000..a7d82ac23 --- /dev/null +++ b/tests/integration/test_invoke_http.py @@ -0,0 +1,34 @@ +import pytest + +EXPECTED_RECEIVER = [ + 'Order received : {"id": 1, "message": "hello world"}', + 'Order error : {"id": 2, "message": "hello world"}', +] + +EXPECTED_CALLER = [ + 'text/html', + 
'{"success": true}', + '200', + 'error occurred', + 'MY_CODE', + '503', +] + + +@pytest.mark.example_dir('invoke-http') +def test_invoke_http(dapr): + receiver = dapr.start( + '--app-id invoke-receiver --app-port 8088 --app-protocol http ' + '-- python3 invoke-receiver.py', + wait=5, + ) + caller_output = dapr.run( + '--app-id invoke-caller -- python3 invoke-caller.py', + timeout=30, + ) + for line in EXPECTED_CALLER: + assert line in caller_output, f'Missing in caller output: {line}' + + receiver_output = dapr.stop(receiver) + for line in EXPECTED_RECEIVER: + assert line in receiver_output, f'Missing in receiver output: {line}' From 074b1dc7c533375e52ebf714f702ab90a0fc9240 Mon Sep 17 00:00:00 2001 From: Sergio Herrera <627709+seherv@users.noreply.github.com> Date: Wed, 15 Apr 2026 16:06:22 +0200 Subject: [PATCH 09/20] Address Copilot comments (4) Signed-off-by: Sergio Herrera <627709+seherv@users.noreply.github.com> --- tests/integration/test_langgraph_checkpointer.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/integration/test_langgraph_checkpointer.py b/tests/integration/test_langgraph_checkpointer.py index 89e4ab77b..fd98f6de0 100644 --- a/tests/integration/test_langgraph_checkpointer.py +++ b/tests/integration/test_langgraph_checkpointer.py @@ -17,7 +17,7 @@ def _ollama_ready() -> bool: try: return httpx.get(f'{OLLAMA_URL}/api/tags', timeout=2).is_success - except httpx.ConnectError: + except httpx.RequestError: return False @@ -52,7 +52,7 @@ def ollama(): if started: started.terminate() - started.wait() + started.wait(timeout=10) @pytest.fixture() @@ -61,6 +61,7 @@ def flush_redis(): subprocess.run( ['docker', 'exec', 'dapr_redis', 'redis-cli', 'FLUSHDB'], capture_output=True, + check=True, ) From b07457921dcdebc544da955d8f0b9eaf06267f0d Mon Sep 17 00:00:00 2001 From: Sergio Herrera <627709+seherv@users.noreply.github.com> Date: Fri, 17 Apr 2026 13:41:04 +0200 Subject: [PATCH 10/20] Editable install for examples/ validation 
Signed-off-by: Sergio Herrera <627709+seherv@users.noreply.github.com> --- tox.ini | 1 + 1 file changed, 1 insertion(+) diff --git a/tox.ini b/tox.ini index 92c974819..809d8cc77 100644 --- a/tox.ini +++ b/tox.ini @@ -53,6 +53,7 @@ allowlist_externals=* commands_pre = pip uninstall -y dapr dapr-ext-grpc dapr-ext-fastapi dapr-ext-langgraph dapr-ext-strands dapr-ext-flask dapr-ext-langgraph dapr-ext-strands pip install -r {toxinidir}/dev-requirements.txt \ + -e {toxinidir}/ \ -e {toxinidir}/ext/dapr-ext-workflow/ \ -e {toxinidir}/ext/dapr-ext-grpc/ \ -e {toxinidir}/ext/dapr-ext-fastapi/ \ From a4912dc2efabc099fcb0c440b377dd620d749f22 Mon Sep 17 00:00:00 2001 From: Sergio Herrera <627709+seherv@users.noreply.github.com> Date: Mon, 20 Apr 2026 11:30:18 +0200 Subject: [PATCH 11/20] Add timeout to cmd in test_langgraph_checkpointer.py Signed-off-by: Sergio Herrera <627709+seherv@users.noreply.github.com> --- tests/integration/test_langgraph_checkpointer.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/integration/test_langgraph_checkpointer.py b/tests/integration/test_langgraph_checkpointer.py index fd98f6de0..75b6f69c2 100644 --- a/tests/integration/test_langgraph_checkpointer.py +++ b/tests/integration/test_langgraph_checkpointer.py @@ -62,6 +62,7 @@ def flush_redis(): ['docker', 'exec', 'dapr_redis', 'redis-cli', 'FLUSHDB'], capture_output=True, check=True, + timeout=10, ) From e200d3e0225f4c08c4156cd0b93ad29afbb0a26f Mon Sep 17 00:00:00 2001 From: Sergio Herrera <627709+seherv@users.noreply.github.com> Date: Mon, 20 Apr 2026 12:51:45 +0200 Subject: [PATCH 12/20] Address Copilot comments (4) Signed-off-by: Sergio Herrera <627709+seherv@users.noreply.github.com> --- examples/distributed_lock/lock.py | 2 +- tests/integration/conftest.py | 1 + tests/integration/test_distributed_lock.py | 2 +- tests/integration/test_invoke_binding.py | 43 ++++++++++++------- .../test_langgraph_checkpointer.py | 19 ++++++-- tests/integration/test_pubsub_simple.py | 3 +- 
tests/integration/test_workflow.py | 6 +-- 7 files changed, 51 insertions(+), 25 deletions(-) diff --git a/examples/distributed_lock/lock.py b/examples/distributed_lock/lock.py index 2f6364065..65c752c27 100644 --- a/examples/distributed_lock/lock.py +++ b/examples/distributed_lock/lock.py @@ -27,7 +27,7 @@ def main(): print('Will try to acquire a lock from lock store named [%s]' % store_name) print('The lock is for a resource named [%s]' % resource_id) print('The client identifier is [%s]' % client_id) - print('The lock will will expire in %s seconds.' % expiry_in_seconds) + print('The lock will expire in %s seconds.' % expiry_in_seconds) with dapr.try_lock(store_name, resource_id, client_id, expiry_in_seconds) as lock_result: assert lock_result.success, 'Failed to acquire the lock. Aborting.' diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 343c1146b..212ce140f 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -33,6 +33,7 @@ def _spawn(self, args: str) -> subprocess.Popen[str]: def _terminate(proc: subprocess.Popen[str]) -> None: if proc.poll() is not None: return + proc.terminate() try: proc.wait(timeout=10) diff --git a/tests/integration/test_distributed_lock.py b/tests/integration/test_distributed_lock.py index 1d353484d..a2ac0c6b0 100644 --- a/tests/integration/test_distributed_lock.py +++ b/tests/integration/test_distributed_lock.py @@ -4,7 +4,7 @@ 'Will try to acquire a lock from lock store named [lockstore]', 'The lock is for a resource named [example-lock-resource]', 'The client identifier is [example-client-id]', - 'The lock will will expire in 60 seconds.', + 'The lock will expire in 60 seconds.', 'Lock acquired successfully!!!', 'We already released the lock so unlocking will not work.', 'We tried to unlock it anyway and got back [UnlockResponseStatus.lock_does_not_exist]', diff --git a/tests/integration/test_invoke_binding.py b/tests/integration/test_invoke_binding.py index 
fece88487..2fdad88fa 100644 --- a/tests/integration/test_invoke_binding.py +++ b/tests/integration/test_invoke_binding.py @@ -17,22 +17,35 @@ @pytest.fixture() def kafka(): - subprocess.run( - 'docker compose -f ./docker-compose-single-kafka.yml up -d', - shell=True, - cwd=BINDING_DIR, - check=True, - capture_output=True, - ) - time.sleep(30) + try: + subprocess.run( + 'docker compose -f ./docker-compose-single-kafka.yml up -d', + shell=True, + cwd=BINDING_DIR, + check=True, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + timeout=120, + ) + except subprocess.TimeoutExpired as e: + output = (e.stdout or b'').decode(errors='replace') + pytest.fail(f'Timed out starting Kafka:\n{output}') + yield - subprocess.run( - 'docker compose -f ./docker-compose-single-kafka.yml down', - shell=True, - cwd=BINDING_DIR, - check=True, - capture_output=True, - ) + + try: + subprocess.run( + 'docker compose -f ./docker-compose-single-kafka.yml down', + shell=True, + cwd=BINDING_DIR, + check=True, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + timeout=120, + ) + except subprocess.TimeoutExpired as e: + output = (e.stdout or b'').decode(errors='replace') + pytest.fail(f'Timed out stopping Kafka:\n{output}') @pytest.mark.example_dir('invoke-binding') diff --git a/tests/integration/test_langgraph_checkpointer.py b/tests/integration/test_langgraph_checkpointer.py index 75b6f69c2..9754d60e1 100644 --- a/tests/integration/test_langgraph_checkpointer.py +++ b/tests/integration/test_langgraph_checkpointer.py @@ -5,7 +5,7 @@ import pytest OLLAMA_URL = 'http://localhost:11434' -MODEL = 'llama3.2:3b' +MODEL = 'llama3.2:latest' EXPECTED_LINES = [ 'Add 3 and 4.', @@ -22,10 +22,23 @@ def _ollama_ready() -> bool: def _model_available() -> bool: - resp = httpx.get(f'{OLLAMA_URL}/api/tags', timeout=5) + try: + resp = httpx.get(f'{OLLAMA_URL}/api/tags', timeout=5) + except httpx.RequestError: + return False + return any(m['name'] == MODEL for m in resp.json().get('models', [])) +def 
_wait_for_ollama(timeout: float = 30.0, interval: float = 0.5) -> None: + deadline = time.monotonic() + timeout + while time.monotonic() < deadline: + if _ollama_ready(): + return + time.sleep(interval) + raise TimeoutError(f'ollama serve did not become ready within {timeout}s') + + @pytest.fixture() def ollama(): """Ensure Ollama is running and the required model is pulled. @@ -43,7 +56,7 @@ def ollama(): ) except FileNotFoundError: pytest.skip('ollama is not installed') - time.sleep(10) + _wait_for_ollama() if not _model_available(): subprocess.run(['ollama', 'pull', MODEL], check=True, capture_output=True) diff --git a/tests/integration/test_pubsub_simple.py b/tests/integration/test_pubsub_simple.py index 1b4f84981..4d18eba61 100644 --- a/tests/integration/test_pubsub_simple.py +++ b/tests/integration/test_pubsub_simple.py @@ -1,3 +1,4 @@ +import time import pytest EXPECTED_SUBSCRIBER = [ @@ -35,8 +36,6 @@ def test_pubsub_simple(dapr): for line in EXPECTED_PUBLISHER: assert line in publisher_output, f'Missing in publisher output: {line}' - import time - time.sleep(5) subscriber_output = dapr.stop(subscriber) for line in EXPECTED_SUBSCRIBER: diff --git a/tests/integration/test_workflow.py b/tests/integration/test_workflow.py index b05b3a299..bf641c6da 100644 --- a/tests/integration/test_workflow.py +++ b/tests/integration/test_workflow.py @@ -27,20 +27,20 @@ @pytest.mark.example_dir('workflow') def test_task_chaining(dapr): - output = dapr.run('-- python3 task_chaining.py', timeout=30) + output = dapr.run('--app-id workflow-task-chaining -- python3 task_chaining.py', timeout=30) for line in EXPECTED_TASK_CHAINING: assert line in output, f'Missing in output: {line}' @pytest.mark.example_dir('workflow') def test_fan_out_fan_in(dapr): - output = dapr.run('-- python3 fan_out_fan_in.py', timeout=60) + output = dapr.run('--app-id workflow-fan-out-fan-in -- python3 fan_out_fan_in.py', timeout=60) for line in EXPECTED_FAN_OUT_FAN_IN: assert line in output, f'Missing in 
output: {line}' @pytest.mark.example_dir('workflow') def test_simple_workflow(dapr): - output = dapr.run('-- python3 simple.py', timeout=60) + output = dapr.run('--app-id workflow-simple -- python3 simple.py', timeout=60) for line in EXPECTED_SIMPLE: assert line in output, f'Missing in output: {line}' From 0617f35a9ab4a6c74aca236cc79d964d120f0eb1 Mon Sep 17 00:00:00 2001 From: Sergio Herrera <627709+seherv@users.noreply.github.com> Date: Mon, 20 Apr 2026 13:54:30 +0200 Subject: [PATCH 13/20] Remove all subprocess(shell=True) calls Signed-off-by: Sergio Herrera <627709+seherv@users.noreply.github.com> --- tests/integration/test_configuration.py | 9 +++------ tests/integration/test_crypto.py | 10 +++++----- tests/integration/test_invoke_binding.py | 6 ++---- tests/integration/test_state_store_query.py | 7 +++---- 4 files changed, 13 insertions(+), 19 deletions(-) diff --git a/tests/integration/test_configuration.py b/tests/integration/test_configuration.py index 982fc6a52..6eccdf594 100644 --- a/tests/integration/test_configuration.py +++ b/tests/integration/test_configuration.py @@ -15,14 +15,12 @@ def redis_config(): """Seed configuration values in Redis before the test.""" subprocess.run( - 'docker exec dapr_redis redis-cli SET orderId1 "100||1"', - shell=True, + ('docker', 'exec', 'dapr_redis', 'redis-cli', 'SET', 'orderId1', '100||1'), check=True, capture_output=True, ) subprocess.run( - 'docker exec dapr_redis redis-cli SET orderId2 "200||1"', - shell=True, + ('docker', 'exec', 'dapr_redis', 'redis-cli', 'SET', 'orderId2', '200||1'), check=True, capture_output=True, ) @@ -36,8 +34,7 @@ def test_configuration(dapr, redis_config): ) # Update Redis to trigger the subscription notification subprocess.run( - 'docker exec dapr_redis redis-cli SET orderId2 "210||2"', - shell=True, + ('docker', 'exec', 'dapr_redis', 'redis-cli', 'SET', 'orderId2', '210||2'), check=True, capture_output=True, ) diff --git a/tests/integration/test_crypto.py 
b/tests/integration/test_crypto.py index 1b7a4c527..0aea4ed36 100644 --- a/tests/integration/test_crypto.py +++ b/tests/integration/test_crypto.py @@ -22,16 +22,16 @@ def crypto_keys(): keys_dir = CRYPTO_DIR / 'keys' keys_dir.mkdir(exist_ok=True) subprocess.run( - 'openssl genpkey -algorithm RSA -pkeyopt rsa_keygen_bits:4096 ' - '-out keys/rsa-private-key.pem', - shell=True, + ( + 'openssl', 'genpkey', '-algorithm', 'RSA', '-pkeyopt', 'rsa_keygen_bits:4096', + '-out', 'keys/rsa-private-key.pem', + ), cwd=CRYPTO_DIR, check=True, capture_output=True, ) subprocess.run( - 'openssl rand -out keys/symmetric-key-256 32', - shell=True, + ('openssl', 'rand', '-out', 'keys/symmetric-key-256', '32'), cwd=CRYPTO_DIR, check=True, capture_output=True, diff --git a/tests/integration/test_invoke_binding.py b/tests/integration/test_invoke_binding.py index 2fdad88fa..002d597c0 100644 --- a/tests/integration/test_invoke_binding.py +++ b/tests/integration/test_invoke_binding.py @@ -19,8 +19,7 @@ def kafka(): try: subprocess.run( - 'docker compose -f ./docker-compose-single-kafka.yml up -d', - shell=True, + ('docker', 'compose', '-f', './docker-compose-single-kafka.yml', 'up', '-d'), cwd=BINDING_DIR, check=True, stdout=subprocess.PIPE, @@ -35,8 +34,7 @@ def kafka(): try: subprocess.run( - 'docker compose -f ./docker-compose-single-kafka.yml down', - shell=True, + ('docker', 'compose', '-f', './docker-compose-single-kafka.yml', 'down'), cwd=BINDING_DIR, check=True, stdout=subprocess.PIPE, diff --git a/tests/integration/test_state_store_query.py b/tests/integration/test_state_store_query.py index 31ab3b1c9..1eca1dc18 100644 --- a/tests/integration/test_state_store_query.py +++ b/tests/integration/test_state_store_query.py @@ -17,15 +17,14 @@ @pytest.fixture() def mongodb(): # Remove leftover container from a previous crashed run - subprocess.run('docker rm -f pytest-mongodb', shell=True, capture_output=True) + subprocess.run(('docker', 'rm', '-f', 'pytest-mongodb'), capture_output=True) 
subprocess.run( - 'docker run -d --rm -p 27017:27017 --name pytest-mongodb mongo:5', - shell=True, + ('docker', 'run', '-d', '--rm', '-p', '27017:27017', '--name', 'pytest-mongodb', 'mongo:5'), check=True, capture_output=True, ) yield - subprocess.run('docker rm -f pytest-mongodb', shell=True, capture_output=True) + subprocess.run(('docker', 'rm', '-f', 'pytest-mongodb'), capture_output=True) @pytest.fixture() From 3fcd2ac5e51827d390c32799a8db69f41817b8e8 Mon Sep 17 00:00:00 2001 From: Sergio Herrera <627709+seherv@users.noreply.github.com> Date: Tue, 21 Apr 2026 17:25:40 +0200 Subject: [PATCH 14/20] Address PR feedback Signed-off-by: Sergio Herrera <627709+seherv@users.noreply.github.com> --- CLAUDE.md | 9 ++- pyproject.toml | 1 + tests/_process_utils.py | 38 ++++++++++ tests/integration/conftest.py | 70 +++++++++---------- tests/integration/test_configuration.py | 6 +- tests/integration/test_crypto.py | 10 ++- tests/integration/test_demo_actor.py | 4 +- tests/integration/test_grpc_proxying.py | 4 +- tests/integration/test_invoke_binding.py | 11 ++- tests/integration/test_invoke_custom_data.py | 8 +-- tests/integration/test_invoke_http.py | 4 +- tests/integration/test_invoke_simple.py | 6 +- tests/integration/test_jobs.py | 4 +- .../test_langgraph_checkpointer.py | 1 + tests/integration/test_metadata.py | 3 +- tests/integration/test_pubsub_simple.py | 6 +- tests/integration/test_pubsub_streaming.py | 12 ++-- .../test_pubsub_streaming_async.py | 12 ++-- tests/integration/test_state_store_query.py | 28 ++++++-- tests/integration/test_w3c_tracing.py | 4 +- tox.ini | 6 +- 21 files changed, 152 insertions(+), 95 deletions(-) create mode 100644 tests/_process_utils.py diff --git a/CLAUDE.md b/CLAUDE.md index 84e2b1166..490328fe9 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -1,11 +1,14 @@ @AGENTS.md Use pathlib instead of os.path. +Use httpx instead of urllib. +subprocess(`shell=True`) is used only when it makes the code more readable. Use either shlex or args lists. 
+subprocess calls should have a reasonable timeout. Use modern Python (3.10+) features. Make all code strongly typed. Keep conditional nesting to a minimum, and use guard clauses when possible. -Aim for medium "visual complexity": use intermediate variables to store results of nested/complex function calls, but don't create a new variable for everything. -Avoid comments unless there is an unusual gotcha, a complex algorithm or anything an experienced code reviewer needs to be aware of. Focus on making better Google-style docstrings instead. +Aim for medium "visual complexity": use intermediate variables to store results of nested/complex function calls, but don't create a new variable for everything. +Avoid comments unless there is a gotcha, a complex algorithm or anything an experienced code reviewer needs to be aware of. Focus on making better Google-style docstrings instead. The user is not always right. Be skeptical and do not blindly comply if something doesn't make sense. -Code should be production-ready. \ No newline at end of file +Code should be cross-platform and production ready. diff --git a/pyproject.toml b/pyproject.toml index f2dc537f0..7f0d3cbb1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,3 +27,4 @@ quote-style = 'single' markers = [ 'example_dir(name): set the example directory for the dapr fixture', ] +pythonpath = ["."] diff --git a/tests/_process_utils.py b/tests/_process_utils.py new file mode 100644 index 000000000..0fb6fd483 --- /dev/null +++ b/tests/_process_utils.py @@ -0,0 +1,38 @@ +"""Cross-platform helpers for managing subprocess trees in tests. + +``dapr run`` spawns ``daprd`` and the user's app as siblings; signaling only +the immediate process can orphan them if the signal isn't forwarded, which +leaves stale listeners on the test ports across runs. Putting the whole +subtree in its own group lets cleanup take them all down together. 
+""" + +from __future__ import annotations + +import os +import signal +import subprocess +import sys +from typing import Any + + +def get_kwargs_for_process_group() -> dict[str, Any]: + """Popen kwargs that place the child at the head of its own process group.""" + if sys.platform == 'win32': + return {'creationflags': subprocess.CREATE_NEW_PROCESS_GROUP} + return {'start_new_session': True} + + +def terminate_process_group(proc: subprocess.Popen[str], *, force: bool = False) -> None: + """Sends the right termination signal to an entire process group.""" + if sys.platform == 'win32': + if force: + proc.kill() + else: + proc.send_signal(signal.CTRL_BREAK_EVENT) + return + + sig = signal.SIGKILL if force else signal.SIGTERM + try: + os.killpg(os.getpgid(proc.pid), sig) + except ProcessLookupError: + pass diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 212ce140f..33be85c66 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -8,10 +8,16 @@ import pytest +from tests._process_utils import get_kwargs_for_process_group, terminate_process_group + REPO_ROOT = Path(__file__).resolve().parent.parent.parent EXAMPLES_DIR = REPO_ROOT / 'examples' +def pytest_configure(config: pytest.Config) -> None: + config.addinivalue_line('markers', 'example_dir(name): set the example directory for a test') + + class DaprRunner: """Helper to run `dapr run` commands and capture output.""" @@ -20,25 +26,16 @@ def __init__(self, cwd: Path) -> None: self._bg_process: subprocess.Popen[str] | None = None self._bg_output_file: IO[str] | None = None - def _spawn(self, args: str) -> subprocess.Popen[str]: - return subprocess.Popen( - args=('dapr', 'run', *shlex.split(args)), - cwd=self._cwd, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - text=True, - ) - @staticmethod def _terminate(proc: subprocess.Popen[str]) -> None: if proc.poll() is not None: return - proc.terminate() + terminate_process_group(proc) try: proc.wait(timeout=10) 
except subprocess.TimeoutExpired: - proc.kill() + terminate_process_group(proc, force=True) proc.wait() def run(self, args: str, *, timeout: int = 30, until: list[str] | None = None) -> str: @@ -53,73 +50,72 @@ def run(self, args: str, *, timeout: int = 30, until: list[str] | None = None) - until: If provided, the process is terminated as soon as every string in this list has appeared in the accumulated output. """ - proc = self._spawn(args) + proc = subprocess.Popen( + args=('dapr', 'run', *shlex.split(args)), + cwd=self._cwd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + text=True, + **get_kwargs_for_process_group(), + ) lines: list[str] = [] - remaining = set(until) if until else set() assert proc.stdout is not None # Kill the process if it exceeds the timeout. A background timer is # needed because `for line in proc.stdout` blocks indefinitely when # the child never exits. - timer = threading.Timer(interval=timeout, function=proc.kill) + timer = threading.Timer( + interval=timeout, function=lambda: terminate_process_group(proc, force=True) + ) timer.start() try: for line in proc.stdout: print(line, end='', flush=True) lines.append(line) - if remaining: - output_so_far = ''.join(lines) - remaining = {exp for exp in remaining if exp not in output_so_far} - if not remaining: - break + if until and all(exp in ''.join(lines) for exp in until): + break finally: timer.cancel() self._terminate(proc) return ''.join(lines) - def start(self, args: str, *, wait: int = 5) -> subprocess.Popen[str]: - """Start a long-lived background service and return the process handle. + def start(self, args: str, *, wait: int = 5) -> None: + """Start a long-lived background service. Use this for servers/subscribers that must stay alive while a second process runs via ``run()``. Call ``stop()`` to terminate and collect output. Stdout is written to a temp file to avoid pipe-buffer deadlocks. 
""" - output_file = tempfile.NamedTemporaryFile(mode='w+', suffix='.log', delete=False) + output_file = tempfile.NamedTemporaryFile(mode='w+', suffix='.log') proc = subprocess.Popen( args=('dapr', 'run', *shlex.split(args)), cwd=self._cwd, stdout=output_file, stderr=subprocess.STDOUT, text=True, + **get_kwargs_for_process_group(), ) self._bg_process = proc self._bg_output_file = output_file time.sleep(wait) - return proc - def stop(self, proc: subprocess.Popen[str]) -> str: - """Stop a background process and return its captured output.""" - self._terminate(proc) + def stop(self) -> str: + """Stop the background service and return its captured output.""" + if self._bg_process is None: + return '' + self._terminate(self._bg_process) self._bg_process = None return self._read_and_close_output() - def cleanup(self) -> None: - """Stop the background process if still running.""" - if self._bg_process is not None: - self._terminate(self._bg_process) - self._bg_process = None - self._read_and_close_output() - def _read_and_close_output(self) -> str: if self._bg_output_file is None: return '' - output_path = Path(self._bg_output_file.name) + self._bg_output_file.seek(0) + output = self._bg_output_file.read() self._bg_output_file.close() self._bg_output_file = None - output = output_path.read_text() - output_path.unlink(missing_ok=True) print(output, end='', flush=True) return output @@ -141,4 +137,4 @@ def test_something(dapr): runner = DaprRunner(cwd) yield runner - runner.cleanup() + runner.stop() diff --git a/tests/integration/test_configuration.py b/tests/integration/test_configuration.py index 6eccdf594..45f76624a 100644 --- a/tests/integration/test_configuration.py +++ b/tests/integration/test_configuration.py @@ -3,6 +3,8 @@ import pytest +REDIS_CONTAINER = 'dapr_redis' + EXPECTED_LINES = [ 'Got key=orderId1 value=100 version=1 metadata={}', 'Got key=orderId2 value=200 version=1 metadata={}', @@ -28,7 +30,7 @@ def redis_config(): 
@pytest.mark.example_dir('configuration') def test_configuration(dapr, redis_config): - proc = dapr.start( + dapr.start( '--app-id configexample --resources-path components/ -- python3 configuration.py', wait=5, ) @@ -42,6 +44,6 @@ def test_configuration(dapr, redis_config): # Wait long enough for the full script to finish. time.sleep(10) - output = dapr.stop(proc) + output = dapr.stop() for line in EXPECTED_LINES: assert line in output, f'Missing in output: {line}' diff --git a/tests/integration/test_crypto.py b/tests/integration/test_crypto.py index 0aea4ed36..9e7277195 100644 --- a/tests/integration/test_crypto.py +++ b/tests/integration/test_crypto.py @@ -23,8 +23,14 @@ def crypto_keys(): keys_dir.mkdir(exist_ok=True) subprocess.run( ( - 'openssl', 'genpkey', '-algorithm', 'RSA', '-pkeyopt', 'rsa_keygen_bits:4096', - '-out', 'keys/rsa-private-key.pem', + 'openssl', + 'genpkey', + '-algorithm', + 'RSA', + '-pkeyopt', + 'rsa_keygen_bits:4096', + '-out', + 'keys/rsa-private-key.pem', ), cwd=CRYPTO_DIR, check=True, diff --git a/tests/integration/test_demo_actor.py b/tests/integration/test_demo_actor.py index bef8e5476..e9cd5b444 100644 --- a/tests/integration/test_demo_actor.py +++ b/tests/integration/test_demo_actor.py @@ -29,7 +29,7 @@ @pytest.mark.example_dir('demo_actor/demo_actor') def test_demo_actor(dapr): - service = dapr.start( + dapr.start( '--app-id demo-actor --app-port 3000 -- uvicorn --port 3000 demo_actor_service:app', wait=10, ) @@ -40,6 +40,6 @@ def test_demo_actor(dapr): for line in EXPECTED_CLIENT: assert line in client_output, f'Missing in client output: {line}' - service_output = dapr.stop(service) + service_output = dapr.stop() for line in EXPECTED_SERVICE: assert line in service_output, f'Missing in service output: {line}' diff --git a/tests/integration/test_grpc_proxying.py b/tests/integration/test_grpc_proxying.py index a59f03685..15fffb799 100644 --- a/tests/integration/test_grpc_proxying.py +++ b/tests/integration/test_grpc_proxying.py @@ 
-7,7 +7,7 @@ @pytest.mark.example_dir('grpc_proxying') def test_grpc_proxying(dapr): - receiver = dapr.start( + dapr.start( '--app-id invoke-receiver --app-protocol grpc --app-port 50051 ' '--config config.yaml -- python invoke-receiver.py', wait=5, @@ -19,4 +19,4 @@ def test_grpc_proxying(dapr): for line in EXPECTED_CALLER: assert line in caller_output, f'Missing in caller output: {line}' - dapr.stop(receiver) + dapr.stop() diff --git a/tests/integration/test_invoke_binding.py b/tests/integration/test_invoke_binding.py index 002d597c0..710c0c1f8 100644 --- a/tests/integration/test_invoke_binding.py +++ b/tests/integration/test_invoke_binding.py @@ -30,6 +30,12 @@ def kafka(): output = (e.stdout or b'').decode(errors='replace') pytest.fail(f'Timed out starting Kafka:\n{output}') + # ``docker compose up -d`` returns once containers are created, but the + # wurstmeister Kafka image takes several seconds of broker registration + # before it can serve metadata. Without this wait, daprd races the broker + # and fails component init with "client has run out of available brokers". 
+ time.sleep(20) + yield try: @@ -48,7 +54,7 @@ def kafka(): @pytest.mark.example_dir('invoke-binding') def test_invoke_binding(dapr, kafka): - receiver = dapr.start( + dapr.start( '--app-id receiver --app-protocol grpc --app-port 50051 ' '--dapr-http-port 3500 --resources-path ./components python3 invoke-input-binding.py', wait=5, @@ -68,7 +74,6 @@ def test_invoke_binding(dapr, kafka): time.sleep(1) - time.sleep(5) - receiver_output = dapr.stop(receiver) + receiver_output = dapr.stop() for line in EXPECTED_MESSAGES: assert line in receiver_output, f'Missing in receiver output: {line}' diff --git a/tests/integration/test_invoke_custom_data.py b/tests/integration/test_invoke_custom_data.py index 11acdc106..c64bb04ed 100644 --- a/tests/integration/test_invoke_custom_data.py +++ b/tests/integration/test_invoke_custom_data.py @@ -13,17 +13,17 @@ @pytest.mark.example_dir('invoke-custom-data') def test_invoke_custom_data(dapr): - receiver = dapr.start( - '--app-id invoke-receiver --app-protocol grpc --app-port 50051 python3 invoke-receiver.py', + dapr.start( + '--app-id invoke-receiver --app-protocol grpc --app-port 50051 -- python3 invoke-receiver.py', wait=5, ) caller_output = dapr.run( - '--app-id invoke-caller --app-protocol grpc python3 invoke-caller.py', + '--app-id invoke-caller --app-protocol grpc -- python3 invoke-caller.py', timeout=30, ) for line in EXPECTED_CALLER: assert line in caller_output, f'Missing in caller output: {line}' - receiver_output = dapr.stop(receiver) + receiver_output = dapr.stop() for line in EXPECTED_RECEIVER: assert line in receiver_output, f'Missing in receiver output: {line}' diff --git a/tests/integration/test_invoke_http.py b/tests/integration/test_invoke_http.py index a7d82ac23..31fc0fd56 100644 --- a/tests/integration/test_invoke_http.py +++ b/tests/integration/test_invoke_http.py @@ -17,7 +17,7 @@ @pytest.mark.example_dir('invoke-http') def test_invoke_http(dapr): - receiver = dapr.start( + dapr.start( '--app-id invoke-receiver 
--app-port 8088 --app-protocol http ' '-- python3 invoke-receiver.py', wait=5, @@ -29,6 +29,6 @@ def test_invoke_http(dapr): for line in EXPECTED_CALLER: assert line in caller_output, f'Missing in caller output: {line}' - receiver_output = dapr.stop(receiver) + receiver_output = dapr.stop() for line in EXPECTED_RECEIVER: assert line in receiver_output, f'Missing in receiver output: {line}' diff --git a/tests/integration/test_invoke_simple.py b/tests/integration/test_invoke_simple.py index 57e809fdf..b9f900f90 100644 --- a/tests/integration/test_invoke_simple.py +++ b/tests/integration/test_invoke_simple.py @@ -8,9 +8,9 @@ @pytest.mark.example_dir('invoke-simple') def test_invoke_simple(dapr): - receiver = dapr.start( + dapr.start( '--app-id invoke-receiver --app-protocol grpc --app-port 50051 ' - '--dapr-http-port 3500 python3 invoke-receiver.py', + '--dapr-http-port 3500 -- python3 invoke-receiver.py', wait=5, ) @@ -26,6 +26,6 @@ def test_invoke_simple(dapr): assert 'text/plain' in resp.headers.get('content-type', '') assert 'INVOKE_RECEIVED' in resp.text - receiver_output = dapr.stop(receiver) + receiver_output = dapr.stop() for line in EXPECTED_RECEIVER: assert line in receiver_output, f'Missing in receiver output: {line}' diff --git a/tests/integration/test_jobs.py b/tests/integration/test_jobs.py index 61eb67560..591350d39 100644 --- a/tests/integration/test_jobs.py +++ b/tests/integration/test_jobs.py @@ -41,10 +41,10 @@ def test_job_management(dapr): @pytest.mark.example_dir('jobs') def test_job_processing(dapr): - proc = dapr.start( + dapr.start( '--app-id jobs-workflow --app-protocol grpc --app-port 50051 python3 job_processing.py', wait=15, ) - output = dapr.stop(proc) + output = dapr.stop() for line in EXPECTED_PROCESSING: assert line in output, f'Missing in output: {line}' diff --git a/tests/integration/test_langgraph_checkpointer.py b/tests/integration/test_langgraph_checkpointer.py index 9754d60e1..07f58788f 100644 --- 
a/tests/integration/test_langgraph_checkpointer.py +++ b/tests/integration/test_langgraph_checkpointer.py @@ -24,6 +24,7 @@ def _ollama_ready() -> bool: def _model_available() -> bool: try: resp = httpx.get(f'{OLLAMA_URL}/api/tags', timeout=5) + resp.raise_for_status() except httpx.RequestError: return False diff --git a/tests/integration/test_metadata.py b/tests/integration/test_metadata.py index 5beef49da..fb9641342 100644 --- a/tests/integration/test_metadata.py +++ b/tests/integration/test_metadata.py @@ -15,7 +15,8 @@ @pytest.mark.example_dir('metadata') def test_metadata(dapr): output = dapr.run( - '--app-id=my-metadata-app --app-protocol grpc --resources-path components/ python3 app.py', + '--app-id=my-metadata-app --app-protocol grpc --resources-path components/ ' + '-- python3 app.py', timeout=10, ) for line in EXPECTED_LINES: diff --git a/tests/integration/test_pubsub_simple.py b/tests/integration/test_pubsub_simple.py index 4d18eba61..8dc63508f 100644 --- a/tests/integration/test_pubsub_simple.py +++ b/tests/integration/test_pubsub_simple.py @@ -1,4 +1,3 @@ -import time import pytest EXPECTED_SUBSCRIBER = [ @@ -24,7 +23,7 @@ @pytest.mark.example_dir('pubsub-simple') def test_pubsub_simple(dapr): - subscriber = dapr.start( + dapr.start( '--app-id python-subscriber --app-protocol grpc --app-port 50051 -- python3 subscriber.py', wait=5, ) @@ -36,7 +35,6 @@ def test_pubsub_simple(dapr): for line in EXPECTED_PUBLISHER: assert line in publisher_output, f'Missing in publisher output: {line}' - time.sleep(5) - subscriber_output = dapr.stop(subscriber) + subscriber_output = dapr.stop() for line in EXPECTED_SUBSCRIBER: assert line in subscriber_output, f'Missing in subscriber output: {line}' diff --git a/tests/integration/test_pubsub_streaming.py b/tests/integration/test_pubsub_streaming.py index 81b3055bd..7e6ed0298 100644 --- a/tests/integration/test_pubsub_streaming.py +++ b/tests/integration/test_pubsub_streaming.py @@ -1,5 +1,3 @@ -import time - import pytest 
EXPECTED_SUBSCRIBER = [ @@ -31,7 +29,7 @@ @pytest.mark.example_dir('pubsub-streaming') def test_pubsub_streaming(dapr): - subscriber = dapr.start( + dapr.start( '--app-id python-subscriber --app-protocol grpc -- python3 subscriber.py --topic=TOPIC_A1', wait=5, ) @@ -43,15 +41,14 @@ def test_pubsub_streaming(dapr): for line in EXPECTED_PUBLISHER: assert line in publisher_output, f'Missing in publisher output: {line}' - time.sleep(5) - subscriber_output = dapr.stop(subscriber) + subscriber_output = dapr.stop() for line in EXPECTED_SUBSCRIBER: assert line in subscriber_output, f'Missing in subscriber output: {line}' @pytest.mark.example_dir('pubsub-streaming') def test_pubsub_streaming_handler(dapr): - subscriber = dapr.start( + dapr.start( '--app-id python-subscriber --app-protocol grpc -- python3 subscriber-handler.py --topic=TOPIC_A2', wait=5, ) @@ -63,7 +60,6 @@ def test_pubsub_streaming_handler(dapr): for line in EXPECTED_PUBLISHER: assert line in publisher_output, f'Missing in publisher output: {line}' - time.sleep(5) - subscriber_output = dapr.stop(subscriber) + subscriber_output = dapr.stop() for line in EXPECTED_HANDLER_SUBSCRIBER: assert line in subscriber_output, f'Missing in subscriber output: {line}' diff --git a/tests/integration/test_pubsub_streaming_async.py b/tests/integration/test_pubsub_streaming_async.py index d98a9a670..4ea446968 100644 --- a/tests/integration/test_pubsub_streaming_async.py +++ b/tests/integration/test_pubsub_streaming_async.py @@ -1,5 +1,3 @@ -import time - import pytest EXPECTED_SUBSCRIBER = [ @@ -31,7 +29,7 @@ @pytest.mark.example_dir('pubsub-streaming-async') def test_pubsub_streaming_async(dapr): - subscriber = dapr.start( + dapr.start( '--app-id python-subscriber --app-protocol grpc -- python3 subscriber.py --topic=TOPIC_B1', wait=5, ) @@ -43,15 +41,14 @@ def test_pubsub_streaming_async(dapr): for line in EXPECTED_PUBLISHER: assert line in publisher_output, f'Missing in publisher output: {line}' - time.sleep(5) - 
subscriber_output = dapr.stop(subscriber) + subscriber_output = dapr.stop() for line in EXPECTED_SUBSCRIBER: assert line in subscriber_output, f'Missing in subscriber output: {line}' @pytest.mark.example_dir('pubsub-streaming-async') def test_pubsub_streaming_async_handler(dapr): - subscriber = dapr.start( + dapr.start( '--app-id python-subscriber --app-protocol grpc -- python3 subscriber-handler.py --topic=TOPIC_B2', wait=5, ) @@ -63,7 +60,6 @@ def test_pubsub_streaming_async_handler(dapr): for line in EXPECTED_PUBLISHER: assert line in publisher_output, f'Missing in publisher output: {line}' - time.sleep(5) - subscriber_output = dapr.stop(subscriber) + subscriber_output = dapr.stop() for line in EXPECTED_HANDLER_SUBSCRIBER: assert line in subscriber_output, f'Missing in subscriber output: {line}' diff --git a/tests/integration/test_state_store_query.py b/tests/integration/test_state_store_query.py index 1eca1dc18..7de1c1f1a 100644 --- a/tests/integration/test_state_store_query.py +++ b/tests/integration/test_state_store_query.py @@ -1,7 +1,11 @@ import subprocess +from pathlib import Path +import httpx import pytest +EXAMPLES_DIR = Path(__file__).resolve().parent.parent.parent / 'examples' + EXPECTED_LINES = [ '1 {"city": "Seattle", "person": {"id": 1036.0, "org": "Dev Ops"}, "state": "WA"}', '4 {"city": "Spokane", "person": {"id": 1042.0, "org": "Dev Ops"}, "state": "WA"}', @@ -29,13 +33,23 @@ def mongodb(): @pytest.fixture() def import_data(mongodb, dapr): - """Import the test dataset into the state store via Dapr.""" - dapr.run( - '--app-id demo --dapr-http-port 3500 --resources-path components ' - '-- curl -X POST -H "Content-Type: application/json" ' - '-d @dataset.json http://localhost:3500/v1.0/state/statestore', - timeout=15, - ) + """Seed the test dataset via Dapr's state API. 
+ + The seeding has to go through a Dapr sidecar, not directly to MongoDB: + ``state.mongodb`` wraps every value as ``{_id, value, etag, _ttl}`` (see + ``components-contrib/state/mongodb/mongodb.go``), and the query example + reads these back through the same component. Writing raw documents with + pymongo would skip that encoding and the query would return nothing. + """ + dapr.start('--app-id demo --dapr-http-port 3500 --resources-path components', wait=5) + dataset = (EXAMPLES_DIR / 'state_store_query' / 'dataset.json').read_text() + httpx.post( + 'http://localhost:3500/v1.0/state/statestore', + content=dataset, + headers={'Content-Type': 'application/json'}, + timeout=10, + ).raise_for_status() + dapr.stop() @pytest.mark.example_dir('state_store_query') diff --git a/tests/integration/test_w3c_tracing.py b/tests/integration/test_w3c_tracing.py index 4cd53780a..2c605ad9c 100644 --- a/tests/integration/test_w3c_tracing.py +++ b/tests/integration/test_w3c_tracing.py @@ -11,7 +11,7 @@ @pytest.mark.example_dir('w3c-tracing') def test_w3c_tracing(dapr): - receiver = dapr.start( + dapr.start( '--app-id invoke-receiver --app-protocol grpc --app-port 3001 python3 invoke-receiver.py', wait=5, ) @@ -22,4 +22,4 @@ def test_w3c_tracing(dapr): for line in EXPECTED_CALLER: assert line in caller_output, f'Missing in caller output: {line}' - dapr.stop(receiver) + dapr.stop() diff --git a/tox.ini b/tox.ini index 809d8cc77..b4dbd0423 100644 --- a/tox.ini +++ b/tox.ini @@ -21,7 +21,7 @@ commands = coverage xml commands_pre = - pip uninstall -y dapr dapr-ext-grpc dapr-ext-fastapi dapr-ext-langgraph dapr-ext-strands dapr-ext-flask dapr-ext-langgraph dapr-ext-strands + pip uninstall -y dapr dapr-ext-grpc dapr-ext-fastapi dapr-ext-langgraph dapr-ext-strands flask-dapr pip install -r dev-requirements.txt \ -e {toxinidir}/ \ -e {toxinidir}/ext/dapr-ext-workflow/ \ @@ -51,7 +51,7 @@ commands = allowlist_externals=* commands_pre = - pip uninstall -y dapr dapr-ext-grpc dapr-ext-fastapi 
dapr-ext-langgraph dapr-ext-strands dapr-ext-flask dapr-ext-langgraph dapr-ext-strands + pip uninstall -y dapr dapr-ext-grpc dapr-ext-fastapi dapr-ext-langgraph dapr-ext-strands flask-dapr pip install -r {toxinidir}/dev-requirements.txt \ -e {toxinidir}/ \ -e {toxinidir}/ext/dapr-ext-workflow/ \ @@ -70,7 +70,7 @@ usedevelop = False commands = mypy --config-file mypy.ini commands_pre = - pip uninstall -y dapr dapr-ext-grpc dapr-ext-fastapi dapr-ext-langgraph dapr-ext-strands dapr-ext-flask dapr-ext-langgraph dapr-ext-strands + pip uninstall -y dapr dapr-ext-grpc dapr-ext-fastapi dapr-ext-langgraph dapr-ext-strands flask-dapr pip install -r dev-requirements.txt \ -e {toxinidir}/ \ -e {toxinidir}/ext/dapr-ext-workflow/ \ From 2766a45cf99c43a4c2601e090ee6bd58e2f57b26 Mon Sep 17 00:00:00 2001 From: Sergio Herrera <627709+seherv@users.noreply.github.com> Date: Tue, 21 Apr 2026 21:33:48 +0200 Subject: [PATCH 15/20] Use static crypto keys Signed-off-by: Sergio Herrera <627709+seherv@users.noreply.github.com> --- examples/crypto/keys/rsa-private-key.pem | 52 ++++++++++++++++++++++++ examples/crypto/keys/symmetric-key-256 | 1 + tests/integration/test_crypto.py | 36 ++++------------ 3 files changed, 60 insertions(+), 29 deletions(-) create mode 100644 examples/crypto/keys/rsa-private-key.pem create mode 100644 examples/crypto/keys/symmetric-key-256 diff --git a/examples/crypto/keys/rsa-private-key.pem b/examples/crypto/keys/rsa-private-key.pem new file mode 100644 index 000000000..a2de6762a --- /dev/null +++ b/examples/crypto/keys/rsa-private-key.pem @@ -0,0 +1,52 @@ +-----BEGIN PRIVATE KEY----- +MIIJQQIBADANBgkqhkiG9w0BAQEFAASCCSswggknAgEAAoICAQDNPVTQbkKW4eja +oCDnsacryzQF/Q4vAMfl7oMjtVm9QHM/k5AJqPDJQhjollQsRzJ5g8FkPdSUnlio +C+yd+3lSmKx2j5LC6F5YIkzFJOjVU5rQwAxJkbpVhoXF2t4b5UXrbZxgoU/tjnAc +FIIPCALbbm02HSb2SZQYPMgDGI+wZ11HI2AyAqAftfCrdmT8PkW0+XmPRz3auSWm +5o0I4v7chx7T1LN6Y7xAXd+3sE7CWhWR5cGpabxAZ2Zz5u48vdBhF/5hh6R+Ebov 
+jXW6i5ZpU5DEk6z3aU1fbciCZqIkIFHD8rErwMJasnZhNys/4U5R1xM86IVwI1Eb +fa3AnbtE3Vj66TUSwTWM664xoxuNCvK2/v33Mbwg2AM9a8Nd9n6dNIdQ3Ryh1gtF +SK7yyA95E53mHt6l47m6/qt5dXehngBK79vlE0gkJCxLuzf35UgTsrEBl601dXp5 +SQHkjRFnToDzNTeiWzlzLcBu50lDyPMXtBkjop6mSKrtFsb4puXEd/WVgNlcfSZe +TK0ASmCuXdBia8kdVUlDizYEWtz5T+E4r3OmWiflxH4YAFrhxT++fNe1WO3+Y1uu +XAR7RzU7eE68ETDwvLkYCvraDP1iMXGeTMCjIZdu49vLNMdpFsJPVAW5bAz2kJOr +9sLSLoHGwAn+WWE4NhfHeInYstA2YwIDAQABAoICABaUSsJrfvPugpmayEP1LXkJ +7/1Aq9DL+zH2nYLOLsM9VfCGoXAOn/7kQK1F7Ji6dHkd36bRjpOTIBnMxglTYzON +DFw2y2SZ/9ceXufJebwOaJfSqQdm+uLx28G6pHjZLmoKMwwGcy6lXvwX3X8d2IKf +kXBEoMazrZFFDpQYnaZAmOh8odaep1MVxxZ1/gIqL60LTS5QHiPz/opwDtANeRB1 +5RRU8DHkyw8hxL0GroN/OaRFbJrgwQ8s0P6rR0Zzc3tbEmdUbupXtO4KWAtf0/pe +cSzPOlY1xYdcIpUGCYyD6bru9kLj//3OaGulkCKE/QLP8JPg2N1PZVrq5rSsJa/p +YFoJR5uvK02YPJA/+tWbd78WRXt6iPcARkcwB5YDk7hAbuzYGFrU0CNRtY2bPTOX +n4ogkHx8921/nxLlsf0SMzeZWGPb9/rbAUmM/TZJXSHy5XgeiTckI2HA5tyN4QBT +Yues38Aefda46oSTqo135A3D3MbCHeGu401+zlgftV4IuF0XApGyRWK67E3VVmoA +0hvmkzmC/qNgawR5lkk08+ZpyDUnT42RI+KsO9cRE4vRSJiVZdjFAE3rcf8R2gyQ +Xf3liFicV3YlpoxGB3/AO510wVq0yNfbCOhJQ54fA/ZVE97zIC7HhmFCcB6coygm +uXyYGePwDH6bo66/F83RAoIBAQDpmHwI/K9MdZKF7geS3MPQOKAGby6vncgvApeL +rGxM8ScO1QxV8uQ3emvKS0KLvMRDtpyPz4aHzq00DaEI7UYPxCGsN+/pf/PyI+Tm +WrfQXOZUjTL/0CsSXmwcvGQVMruB3cjrmj7B9RPH0jIZv+esNfH6u7gpvrNgbqxs +PneEN1XtFxe08G91R0hN07ggbhqqUChW4hbytl/KqVDlYPCKGZfDIigBTI5vsd4L +KtMGfZ5fW6acj8Dn3A8hzYHnNXI1E1mAl9Zu+TRW/pDaaPBoKqhodSSDAb0RoJGc +y1bZbWSy3QoAYN0wla/kE6P3LQ7diMtmj3d3b3ChSI0Mx5w/AoIBAQDg7J1+zcO4 +rH5a4U9TYnLwQfzooXfokuTv09uxH3bE+Q0vdEyofxCXbh6qUK5upGmCda0dWKdw +OxGEk/TNOl7Qw3J1R1CLJVPPCU4b/d7qi3oPaF523cMdEpxS85KfA3LDOFMgqTZ/ +RyuIQbH3iS1w+gRsFYh8DDJdcSSu8RKjX8JVhkz2UQFPfA8YqRvLNUf1QSRmB53Y +zeNJ367SV4FzJym0VqTsiaVHQPpBXawltGNn0eqXNpv4TOLpQ3Q7Y1S3Y39prLJ1 +g5Ufr87kwh0BwS8dXDOgF43ATyHwwPCOo1ZjudVyqYvJVV+ITZJ1eZ3l/0U2nnsD +PYNcZKVhfKzdAoIBAENFB0srQWw6W4S4JHQ1oSpAdE0GDaLDRFfNXkj50YJi3AWY +cuH5faFAXvQ1sic9qCN73iBH+gz4Bsb7uckxU0DNEYlf3nYWw/CSR6PSsiaN6kKl 
+Gu+ySgUTLf0kf4nfP0JJ1UeL9tCyPA0KSiVCL3xXWKUFFCbpZQy7MmpFnvNzYApT +4R7ZMq/KZFcNRnQIYSN0y/khSMyCmplpIwO7Y+nRLvQhzPV6z3X4+eGrZnPzDv2V +Dij9+OaMZ8srPGKR8J66QMcYcscoetsmmh5bpAfLaQ4T1fzoLkN6QxStNgiNSTd9 +EhlDy87m/G0o/sn6rtI7R5/0Zsn9TKkVlJD+ls8CggEANPklQrcdcIIXpDnKX/4g +ydsQwI0+22S1TJKd/EJHy65IX7PJVinO84s4563m1yIbw2EJq460qKcQwiPClQ85 +Q3u0mlB4dL0O1wT/A3KwLJc64SQYk3A5QsCeVp8NGixKvBWo5llT/3f4lbe7PWxu +alxH7FjJ80VAG2fJVvZqCFZGQ7RErgJ4B4tVVt6FMD/VObrk4q7Ki0Q6Uqy+1MVN +NJy1osaBQ0BLz9NK3Vg9cgfhHZN/56sx4rHhA0Uiu9XyHtrtKCtHQIwD9BmI5bGd ++UrRWN3dPsgtV2yLttMKFN39O7GJxt6NkJZt0IFMjCRffsq3N1zt5d538Ku3k5U0 +dQKCAQBT5ZpGpuGeG2RI4lzF2iejApZ8Qa7YmTGem7M2T062wlhgyBNogKXxbrl/ +TyvpB5gXSkcCMmdD8727WJNUnnX7EWk+zzqBF1mn6KGoar23YDHLuMKxv6NEF8kI +D4l92SpMJNWkQaoOLKwNz8x8bJ8uYutLLJlDjnUpbdMbUgnw8Nkcflfr3nAKZd5e +BJ46tSNjMV9KyQd5b+pietirVyS3afJaPJNE6Uu8VIPbbxApAW3dfIQznIwgx62E +bWBtDNguJzLLv4zJ+XhcOEIdgAaNBUsT+owfF0ok6EEBzIl51pSo7w4Nh5PkMw4d +VfTYN1T7nfugAi8VqPcL/5ZKQzIz +-----END PRIVATE KEY----- diff --git a/examples/crypto/keys/symmetric-key-256 b/examples/crypto/keys/symmetric-key-256 new file mode 100644 index 000000000..e9a909954 --- /dev/null +++ b/examples/crypto/keys/symmetric-key-256 @@ -0,0 +1 @@ +sÔßsŸÁÆ¿kU>A@{0ÕûZJýlÀ“CQõ \ No newline at end of file diff --git a/tests/integration/test_crypto.py b/tests/integration/test_crypto.py index 9e7277195..881159aa2 100644 --- a/tests/integration/test_crypto.py +++ b/tests/integration/test_crypto.py @@ -1,5 +1,3 @@ -import shutil -import subprocess from pathlib import Path import pytest @@ -18,38 +16,18 @@ @pytest.fixture() -def crypto_keys(): - keys_dir = CRYPTO_DIR / 'keys' - keys_dir.mkdir(exist_ok=True) - subprocess.run( - ( - 'openssl', - 'genpkey', - '-algorithm', - 'RSA', - '-pkeyopt', - 'rsa_keygen_bits:4096', - '-out', - 'keys/rsa-private-key.pem', - ), - cwd=CRYPTO_DIR, - check=True, - capture_output=True, - ) - subprocess.run( - ('openssl', 'rand', '-out', 'keys/symmetric-key-256', '32'), - cwd=CRYPTO_DIR, - check=True, - 
capture_output=True, - ) +def crypto_artifacts(): + """Clean up output files written by the crypto example on teardown. + + Example RSA and AES keys are in ``examples/crypto/keys/``. + """ yield - shutil.rmtree(keys_dir, ignore_errors=True) (CRYPTO_DIR / 'encrypted.out').unlink(missing_ok=True) (CRYPTO_DIR / 'decrypted.out.jpg').unlink(missing_ok=True) @pytest.mark.example_dir('crypto') -def test_crypto(dapr, crypto_keys): +def test_crypto(dapr, crypto_artifacts): output = dapr.run( '--app-id crypto --resources-path ./components/ -- python3 crypto.py', timeout=30, @@ -60,7 +38,7 @@ def test_crypto(dapr, crypto_keys): @pytest.mark.example_dir('crypto') -def test_crypto_async(dapr, crypto_keys): +def test_crypto_async(dapr, crypto_artifacts): output = dapr.run( '--app-id crypto-async --resources-path ./components/ -- python3 crypto-async.py', timeout=30, From f01a7fd4ae50db253e3470fd247a89d7a6f8b490 Mon Sep 17 00:00:00 2001 From: Sergio Herrera <627709+seherv@users.noreply.github.com> Date: Tue, 21 Apr 2026 21:37:15 +0200 Subject: [PATCH 16/20] Add missing -- separators Signed-off-by: Sergio Herrera <627709+seherv@users.noreply.github.com> --- tests/integration/test_demo_actor.py | 2 +- tests/integration/test_distributed_lock.py | 2 +- tests/integration/test_invoke_binding.py | 2 +- tests/integration/test_jobs.py | 2 +- tests/integration/test_w3c_tracing.py | 4 ++-- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/integration/test_demo_actor.py b/tests/integration/test_demo_actor.py index e9cd5b444..9fb452c6f 100644 --- a/tests/integration/test_demo_actor.py +++ b/tests/integration/test_demo_actor.py @@ -34,7 +34,7 @@ def test_demo_actor(dapr): wait=10, ) client_output = dapr.run( - '--app-id demo-client python3 demo_actor_client.py', + '--app-id demo-client -- python3 demo_actor_client.py', timeout=60, ) for line in EXPECTED_CLIENT: diff --git a/tests/integration/test_distributed_lock.py b/tests/integration/test_distributed_lock.py index 
a2ac0c6b0..47e243c90 100644 --- a/tests/integration/test_distributed_lock.py +++ b/tests/integration/test_distributed_lock.py @@ -14,7 +14,7 @@ @pytest.mark.example_dir('distributed_lock') def test_distributed_lock(dapr): output = dapr.run( - '--app-id=locksapp --app-protocol grpc --resources-path components/ python3 lock.py', + '--app-id=locksapp --app-protocol grpc --resources-path components/ -- python3 lock.py', timeout=10, ) for line in EXPECTED_LINES: diff --git a/tests/integration/test_invoke_binding.py b/tests/integration/test_invoke_binding.py index 710c0c1f8..4537a231e 100644 --- a/tests/integration/test_invoke_binding.py +++ b/tests/integration/test_invoke_binding.py @@ -56,7 +56,7 @@ def kafka(): def test_invoke_binding(dapr, kafka): dapr.start( '--app-id receiver --app-protocol grpc --app-port 50051 ' - '--dapr-http-port 3500 --resources-path ./components python3 invoke-input-binding.py', + '--dapr-http-port 3500 --resources-path ./components -- python3 invoke-input-binding.py', wait=5, ) diff --git a/tests/integration/test_jobs.py b/tests/integration/test_jobs.py index 591350d39..d6450ba24 100644 --- a/tests/integration/test_jobs.py +++ b/tests/integration/test_jobs.py @@ -42,7 +42,7 @@ def test_job_management(dapr): @pytest.mark.example_dir('jobs') def test_job_processing(dapr): dapr.start( - '--app-id jobs-workflow --app-protocol grpc --app-port 50051 python3 job_processing.py', + '--app-id jobs-workflow --app-protocol grpc --app-port 50051 -- python3 job_processing.py', wait=15, ) output = dapr.stop() diff --git a/tests/integration/test_w3c_tracing.py b/tests/integration/test_w3c_tracing.py index 2c605ad9c..6e9a58814 100644 --- a/tests/integration/test_w3c_tracing.py +++ b/tests/integration/test_w3c_tracing.py @@ -12,11 +12,11 @@ @pytest.mark.example_dir('w3c-tracing') def test_w3c_tracing(dapr): dapr.start( - '--app-id invoke-receiver --app-protocol grpc --app-port 3001 python3 invoke-receiver.py', + '--app-id invoke-receiver --app-protocol grpc 
--app-port 3001 -- python3 invoke-receiver.py', wait=5, ) caller_output = dapr.run( - '--app-id invoke-caller --app-protocol grpc python3 invoke-caller.py', + '--app-id invoke-caller --app-protocol grpc -- python3 invoke-caller.py', timeout=30, ) for line in EXPECTED_CALLER: From a752fdeaf22e32e1faa001553827724d08809f77 Mon Sep 17 00:00:00 2001 From: Sergio Herrera <627709+seherv@users.noreply.github.com> Date: Tue, 21 Apr 2026 21:48:02 +0200 Subject: [PATCH 17/20] Add missing -- separators to examples/ READMEs Signed-off-by: Sergio Herrera <627709+seherv@users.noreply.github.com> --- examples/demo_actor/README.md | 2 +- examples/distributed_lock/README.md | 2 +- examples/invoke-binding/README.md | 4 ++-- examples/invoke-custom-data/README.md | 4 ++-- examples/invoke-simple/README.md | 4 ++-- examples/jobs/README.md | 2 +- examples/metadata/README.md | 2 +- examples/secret_store/README.md | 4 ++-- examples/w3c-tracing/README.md | 4 ++-- examples/workflow/README.md | 18 +++++++++--------- 10 files changed, 23 insertions(+), 23 deletions(-) diff --git a/examples/demo_actor/README.md b/examples/demo_actor/README.md index de7ef595c..23d267deb 100644 --- a/examples/demo_actor/README.md +++ b/examples/demo_actor/README.md @@ -99,7 +99,7 @@ expected_stdout_lines: ```bash # Run actor client cd demo_actor - dapr run --app-id demo-client python3 demo_actor_client.py + dapr run --app-id demo-client -- python3 demo_actor_client.py ``` Expected output: diff --git a/examples/distributed_lock/README.md b/examples/distributed_lock/README.md index 599d7c1d1..1c60e6c85 100644 --- a/examples/distributed_lock/README.md +++ b/examples/distributed_lock/README.md @@ -40,7 +40,7 @@ timeout_seconds: 5 --> ```bash -dapr run --app-id=locksapp --app-protocol grpc --resources-path components/ python3 lock.py +dapr run --app-id=locksapp --app-protocol grpc --resources-path components/ -- python3 lock.py ``` diff --git a/examples/invoke-binding/README.md b/examples/invoke-binding/README.md index 
36413d10c..6126ea00e 100644 --- a/examples/invoke-binding/README.md +++ b/examples/invoke-binding/README.md @@ -47,7 +47,7 @@ sleep: 5 2. Start Receiver (expose gRPC server receiver on port 50051) ```bash -dapr run --app-id receiver --app-protocol grpc --app-port 50051 --resources-path ./components python3 invoke-input-binding.py +dapr run --app-id receiver --app-protocol grpc --app-port 50051 --resources-path ./components -- python3 invoke-input-binding.py ``` @@ -67,7 +67,7 @@ sleep: 5 --> ```bash -dapr run --app-id publisher --app-protocol grpc --resources-path ./components python3 invoke-output-binding.py +dapr run --app-id publisher --app-protocol grpc --resources-path ./components -- python3 invoke-output-binding.py ``` diff --git a/examples/invoke-custom-data/README.md b/examples/invoke-custom-data/README.md index 1a1ab3881..b5426c722 100644 --- a/examples/invoke-custom-data/README.md +++ b/examples/invoke-custom-data/README.md @@ -39,7 +39,7 @@ sleep: 5 --> ```bash - dapr run --app-id invoke-receiver --app-protocol grpc --app-port 50051 python3 invoke-receiver.py + dapr run --app-id invoke-receiver --app-protocol grpc --app-port 50051 -- python3 invoke-receiver.py ``` @@ -58,7 +58,7 @@ sleep: 5 --> ```bash - dapr run --app-id invoke-caller --app-protocol grpc python3 invoke-caller.py + dapr run --app-id invoke-caller --app-protocol grpc -- python3 invoke-caller.py ``` diff --git a/examples/invoke-simple/README.md b/examples/invoke-simple/README.md index c8f452a6f..35fd2b9ed 100644 --- a/examples/invoke-simple/README.md +++ b/examples/invoke-simple/README.md @@ -32,7 +32,7 @@ sleep: 5 ```bash # 1. Start Receiver (expose gRPC server receiver on port 50051) -dapr run --app-id invoke-receiver --app-protocol grpc --app-port 50051 python3 invoke-receiver.py +dapr run --app-id invoke-receiver --app-protocol grpc --app-port 50051 -- python3 invoke-receiver.py ``` @@ -52,7 +52,7 @@ sleep: 5 ```bash # 2. 
Start Caller -dapr run --app-id invoke-caller --app-protocol grpc --dapr-http-port 3500 python3 invoke-caller.py +dapr run --app-id invoke-caller --app-protocol grpc --dapr-http-port 3500 -- python3 invoke-caller.py ``` diff --git a/examples/jobs/README.md b/examples/jobs/README.md index 3964abf68..5c0fb514f 100644 --- a/examples/jobs/README.md +++ b/examples/jobs/README.md @@ -93,7 +93,7 @@ sleep: 15 ```bash # Start the complete workflow example (schedules jobs and handles job events) -dapr run --app-id jobs-workflow --app-protocol grpc --app-port 50051 python3 job_processing.py +dapr run --app-id jobs-workflow --app-protocol grpc --app-port 50051 -- python3 job_processing.py ``` diff --git a/examples/metadata/README.md b/examples/metadata/README.md index 9940acd5b..ee3d9d20c 100644 --- a/examples/metadata/README.md +++ b/examples/metadata/README.md @@ -49,7 +49,7 @@ timeout_seconds: 10 --> ```bash -dapr run --app-id=my-metadata-app --app-protocol grpc --resources-path components/ python3 app.py +dapr run --app-id=my-metadata-app --app-protocol grpc --resources-path components/ -- python3 app.py ``` diff --git a/examples/secret_store/README.md b/examples/secret_store/README.md index 75b5a5ca1..d384c6fc2 100644 --- a/examples/secret_store/README.md +++ b/examples/secret_store/README.md @@ -40,7 +40,7 @@ timeout_seconds: 2 --> ```bash -dapr run --app-id=secretsapp --app-protocol grpc --resources-path components/ python3 example.py +dapr run --app-id=secretsapp --app-protocol grpc --resources-path components/ -- python3 example.py ``` @@ -87,7 +87,7 @@ timeout_seconds: 2 --> ```bash -dapr run --app-id=secretsapp --app-protocol grpc --config config.yaml --resources-path components/ python3 example.py +dapr run --app-id=secretsapp --app-protocol grpc --config config.yaml --resources-path components/ -- python3 example.py ``` diff --git a/examples/w3c-tracing/README.md b/examples/w3c-tracing/README.md index 0dc892ed8..d63c3b544 100644 --- 
a/examples/w3c-tracing/README.md +++ b/examples/w3c-tracing/README.md @@ -140,7 +140,7 @@ sleep: 5 --> ```sh -dapr run --app-id invoke-receiver --app-protocol grpc --app-port 3001 python3 invoke-receiver.py +dapr run --app-id invoke-receiver --app-protocol grpc --app-port 3001 -- python3 invoke-receiver.py ``` @@ -243,7 +243,7 @@ sleep: 10 --> ```bash -dapr run --app-id invoke-caller --app-protocol grpc python3 invoke-caller.py +dapr run --app-id invoke-caller --app-protocol grpc -- python3 invoke-caller.py ``` diff --git a/examples/workflow/README.md b/examples/workflow/README.md index ecc28193d..9d37a1105 100644 --- a/examples/workflow/README.md +++ b/examples/workflow/README.md @@ -361,9 +361,9 @@ sleep: 20 --> ```sh -dapr run --app-id wfexample3 python3 multi-app3.py & -dapr run --app-id wfexample2 python3 multi-app2.py & -dapr run --app-id wfexample1 python3 multi-app1.py +dapr run --app-id wfexample3 -- python3 multi-app3.py & +dapr run --app-id wfexample2 -- python3 multi-app2.py & +dapr run --app-id wfexample1 -- python3 multi-app1.py ``` @@ -404,9 +404,9 @@ sleep: 20 ```sh export ERROR_ACTIVITY_MODE=true -dapr run --app-id wfexample3 python3 multi-app3.py & -dapr run --app-id wfexample2 python3 multi-app2.py & -dapr run --app-id wfexample1 python3 multi-app1.py +dapr run --app-id wfexample3 -- python3 multi-app3.py & +dapr run --app-id wfexample2 -- python3 multi-app2.py & +dapr run --app-id wfexample1 -- python3 multi-app1.py ``` @@ -445,9 +445,9 @@ sleep: 20 ```sh export ERROR_WORKFLOW_MODE=true -dapr run --app-id wfexample3 python3 multi-app3.py & -dapr run --app-id wfexample2 python3 multi-app2.py & -dapr run --app-id wfexample1 python3 multi-app1.py +dapr run --app-id wfexample3 -- python3 multi-app3.py & +dapr run --app-id wfexample2 -- python3 multi-app2.py & +dapr run --app-id wfexample1 -- python3 multi-app1.py ``` From 260b30f78968470e6aec28eb157020493812d89f Mon Sep 17 00:00:00 2001 From: Sergio Herrera <627709+seherv@users.noreply.github.com> 
Date: Tue, 21 Apr 2026 23:54:46 +0200 Subject: [PATCH 18/20] Address PR comments (2) Signed-off-by: Sergio Herrera <627709+seherv@users.noreply.github.com> --- examples/demo_actor/demo_actor/demo_actor_client.py | 2 +- tests/integration/test_demo_actor.py | 2 +- tests/integration/test_grpc_proxying.py | 3 +-- tox.ini | 3 +-- 4 files changed, 4 insertions(+), 6 deletions(-) diff --git a/examples/demo_actor/demo_actor/demo_actor_client.py b/examples/demo_actor/demo_actor/demo_actor_client.py index ad0dfccb6..1b9f457f0 100644 --- a/examples/demo_actor/demo_actor/demo_actor_client.py +++ b/examples/demo_actor/demo_actor/demo_actor_client.py @@ -29,7 +29,7 @@ async def main(): # non-remoting actor invocation print('call actor method via proxy.invoke_method()', flush=True) rtn_bytes = await proxy.invoke_method('GetMyData') - print(rtn_bytes, flush=True) + print(rtn_bytes.decode(), flush=True) # RPC style using python duck-typing print('call actor method using rpc style', flush=True) rtn_obj = await proxy.GetMyData() diff --git a/tests/integration/test_demo_actor.py b/tests/integration/test_demo_actor.py index 9fb452c6f..b0d1fd2a6 100644 --- a/tests/integration/test_demo_actor.py +++ b/tests/integration/test_demo_actor.py @@ -14,7 +14,7 @@ EXPECTED_CLIENT = [ 'call actor method via proxy.invoke_method()', - "b'null'", + 'null', 'call actor method using rpc style', 'None', 'call SetMyData actor method to save the state', diff --git a/tests/integration/test_grpc_proxying.py b/tests/integration/test_grpc_proxying.py index 15fffb799..8f1ce51d3 100644 --- a/tests/integration/test_grpc_proxying.py +++ b/tests/integration/test_grpc_proxying.py @@ -8,8 +8,7 @@ @pytest.mark.example_dir('grpc_proxying') def test_grpc_proxying(dapr): dapr.start( - '--app-id invoke-receiver --app-protocol grpc --app-port 50051 ' - '--config config.yaml -- python invoke-receiver.py', + '--app-id invoke-receiver --app-protocol grpc --app-port 50051 --config config.yaml -- python invoke-receiver.py', 
wait=5, ) caller_output = dapr.run( diff --git a/tox.ini b/tox.ini index b4dbd0423..de0b30a2d 100644 --- a/tox.ini +++ b/tox.ini @@ -61,8 +61,7 @@ commands_pre = -e {toxinidir}/ext/dapr-ext-strands/ \ -e {toxinidir}/ext/flask_dapr/ \ opentelemetry-exporter-zipkin \ - langchain-ollama \ - uvicorn + langchain-ollama [testenv:type] basepython = python3 From 4e458b1125a5a719567e51bf9942e0d55a0c43c7 Mon Sep 17 00:00:00 2001 From: Sergio Herrera <627709+seherv@users.noreply.github.com> Date: Wed, 22 Apr 2026 00:17:58 +0200 Subject: [PATCH 19/20] Update validate_examples.yaml Signed-off-by: Sergio Herrera <627709+seherv@users.noreply.github.com> --- .github/workflows/validate_examples.yaml | 64 +++--------------------- 1 file changed, 8 insertions(+), 56 deletions(-) diff --git a/.github/workflows/validate_examples.yaml b/.github/workflows/validate_examples.yaml index 7f109a868..ae784965e 100644 --- a/.github/workflows/validate_examples.yaml +++ b/.github/workflows/validate_examples.yaml @@ -29,13 +29,6 @@ jobs: validate: runs-on: ubuntu-latest env: - GOVER: 1.21 - GOOS: linux - GOARCH: amd64 - GOPROXY: https://proxy.golang.org - DAPR_INSTALL_URL: https://raw.githubusercontent.com/dapr/cli/master/install/install.sh - DAPR_CLI_REF: ${{ github.event.inputs.daprcli_commit }} - DAPR_REF: ${{ github.event.inputs.daprdapr_commit }} CHECKOUT_REPO: ${{ github.repository }} CHECKOUT_REF: ${{ github.ref }} @@ -50,7 +43,6 @@ jobs: if [ ${{ github.event.client_payload.command }} = "ok-to-test" ]; then echo "CHECKOUT_REPO=${{ github.event.client_payload.pull_head_repo }}" >> $GITHUB_ENV echo "CHECKOUT_REF=${{ github.event.client_payload.pull_head_ref }}" >> $GITHUB_ENV - echo "DAPR_REF=master" >> $GITHUB_ENV fi - name: Check out code onto GOPATH @@ -96,61 +88,21 @@ jobs: python -m pip install --upgrade pip pip install setuptools wheel twine tox - name: Set up Dapr CLI - run: | - wget -q "https://github.com/dapr/cli/releases/download/v${{ env.DAPR_CLI_VER }}/dapr_${{ env.GOOS }}_${{ 
env.GOARCH }}.tar.gz" -O /tmp/dapr.tar.gz - sudo tar xzf /tmp/dapr.tar.gz -C /usr/local/bin dapr - dapr --version - - name: Set up Go ${{ env.GOVER }} - if: env.DAPR_REF != '' || env.DAPR_CLI_REF != '' - uses: actions/setup-go@v6 + uses: dapr/.github/.github/actions/setup-dapr-cli@main + with: + commit: ${{ github.event.inputs.daprcli_commit }} + github-token: ${{ secrets.GITHUB_TOKEN }} + - name: Set up Dapr runtime + uses: dapr/.github/.github/actions/setup-dapr-runtime@main with: - go-version: ${{ env.GOVER }} + commit: ${{ github.event.inputs.daprdapr_commit }} + github-token: ${{ secrets.GITHUB_TOKEN }} - name: Set up Llama run: | curl -fsSL https://ollama.com/install.sh | sh nohup ollama serve & sleep 10 ollama pull llama3.2:latest - - name: Checkout Dapr CLI repo to override dapr command. - uses: actions/checkout@v6 - if: env.DAPR_CLI_REF != '' - with: - repository: dapr/cli - ref: ${{ env.DAPR_CLI_REF }} - path: cli - - name: Checkout Dapr repo to override daprd. - uses: actions/checkout@v6 - if: env.DAPR_REF != '' - with: - repository: dapr/dapr - ref: ${{ env.DAPR_REF }} - path: dapr_runtime - - name: Build and override dapr cli with referenced commit. - if: env.DAPR_CLI_REF != '' - run: | - cd cli - make - sudo cp dist/linux_amd64/release/dapr /usr/local/bin/dapr - cd .. - - name: Initialize Dapr runtime ${{ env.DAPR_RUNTIME_VER }} - run: | - dapr uninstall --all - dapr init --runtime-version ${{ env.DAPR_RUNTIME_VER }} - - name: Build and override daprd with referenced commit. - if: env.DAPR_REF != '' - run: | - cd dapr_runtime - make - mkdir -p $HOME/.dapr/bin/ - cp dist/linux_amd64/release/daprd $HOME/.dapr/bin/daprd - cd .. - - name: Override placement service. - if: env.DAPR_REF != '' - run: | - docker stop dapr_placement - cd dapr_runtime - ./dist/linux_amd64/release/placement --healthz-port 9091 & - cd .. 
- name: Check examples run: | tox -e integration From 53ddae857b7aca2f9991c77e060184e1a148fe89 Mon Sep 17 00:00:00 2001 From: Sergio Herrera <627709+seherv@users.noreply.github.com> Date: Wed, 22 Apr 2026 16:52:20 +0200 Subject: [PATCH 20/20] Run python3 in test_grpc_proxying.py Signed-off-by: Sergio Herrera <627709+seherv@users.noreply.github.com> --- tests/integration/test_grpc_proxying.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/integration/test_grpc_proxying.py b/tests/integration/test_grpc_proxying.py index 8f1ce51d3..12933df1f 100644 --- a/tests/integration/test_grpc_proxying.py +++ b/tests/integration/test_grpc_proxying.py @@ -8,11 +8,11 @@ @pytest.mark.example_dir('grpc_proxying') def test_grpc_proxying(dapr): dapr.start( - '--app-id invoke-receiver --app-protocol grpc --app-port 50051 --config config.yaml -- python invoke-receiver.py', + '--app-id invoke-receiver --app-protocol grpc --app-port 50051 --config config.yaml -- python3 invoke-receiver.py', wait=5, ) caller_output = dapr.run( - '--app-id invoke-caller --dapr-grpc-port 50007 --config config.yaml -- python invoke-caller.py', + '--app-id invoke-caller --dapr-grpc-port 50007 --config config.yaml -- python3 invoke-caller.py', timeout=30, ) for line in EXPECTED_CALLER: