[api] Device defined action responses (#12136)
Co-authored-by: J. Nick Koston <nick@home-assistant.io>
Co-authored-by: J. Nick Koston <nick@koston.org>
@@ -181,6 +181,99 @@ api:
            else:
              - logger.log: "Skipped loops"
        - logger.log: "After combined test"
    # ==========================================================================
    # supports_response: status (auto-detected - api.respond without data)
    # Has call_id only - reports success/error without data payload
    # ==========================================================================
    - action: test_respond_status
      then:
        - api.respond:
            success: true
        - logger.log:
            format: "Status response sent (call_id=%d)"
            args: [call_id]

    - action: test_respond_status_error
      variables:
        error_msg: string
      then:
        - api.respond:
            success: false
            error_message: !lambda 'return error_msg;'

    # ==========================================================================
    # supports_response: optional (auto-detected - api.respond with data)
    # Has call_id and return_response - client decides if it wants response
    # ==========================================================================
    - action: test_respond_optional
      variables:
        sensor_name: string
        value: float
      then:
        - logger.log:
            format: "Optional response (call_id=%d, return_response=%d)"
            args: [call_id, return_response]
        - api.respond:
            data: !lambda |-
              root["sensor"] = sensor_name;
              root["value"] = value;
              root["unit"] = "°C";

    - action: test_respond_optional_conditional
      variables:
        do_succeed: bool
      then:
        - if:
            condition:
              lambda: 'return do_succeed;'
            then:
              - api.respond:
                  success: true
                  data: !lambda |-
                    root["status"] = "ok";
            else:
              - api.respond:
                  success: false
                  error_message: "Operation failed"

    # ==========================================================================
    # supports_response: only (explicit - always expects data response)
    # Has call_id only - response is always expected with data
    # ==========================================================================
    - action: test_respond_only
      supports_response: only
      variables:
        input: string
      then:
        - logger.log:
            format: "Only response (call_id=%d)"
            args: [call_id]
        - api.respond:
            data: !lambda |-
              root["input"] = input;
              root["processed"] = true;

    - action: test_respond_only_nested
      supports_response: only
      then:
        - api.respond:
            data: !lambda |-
              root["config"]["wifi"] = "connected";
              root["config"]["api"] = true;
              root["items"][0] = "item1";
              root["items"][1] = "item2";

    # ==========================================================================
    # supports_response: none (no api.respond action)
    # No call_id or return_response - just user variables
    # ==========================================================================
    - action: test_no_response
      variables:
        message: string
      then:
        - logger.log:
            format: "No response action: %s"
            args: [message.c_str()]

event:
  - platform: template

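The fixture hunk above wires up one test action per response mode. As an illustration only (not part of the commit): a minimal aioesphomeapi sketch of calling test_respond_optional and decoding its JSON payload, assuming the awaitable execute_service with return_response=True and the response_data field that the integration tests later in this diff rely on; the address, port, and password below are placeholders.

# Sketch only: call the test_respond_optional action from the fixture above and
# decode its JSON payload. Assumes the client behaviour exercised by the tests
# in this PR (awaitable execute_service, response.success, response.response_data).
import asyncio
import json

from aioesphomeapi import APIClient


async def main() -> None:
    client = APIClient("ADDRESS", 6053, "")  # placeholders for host/port/password
    await client.connect(login=True)

    _, services = await client.list_entities_services()
    action = next(s for s in services if s.name == "test_respond_optional")

    # return_response=True asks the device to send the api.respond payload back
    response = await client.execute_service(
        action, {"sensor_name": "living_room", "value": 21.5}, return_response=True
    )
    if response is not None and response.success:
        print(json.loads(response.response_data.decode("utf-8")))

    await client.disconnect()


asyncio.run(main())
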
@@ -252,7 +252,7 @@ my_service = next((s for s in services if s.name == "my_service"), None)
 assert my_service is not None

 # Execute with parameters
-client.execute_service(my_service, {"param1": "value1", "param2": 42})
+await client.execute_service(my_service, {"param1": "value1", "param2": 42})
 ```

 ##### Multiple Entity Tracking

tests/integration/fixtures/api_action_responses.yaml (new file, 93 lines)
@@ -0,0 +1,93 @@
esphome:
  name: api-action-responses-test

host:

logger:
  level: DEBUG

api:
  actions:
    # ==========================================================================
    # supports_response: none (default - no api.respond action)
    # No call_id or return_response - just user variables
    # ==========================================================================
    - action: action_no_response
      variables:
        message: string
      then:
        - logger.log:
            format: "ACTION_NO_RESPONSE called with: %s"
            args: [message.c_str()]

    # ==========================================================================
    # supports_response: status (auto-detected - api.respond without data)
    # Has call_id only - reports success/error without data payload
    # ==========================================================================
    - action: action_status_response
      variables:
        should_succeed: bool
      then:
        - if:
            condition:
              lambda: 'return should_succeed;'
            then:
              - api.respond:
                  success: true
              - logger.log:
                  format: "ACTION_STATUS_RESPONSE success (call_id=%d)"
                  args: [call_id]
            else:
              - api.respond:
                  success: false
                  error_message: "Intentional failure for testing"
              - logger.log:
                  format: "ACTION_STATUS_RESPONSE error (call_id=%d)"
                  args: [call_id]

    # ==========================================================================
    # supports_response: optional (auto-detected - api.respond with data)
    # Has call_id and return_response - client decides if it wants response
    # ==========================================================================
    - action: action_optional_response
      variables:
        value: int
      then:
        - logger.log:
            format: "ACTION_OPTIONAL_RESPONSE (call_id=%d, return_response=%d, value=%d)"
            args: [call_id, return_response, value]
        - api.respond:
            data: !lambda |-
              root["input"] = value;
              root["doubled"] = value * 2;

    # ==========================================================================
    # supports_response: only (explicit - always expects data response)
    # Has call_id only - response is always expected with data
    # ==========================================================================
    - action: action_only_response
      supports_response: only
      variables:
        name: string
      then:
        - logger.log:
            format: "ACTION_ONLY_RESPONSE (call_id=%d, name=%s)"
            args: [call_id, name.c_str()]
        - api.respond:
            data: !lambda |-
              root["greeting"] = "Hello, " + name + "!";
              root["length"] = name.length();

    # Test action with nested JSON response
    - action: action_nested_json
      supports_response: only
      then:
        - logger.log:
            format: "ACTION_NESTED_JSON (call_id=%d)"
            args: [call_id]
        - api.respond:
            data: !lambda |-
              root["config"]["wifi"]["connected"] = true;
              root["config"]["api"]["port"] = 6053;
              root["items"][0] = "first";
              root["items"][1] = "second";
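The comments in this fixture state that the status and optional modes are auto-detected from whether api.respond appears and whether it carries data. A small client-side sketch (illustration only, assuming an already-connected aioesphomeapi client and the SupportsResponseType enum used by the test further below) that lists the fixture's actions and prints the mode each one advertises:

# Sketch: print the supports_response mode advertised by each action in the
# fixture above. "client" is assumed to be an already-connected APIClient.
from aioesphomeapi import SupportsResponseType


async def print_response_modes(client) -> None:
    _, services = await client.list_entities_services()
    for service in services:
        # e.g. action_status_response -> STATUS, action_optional_response -> OPTIONAL
        mode = getattr(service, "supports_response", SupportsResponseType.NONE)
        print(f"{service.name}: {mode}")
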
tests/integration/fixtures/api_action_timeout.yaml (new file, 45 lines)
@@ -0,0 +1,45 @@
esphome:
  name: api-action-timeout-test
  # Use a short timeout for testing (500ms instead of 30s)
  platformio_options:
    build_flags:
      - "-DUSE_API_ACTION_CALL_TIMEOUT_MS=500"

host:

logger:
  level: DEBUG

api:
  actions:
    # Action that responds immediately - should work fine
    - action: action_immediate
      supports_response: only
      then:
        - logger.log: "ACTION_IMMEDIATE responding"
        - api.respond:
            data: !lambda |-
              root["status"] = "immediate";

    # Action that delays 200ms before responding - should work (within 500ms timeout)
    - action: action_short_delay
      supports_response: only
      then:
        - logger.log: "ACTION_SHORT_DELAY starting"
        - delay: 200ms
        - logger.log: "ACTION_SHORT_DELAY responding"
        - api.respond:
            data: !lambda |-
              root["status"] = "short_delay";

    # Action that delays 1s before responding - should fail (exceeds 500ms timeout)
    # The api.respond will log a warning because the action call was already cleaned up
    - action: action_long_delay
      supports_response: only
      then:
        - logger.log: "ACTION_LONG_DELAY starting"
        - delay: 1s
        - logger.log: "ACTION_LONG_DELAY responding (after timeout)"
        - api.respond:
            data: !lambda |-
              root["status"] = "long_delay";
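Illustration only: given the fixture above, a sketch of how a client call to action_long_delay is expected to play out, assuming the same awaitable execute_service used by the tests below; "client" and "action_long_delay" stand in for an already-connected APIClient and the listed UserService handle.

# Sketch of the expected timeout behaviour: action_long_delay waits 1 s, which
# exceeds the 500 ms device-side timeout, so no payload is expected back and a
# client-side timeout is acceptable.
import asyncio
import contextlib


async def exercise_long_delay(client, action_long_delay) -> None:
    with contextlib.suppress(TimeoutError, asyncio.TimeoutError):
        # Bound the wait ourselves; the device has already dropped the call
        # after USE_API_ACTION_CALL_TIMEOUT_MS (500 ms in this fixture).
        await asyncio.wait_for(
            client.execute_service(action_long_delay, {}, return_response=True),
            timeout=2.0,
        )
    # At this point the device log should contain
    # "Cannot send response: no active call found for action_call_id".
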
tests/integration/test_api_action_responses.py (new file, 258 lines)
@@ -0,0 +1,258 @@
"""Integration test for API action responses feature.

Tests the supports_response modes: none, status, optional, only.
"""

from __future__ import annotations

import asyncio
import json
import re

from aioesphomeapi import SupportsResponseType, UserService, UserServiceArgType
import pytest

from .types import APIClientConnectedFactory, RunCompiledFunction


@pytest.mark.asyncio
async def test_api_action_responses(
    yaml_config: str,
    run_compiled: RunCompiledFunction,
    api_client_connected: APIClientConnectedFactory,
) -> None:
    """Test API action response modes work correctly."""
    loop = asyncio.get_running_loop()

    # Track log messages for each action type
    no_response_future = loop.create_future()
    status_success_future = loop.create_future()
    status_error_future = loop.create_future()
    optional_response_future = loop.create_future()
    only_response_future = loop.create_future()
    nested_json_future = loop.create_future()

    # Patterns to match in logs
    no_response_pattern = re.compile(r"ACTION_NO_RESPONSE called with: test_message")
    status_success_pattern = re.compile(
        r"ACTION_STATUS_RESPONSE success \(call_id=\d+\)"
    )
    status_error_pattern = re.compile(r"ACTION_STATUS_RESPONSE error \(call_id=\d+\)")
    optional_response_pattern = re.compile(
        r"ACTION_OPTIONAL_RESPONSE \(call_id=\d+, return_response=\d+, value=42\)"
    )
    only_response_pattern = re.compile(
        r"ACTION_ONLY_RESPONSE \(call_id=\d+, name=World\)"
    )
    nested_json_pattern = re.compile(r"ACTION_NESTED_JSON \(call_id=\d+\)")

    def check_output(line: str) -> None:
        """Check log output for expected messages."""
        if not no_response_future.done() and no_response_pattern.search(line):
            no_response_future.set_result(True)
        elif not status_success_future.done() and status_success_pattern.search(line):
            status_success_future.set_result(True)
        elif not status_error_future.done() and status_error_pattern.search(line):
            status_error_future.set_result(True)
        elif not optional_response_future.done() and optional_response_pattern.search(
            line
        ):
            optional_response_future.set_result(True)
        elif not only_response_future.done() and only_response_pattern.search(line):
            only_response_future.set_result(True)
        elif not nested_json_future.done() and nested_json_pattern.search(line):
            nested_json_future.set_result(True)

    # Run with log monitoring
    async with (
        run_compiled(yaml_config, line_callback=check_output),
        api_client_connected() as client,
    ):
        # Verify device info
        device_info = await client.device_info()
        assert device_info is not None
        assert device_info.name == "api-action-responses-test"

        # List services
        _, services = await client.list_entities_services()

        # Should have 5 services
        assert len(services) == 5, f"Expected 5 services, found {len(services)}"

        # Find our services
        action_no_response: UserService | None = None
        action_status_response: UserService | None = None
        action_optional_response: UserService | None = None
        action_only_response: UserService | None = None
        action_nested_json: UserService | None = None

        for service in services:
            if service.name == "action_no_response":
                action_no_response = service
            elif service.name == "action_status_response":
                action_status_response = service
            elif service.name == "action_optional_response":
                action_optional_response = service
            elif service.name == "action_only_response":
                action_only_response = service
            elif service.name == "action_nested_json":
                action_nested_json = service

        assert action_no_response is not None, "action_no_response not found"
        assert action_status_response is not None, "action_status_response not found"
        assert action_optional_response is not None, (
            "action_optional_response not found"
        )
        assert action_only_response is not None, "action_only_response not found"
        assert action_nested_json is not None, "action_nested_json not found"

        # Verify supports_response modes
        assert action_no_response.supports_response is None or (
            action_no_response.supports_response == SupportsResponseType.NONE
        ), (
            f"action_no_response should have supports_response=NONE, got {action_no_response.supports_response}"
        )

        assert (
            action_status_response.supports_response == SupportsResponseType.STATUS
        ), (
            f"action_status_response should have supports_response=STATUS, "
            f"got {action_status_response.supports_response}"
        )

        assert (
            action_optional_response.supports_response == SupportsResponseType.OPTIONAL
        ), (
            f"action_optional_response should have supports_response=OPTIONAL, "
            f"got {action_optional_response.supports_response}"
        )

        assert action_only_response.supports_response == SupportsResponseType.ONLY, (
            f"action_only_response should have supports_response=ONLY, "
            f"got {action_only_response.supports_response}"
        )

        assert action_nested_json.supports_response == SupportsResponseType.ONLY, (
            f"action_nested_json should have supports_response=ONLY, "
            f"got {action_nested_json.supports_response}"
        )

        # Verify argument types
        # action_no_response: string message
        assert len(action_no_response.args) == 1
        assert action_no_response.args[0].name == "message"
        assert action_no_response.args[0].type == UserServiceArgType.STRING

        # action_status_response: bool should_succeed
        assert len(action_status_response.args) == 1
        assert action_status_response.args[0].name == "should_succeed"
        assert action_status_response.args[0].type == UserServiceArgType.BOOL

        # action_optional_response: int value
        assert len(action_optional_response.args) == 1
        assert action_optional_response.args[0].name == "value"
        assert action_optional_response.args[0].type == UserServiceArgType.INT

        # action_only_response: string name
        assert len(action_only_response.args) == 1
        assert action_only_response.args[0].name == "name"
        assert action_only_response.args[0].type == UserServiceArgType.STRING

        # action_nested_json: no args
        assert len(action_nested_json.args) == 0

        # Test action_no_response (supports_response: none)
        # No response expected for this action
        response = await client.execute_service(
            action_no_response, {"message": "test_message"}
        )
        assert response is None, "action_no_response should not return a response"
        await asyncio.wait_for(no_response_future, timeout=5.0)

        # Test action_status_response with success (supports_response: status)
        response = await client.execute_service(
            action_status_response,
            {"should_succeed": True},
            return_response=True,
        )
        await asyncio.wait_for(status_success_future, timeout=5.0)
        assert response is not None, "Expected response for status action"
        assert response.success is True, (
            f"Expected success=True, got {response.success}"
        )
        assert response.error_message == "", (
            f"Expected empty error_message, got '{response.error_message}'"
        )

        # Test action_status_response with error
        response = await client.execute_service(
            action_status_response,
            {"should_succeed": False},
            return_response=True,
        )
        await asyncio.wait_for(status_error_future, timeout=5.0)
        assert response is not None, "Expected response for status action"
        assert response.success is False, (
            f"Expected success=False, got {response.success}"
        )
        assert "Intentional failure" in response.error_message, (
            f"Expected error message containing 'Intentional failure', "
            f"got '{response.error_message}'"
        )

        # Test action_optional_response (supports_response: optional)
        response = await client.execute_service(
            action_optional_response,
            {"value": 42},
            return_response=True,
        )
        await asyncio.wait_for(optional_response_future, timeout=5.0)
        assert response is not None, "Expected response for optional action"
        assert response.success is True, (
            f"Expected success=True, got {response.success}"
        )
        # Parse response data as JSON
        response_json = json.loads(response.response_data.decode("utf-8"))
        assert response_json["input"] == 42, (
            f"Expected input=42, got {response_json.get('input')}"
        )
        assert response_json["doubled"] == 84, (
            f"Expected doubled=84, got {response_json.get('doubled')}"
        )

        # Test action_only_response (supports_response: only)
        response = await client.execute_service(
            action_only_response,
            {"name": "World"},
            return_response=True,
        )
        await asyncio.wait_for(only_response_future, timeout=5.0)
        assert response is not None, "Expected response for only action"
        assert response.success is True, (
            f"Expected success=True, got {response.success}"
        )
        response_json = json.loads(response.response_data.decode("utf-8"))
        assert response_json["greeting"] == "Hello, World!", (
            f"Expected greeting='Hello, World!', got {response_json.get('greeting')}"
        )
        assert response_json["length"] == 5, (
            f"Expected length=5, got {response_json.get('length')}"
        )

        # Test action_nested_json
        response = await client.execute_service(
            action_nested_json,
            {},
            return_response=True,
        )
        await asyncio.wait_for(nested_json_future, timeout=5.0)
        assert response is not None, "Expected response for nested json action"
        assert response.success is True, (
            f"Expected success=True, got {response.success}"
        )
        response_json = json.loads(response.response_data.decode("utf-8"))
        # Verify nested structure
        assert response_json["config"]["wifi"]["connected"] is True
        assert response_json["config"]["api"]["port"] == 6053
        assert response_json["items"][0] == "first"
        assert response_json["items"][1] == "second"
tests/integration/test_api_action_timeout.py (new file, 172 lines)
@@ -0,0 +1,172 @@
"""Integration test for API action call timeout functionality.

Tests that action calls are automatically cleaned up after timeout,
and that late responses are handled gracefully.
"""

from __future__ import annotations

import asyncio
import contextlib
import re

from aioesphomeapi import UserService
import pytest

from .types import APIClientConnectedFactory, RunCompiledFunction


@pytest.mark.asyncio
async def test_api_action_timeout(
    yaml_config: str,
    run_compiled: RunCompiledFunction,
    api_client_connected: APIClientConnectedFactory,
) -> None:
    """Test API action call timeout behavior.

    This test uses a 500ms timeout (set via USE_API_ACTION_CALL_TIMEOUT_MS define)
    to verify:
    1. Actions that respond within the timeout work correctly
    2. Actions that exceed the timeout have their calls cleaned up
    3. Late responses log a warning but don't crash
    """
    loop = asyncio.get_running_loop()

    # Track log messages
    immediate_future = loop.create_future()
    short_delay_responding_future = loop.create_future()
    long_delay_starting_future = loop.create_future()
    long_delay_responding_future = loop.create_future()
    timeout_warning_future = loop.create_future()

    # Patterns to match in logs
    immediate_pattern = re.compile(r"ACTION_IMMEDIATE responding")
    short_delay_responding_pattern = re.compile(r"ACTION_SHORT_DELAY responding")
    long_delay_starting_pattern = re.compile(r"ACTION_LONG_DELAY starting")
    long_delay_responding_pattern = re.compile(
        r"ACTION_LONG_DELAY responding \(after timeout\)"
    )
    # This warning is logged when api.respond is called after the action call timed out
    timeout_warning_pattern = re.compile(
        r"Cannot send response: no active call found for action_call_id"
    )

    def check_output(line: str) -> None:
        """Check log output for expected messages."""
        if not immediate_future.done() and immediate_pattern.search(line):
            immediate_future.set_result(True)
        elif (
            not short_delay_responding_future.done()
            and short_delay_responding_pattern.search(line)
        ):
            short_delay_responding_future.set_result(True)
        elif (
            not long_delay_starting_future.done()
            and long_delay_starting_pattern.search(line)
        ):
            long_delay_starting_future.set_result(True)
        elif (
            not long_delay_responding_future.done()
            and long_delay_responding_pattern.search(line)
        ):
            long_delay_responding_future.set_result(True)
        elif not timeout_warning_future.done() and timeout_warning_pattern.search(line):
            timeout_warning_future.set_result(True)

    # Run with log monitoring
    async with (
        run_compiled(yaml_config, line_callback=check_output),
        api_client_connected() as client,
    ):
        # Verify device info
        device_info = await client.device_info()
        assert device_info is not None
        assert device_info.name == "api-action-timeout-test"

        # List services
        _, services = await client.list_entities_services()

        # Should have 3 services
        assert len(services) == 3, f"Expected 3 services, found {len(services)}"

        # Find our services
        action_immediate: UserService | None = None
        action_short_delay: UserService | None = None
        action_long_delay: UserService | None = None

        for service in services:
            if service.name == "action_immediate":
                action_immediate = service
            elif service.name == "action_short_delay":
                action_short_delay = service
            elif service.name == "action_long_delay":
                action_long_delay = service

        assert action_immediate is not None, "action_immediate not found"
        assert action_short_delay is not None, "action_short_delay not found"
        assert action_long_delay is not None, "action_long_delay not found"

        # Test 1: Immediate response should work
        response = await client.execute_service(
            action_immediate,
            {},
            return_response=True,
        )
        await asyncio.wait_for(immediate_future, timeout=1.0)
        assert response is not None, "Expected response for immediate action"
        assert response.success is True

        # Test 2: Short delay (200ms) should work within the 500ms timeout
        response = await client.execute_service(
            action_short_delay,
            {},
            return_response=True,
        )
        await asyncio.wait_for(short_delay_responding_future, timeout=1.0)
        assert response is not None, "Expected response for short delay action"
        assert response.success is True

        # Test 3: Long delay (1s) should exceed the 500ms timeout
        # The server-side timeout will clean up the action call after 500ms
        # The client will timeout waiting for the response
        # When the action finally tries to respond after 1s, it will log a warning

        # Start the long delay action (don't await it fully - it will timeout)
        long_delay_task = asyncio.create_task(
            client.execute_service(
                action_long_delay,
                {},
                return_response=True,
                timeout=2.0,  # Give client enough time to see the late response attempt
            )
        )

        # Wait for the action to start
        await asyncio.wait_for(long_delay_starting_future, timeout=1.0)

        # Wait for the action to try to respond (after 1s delay)
        await asyncio.wait_for(long_delay_responding_future, timeout=2.0)

        # Wait for the warning log about no active call
        await asyncio.wait_for(timeout_warning_future, timeout=1.0)

        # The client task should complete (either with None response or timeout)
        # Client timing out is acceptable - the server-side timeout already cleaned up the call
        with contextlib.suppress(TimeoutError):
            await asyncio.wait_for(long_delay_task, timeout=1.0)

        # Verify the system is still functional after the timeout
        # Call the immediate action again to prove cleanup worked
        immediate_future_2 = loop.create_future()

        def check_output_2(line: str) -> None:
            if not immediate_future_2.done() and immediate_pattern.search(line):
                immediate_future_2.set_result(True)

        response = await client.execute_service(
            action_immediate,
            {},
            return_response=True,
        )
        assert response is not None, "System should still work after timeout"
        assert response.success is True
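The remaining hunks apply one mechanical change to the existing integration tests: execute_service is now awaited, since the call can carry a response. A sketch of the resulting call pattern (illustration only; "client", "test_service", and "test_complete" stand in for the per-test fixtures):

import asyncio


async def run_and_wait(client, test_service, test_complete: asyncio.Future) -> None:
    # Previously the tests called client.execute_service(test_service, {}) without
    # awaiting it; with action-response support the call is awaited instead.
    await client.execute_service(test_service, {})
    # Each test then waits on a future that its log callback resolves.
    await asyncio.wait_for(test_complete, timeout=5.0)
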
@@ -88,13 +88,13 @@ async def test_api_conditional_memory(
        assert arg_types["arg_float"] == UserServiceArgType.FLOAT

        # Call simple service
-        client.execute_service(simple_service, {})
+        await client.execute_service(simple_service, {})

        # Wait for service log
        await asyncio.wait_for(service_simple_future, timeout=5.0)

        # Call service with arguments
-        client.execute_service(
+        await client.execute_service(
            service_with_args,
            {
                "arg_string": "test_string",

@@ -114,7 +114,7 @@ async def test_api_custom_services(
        assert custom_arrays_service is not None, "custom_service_with_arrays not found"

        # Test YAML service
-        client.execute_service(yaml_service, {})
+        await client.execute_service(yaml_service, {})
        await asyncio.wait_for(yaml_service_future, timeout=5.0)

        # Verify YAML service with args arguments

@@ -124,7 +124,7 @@ async def test_api_custom_services(
        assert yaml_args_types["my_string"] == UserServiceArgType.STRING

        # Test YAML service with arguments
-        client.execute_service(
+        await client.execute_service(
            yaml_args_service,
            {
                "my_int": 123,

@@ -144,7 +144,7 @@ async def test_api_custom_services(
        assert yaml_many_args_types["arg4"] == UserServiceArgType.STRING

        # Test YAML service with many arguments
-        client.execute_service(
+        await client.execute_service(
            yaml_many_args_service,
            {
                "arg1": 42,

@@ -156,7 +156,7 @@ async def test_api_custom_services(
        await asyncio.wait_for(yaml_many_args_future, timeout=5.0)

        # Test simple CustomAPIDevice service
-        client.execute_service(custom_service, {})
+        await client.execute_service(custom_service, {})
        await asyncio.wait_for(custom_service_future, timeout=5.0)

        # Verify custom_args_service arguments

@@ -168,7 +168,7 @@ async def test_api_custom_services(
        assert arg_types["arg_float"] == UserServiceArgType.FLOAT

        # Test CustomAPIDevice service with arguments
-        client.execute_service(
+        await client.execute_service(
            custom_args_service,
            {
                "arg_string": "test_string",

@@ -188,7 +188,7 @@ async def test_api_custom_services(
        assert array_arg_types["string_array"] == UserServiceArgType.STRING_ARRAY

        # Test CustomAPIDevice service with arrays
-        client.execute_service(
+        await client.execute_service(
            custom_arrays_service,
            {
                "bool_array": [True, False],

@@ -163,7 +163,7 @@ async def test_api_homeassistant(
        assert trigger_service is not None, "trigger_all_tests service not found"

        # Execute all tests
-        client.execute_service(trigger_service, {})
+        await client.execute_service(trigger_service, {})

        # Wait for all tests to complete with appropriate timeouts
        try:

@@ -75,10 +75,12 @@ async def test_api_string_lambda(
        assert char_ptr_service is not None, "test_char_ptr_lambda service not found"

        # Execute all four services to test different lambda return types
-        client.execute_service(string_service, {"input_string": "STRING_FROM_LAMBDA"})
-        client.execute_service(int_service, {"input_number": 42})
-        client.execute_service(float_service, {"input_float": 3.14})
-        client.execute_service(
+        await client.execute_service(
+            string_service, {"input_string": "STRING_FROM_LAMBDA"}
+        )
+        await client.execute_service(int_service, {"input_number": 42})
+        await client.execute_service(float_service, {"input_float": 3.14})
+        await client.execute_service(
            char_ptr_service, {"input_number": 123, "input_string": "test_string"}
        )

@@ -71,7 +71,7 @@ async def test_automation_wait_actions(
        # Test 1: wait_until in automation - trigger 5 times rapidly
        test_service = next((s for s in services if s.name == "test_wait_until"), None)
        assert test_service is not None, "test_wait_until service not found"
-        client.execute_service(test_service, {})
+        await client.execute_service(test_service, {})
        await asyncio.wait_for(test1_complete, timeout=3.0)

        # Verify Test 1: All 5 triggers should complete

@@ -82,7 +82,7 @@ async def test_automation_wait_actions(
        # Test 2: script.wait in automation - trigger 5 times rapidly
        test_service = next((s for s in services if s.name == "test_script_wait"), None)
        assert test_service is not None, "test_script_wait service not found"
-        client.execute_service(test_service, {})
+        await client.execute_service(test_service, {})
        await asyncio.wait_for(test2_complete, timeout=3.0)

        # Verify Test 2: All 5 triggers should complete

@@ -95,7 +95,7 @@ async def test_automation_wait_actions(
            (s for s in services if s.name == "test_wait_timeout"), None
        )
        assert test_service is not None, "test_wait_timeout service not found"
-        client.execute_service(test_service, {})
+        await client.execute_service(test_service, {})
        await asyncio.wait_for(test3_complete, timeout=3.0)

        # Verify Test 3: All 5 triggers should timeout and complete

@@ -67,7 +67,7 @@ async def test_delay_action_cancellation(
        assert test_service is not None, "start_delay_then_restart service not found"

        # Execute the test sequence
-        client.execute_service(test_service, {})
+        await client.execute_service(test_service, {})

        # Wait for the second script to start
        await asyncio.wait_for(second_script_started, timeout=5.0)

@@ -138,7 +138,7 @@ async def test_parallel_script_delays(
        assert test_service is not None, "test_parallel_delays service not found"

        # Execute the test - this will start 3 parallel scripts with 1 second delays
-        client.execute_service(test_service, {})
+        await client.execute_service(test_service, {})

        # Wait for all scripts to complete (should take ~1 second, not 3)
        await asyncio.wait_for(all_scripts_completed, timeout=2.0)

@@ -142,7 +142,7 @@ async def test_continuation_actions(
        # Test 1: IfAction with then branch
        test_service = next((s for s in services if s.name == "test_if_action"), None)
        assert test_service is not None, "test_if_action service not found"
-        client.execute_service(test_service, {"condition": True, "value": 42})
+        await client.execute_service(test_service, {"condition": True, "value": 42})
        await asyncio.wait_for(test1_complete, timeout=2.0)
        assert test_results["if_then"], "IfAction then branch not executed"
        assert test_results["if_complete"], "IfAction did not complete"

@@ -150,7 +150,7 @@ async def test_continuation_actions(
        # Test 1b: IfAction with else branch
        test1_complete = loop.create_future()
        test_results["if_complete"] = False
-        client.execute_service(test_service, {"condition": False, "value": 99})
+        await client.execute_service(test_service, {"condition": False, "value": 99})
        await asyncio.wait_for(test1_complete, timeout=2.0)
        assert test_results["if_else"], "IfAction else branch not executed"
        assert test_results["if_complete"], "IfAction did not complete"

@@ -160,14 +160,14 @@ async def test_continuation_actions(
        assert test_service is not None, "test_nested_if service not found"

        # Both true
-        client.execute_service(test_service, {"outer": True, "inner": True})
+        await client.execute_service(test_service, {"outer": True, "inner": True})
        await asyncio.wait_for(test2_complete, timeout=2.0)
        assert test_results["nested_both_true"], "Nested both true not executed"

        # Outer true, inner false
        test2_complete = loop.create_future()
        test_results["nested_complete"] = False
-        client.execute_service(test_service, {"outer": True, "inner": False})
+        await client.execute_service(test_service, {"outer": True, "inner": False})
        await asyncio.wait_for(test2_complete, timeout=2.0)
        assert test_results["nested_outer_true_inner_false"], (
            "Nested outer true inner false not executed"

@@ -176,7 +176,7 @@ async def test_continuation_actions(
        # Outer false
        test2_complete = loop.create_future()
        test_results["nested_complete"] = False
-        client.execute_service(test_service, {"outer": False, "inner": True})
+        await client.execute_service(test_service, {"outer": False, "inner": True})
        await asyncio.wait_for(test2_complete, timeout=2.0)
        assert test_results["nested_outer_false"], "Nested outer false not executed"

@@ -185,7 +185,7 @@ async def test_continuation_actions(
            (s for s in services if s.name == "test_while_action"), None
        )
        assert test_service is not None, "test_while_action service not found"
-        client.execute_service(test_service, {"max_count": 3})
+        await client.execute_service(test_service, {"max_count": 3})
        await asyncio.wait_for(test3_complete, timeout=2.0)
        assert test_results["while_iterations"] == 3, (
            f"WhileAction expected 3 iterations, got {test_results['while_iterations']}"

@@ -197,7 +197,7 @@ async def test_continuation_actions(
            (s for s in services if s.name == "test_repeat_action"), None
        )
        assert test_service is not None, "test_repeat_action service not found"
-        client.execute_service(test_service, {"count": 5})
+        await client.execute_service(test_service, {"count": 5})
        await asyncio.wait_for(test4_complete, timeout=2.0)
        assert test_results["repeat_iterations"] == 5, (
            f"RepeatAction expected 5 iterations, got {test_results['repeat_iterations']}"

@@ -207,7 +207,7 @@ async def test_continuation_actions(
        # Test 5: Combined (if + repeat + while)
        test_service = next((s for s in services if s.name == "test_combined"), None)
        assert test_service is not None, "test_combined service not found"
-        client.execute_service(test_service, {"do_loop": True, "loop_count": 2})
+        await client.execute_service(test_service, {"do_loop": True, "loop_count": 2})
        await asyncio.wait_for(test5_complete, timeout=2.0)
        # Should execute: repeat 2 times, each iteration does while from iteration down to 0
        # iteration 0: while 0 times = 0

@@ -221,7 +221,7 @@ async def test_continuation_actions(
        # Test 6: Rapid triggers (tests memory efficiency of ContinuationAction)
        test_service = next((s for s in services if s.name == "test_rapid_if"), None)
        assert test_service is not None, "test_rapid_if service not found"
-        client.execute_service(test_service, {})
+        await client.execute_service(test_service, {})
        await asyncio.wait_for(test6_complete, timeout=2.0)
        # Values 1, 2 should hit else (<=2), values 3, 4, 5 should hit then (>2)
        assert test_results["rapid_else"] == 2, (

@@ -98,7 +98,7 @@ async def test_scheduler_bulk_cleanup(
        )

        # Execute the test
-        client.execute_service(trigger_bulk_cleanup_service, {})
+        await client.execute_service(trigger_bulk_cleanup_service, {})

        # Wait for test completion
        try:

@@ -81,7 +81,7 @@ async def test_scheduler_defer_cancel(
        client.subscribe_states(on_state)

        # Execute the test
-        client.execute_service(test_defer_cancel_service, {})
+        await client.execute_service(test_defer_cancel_service, {})

        # Wait for test completion
        try:

@@ -59,7 +59,7 @@ async def test_scheduler_defer_cancels_regular(
        assert test_service is not None, "test_defer_cancels_regular service not found"

        # Execute the test
-        client.execute_service(test_service, {})
+        await client.execute_service(test_service, {})

        # Wait for test completion
        try:

@@ -84,7 +84,7 @@ async def test_scheduler_defer_fifo_simple(
        client.subscribe_states(on_state)

        # Test 1: Test set_timeout(0)
-        client.execute_service(test_set_timeout_service, {})
+        await client.execute_service(test_set_timeout_service, {})

        # Wait for first test completion
        try:

@@ -102,7 +102,7 @@ async def test_scheduler_defer_fifo_simple(
        test_result_future = loop.create_future()

        # Test 2: Test defer()
-        client.execute_service(test_defer_service, {})
+        await client.execute_service(test_defer_service, {})

        # Wait for second test completion
        try:

@@ -92,7 +92,7 @@ async def test_scheduler_defer_stress(
        assert run_stress_test_service is not None, "run_stress_test service not found"

        # Call the run_stress_test service to start the test
-        client.execute_service(run_stress_test_service, {})
+        await client.execute_service(run_stress_test_service, {})

        # Wait for all defers to execute (should be quick)
        try:

@@ -99,7 +99,7 @@ async def test_scheduler_heap_stress(
        )

        # Call the run_heap_stress_test service to start the test
-        client.execute_service(run_stress_test_service, {})
+        await client.execute_service(run_stress_test_service, {})

        # Wait for all callbacks to execute (should be quick, but give more time for scheduling)
        try:

@@ -48,7 +48,7 @@ async def test_scheduler_null_name(
        assert test_null_name_service is not None, "test_null_name service not found"

        # Execute the test
-        client.execute_service(test_null_name_service, {})
+        await client.execute_service(test_null_name_service, {})

        # Wait for test completion
        try:

@@ -120,42 +120,42 @@ async def test_scheduler_pool(

        try:
            # Phase 1: Component lifecycle
-            client.execute_service(phase_services[1], {})
+            await client.execute_service(phase_services[1], {})
            await asyncio.wait_for(phase_futures[1], timeout=1.0)
            await asyncio.sleep(0.05)  # Let timeouts complete

            # Phase 2: Sensor polling
-            client.execute_service(phase_services[2], {})
+            await client.execute_service(phase_services[2], {})
            await asyncio.wait_for(phase_futures[2], timeout=1.0)
            await asyncio.sleep(0.1)  # Let intervals run a bit

            # Phase 3: Communication patterns
-            client.execute_service(phase_services[3], {})
+            await client.execute_service(phase_services[3], {})
            await asyncio.wait_for(phase_futures[3], timeout=1.0)
            await asyncio.sleep(0.1)  # Let heartbeat run

            # Phase 4: Defer patterns
-            client.execute_service(phase_services[4], {})
+            await client.execute_service(phase_services[4], {})
            await asyncio.wait_for(phase_futures[4], timeout=1.0)
            await asyncio.sleep(0.2)  # Let everything settle and recycle

            # Phase 5: Pool reuse verification
-            client.execute_service(phase_services[5], {})
+            await client.execute_service(phase_services[5], {})
            await asyncio.wait_for(phase_futures[5], timeout=1.0)
            await asyncio.sleep(0.1)  # Let Phase 5 timeouts complete and recycle

            # Phase 6: Full pool reuse verification
-            client.execute_service(phase_services[6], {})
+            await client.execute_service(phase_services[6], {})
            await asyncio.wait_for(phase_futures[6], timeout=1.0)
            await asyncio.sleep(0.1)  # Let Phase 6 timeouts complete

            # Phase 7: Same-named defer optimization
-            client.execute_service(phase_services[7], {})
+            await client.execute_service(phase_services[7], {})
            await asyncio.wait_for(phase_futures[7], timeout=1.0)
            await asyncio.sleep(0.05)  # Let the single defer execute

            # Complete test
-            client.execute_service(complete_service, {})
+            await client.execute_service(complete_service, {})
            await asyncio.wait_for(test_complete_future, timeout=0.5)

        except TimeoutError as e:

@@ -108,7 +108,7 @@ async def test_scheduler_rapid_cancellation(
        )

        # Call the service to start the test
-        client.execute_service(run_test_service, {})
+        await client.execute_service(run_test_service, {})

        # Wait for test to complete with timeout
        try:

@@ -79,7 +79,7 @@ async def test_scheduler_recursive_timeout(
        )

        # Call the service to start the test
-        client.execute_service(run_test_service, {})
+        await client.execute_service(run_test_service, {})

        # Wait for test to complete
        try:

@@ -81,7 +81,7 @@ async def test_scheduler_removed_item_race(
        assert run_test_service is not None, "run_test service not found"

        # Execute the test
-        client.execute_service(run_test_service, {})
+        await client.execute_service(run_test_service, {})

        # Wait for test completion
        try:

@@ -98,7 +98,7 @@ async def test_scheduler_simultaneous_callbacks(
        )

        # Call the service to start the test
-        client.execute_service(run_test_service, {})
+        await client.execute_service(run_test_service, {})

        # Wait for test to complete
        try:

@@ -134,27 +134,27 @@ async def test_scheduler_string_lifetime(
        # Run tests sequentially, waiting for each to complete
        try:
            # Test 1
-            client.execute_service(test_services["test1"], {})
+            await client.execute_service(test_services["test1"], {})
            await asyncio.wait_for(test1_complete.wait(), timeout=5.0)

            # Test 2
-            client.execute_service(test_services["test2"], {})
+            await client.execute_service(test_services["test2"], {})
            await asyncio.wait_for(test2_complete.wait(), timeout=5.0)

            # Test 3
-            client.execute_service(test_services["test3"], {})
+            await client.execute_service(test_services["test3"], {})
            await asyncio.wait_for(test3_complete.wait(), timeout=5.0)

            # Test 4
-            client.execute_service(test_services["test4"], {})
+            await client.execute_service(test_services["test4"], {})
            await asyncio.wait_for(test4_complete.wait(), timeout=5.0)

            # Test 5
-            client.execute_service(test_services["test5"], {})
+            await client.execute_service(test_services["test5"], {})
            await asyncio.wait_for(test5_complete.wait(), timeout=5.0)

            # Final check
-            client.execute_service(test_services["final"], {})
+            await client.execute_service(test_services["final"], {})
            await asyncio.wait_for(all_tests_complete.wait(), timeout=5.0)

        except TimeoutError:

@@ -92,7 +92,7 @@ async def test_scheduler_string_name_stress(
        )

        # Call the service to start the test
-        client.execute_service(run_stress_test_service, {})
+        await client.execute_service(run_stress_test_service, {})

        # Wait for test to complete or crash
        try:

@@ -90,7 +90,7 @@ async def test_script_delay_with_params(
        assert test_service is not None, "test_repeat_with_delay service not found"

        # Execute the test
-        client.execute_service(test_service, {})
+        await client.execute_service(test_service, {})

        # Wait for test to complete (10 iterations * ~100ms each + margin)
        try:

@@ -136,7 +136,7 @@ async def test_script_queued(
        # Test 1: Queue depth limit
        test_service = next((s for s in services if s.name == "test_queue_depth"), None)
        assert test_service is not None, "test_queue_depth service not found"
-        client.execute_service(test_service, {})
+        await client.execute_service(test_service, {})
        await asyncio.wait_for(test1_complete, timeout=2.0)
        await asyncio.sleep(0.1)  # Give time for rejections

@@ -151,7 +151,7 @@ async def test_script_queued(
        # Test 2: Ring buffer order
        test_service = next((s for s in services if s.name == "test_ring_buffer"), None)
        assert test_service is not None, "test_ring_buffer service not found"
-        client.execute_service(test_service, {})
+        await client.execute_service(test_service, {})
        await asyncio.wait_for(test2_complete, timeout=2.0)

        # Verify Test 2

@@ -165,7 +165,7 @@ async def test_script_queued(
        # Test 3: Stop clears queue
        test_service = next((s for s in services if s.name == "test_stop_clears"), None)
        assert test_service is not None, "test_stop_clears service not found"
-        client.execute_service(test_service, {})
+        await client.execute_service(test_service, {})
        await asyncio.wait_for(test3_complete, timeout=2.0)

        # Verify Test 3

@@ -179,7 +179,7 @@ async def test_script_queued(
        # Test 4: Rejection enforcement (max_runs=3)
        test_service = next((s for s in services if s.name == "test_rejection"), None)
        assert test_service is not None, "test_rejection service not found"
-        client.execute_service(test_service, {})
+        await client.execute_service(test_service, {})
        await asyncio.wait_for(test4_complete, timeout=2.0)
        await asyncio.sleep(0.1)  # Give time for rejections

@@ -194,7 +194,7 @@ async def test_script_queued(
        # Test 5: No parameters
        test_service = next((s for s in services if s.name == "test_no_params"), None)
        assert test_service is not None, "test_no_params service not found"
-        client.execute_service(test_service, {})
+        await client.execute_service(test_service, {})
        await asyncio.wait_for(test5_complete, timeout=2.0)

        # Verify Test 5

@@ -86,7 +86,7 @@ async def test_wait_until_mid_loop_timing(
        assert test_service is not None, "test_mid_loop_timeout service not found"

        # Execute the test
-        client.execute_service(test_service, {})
+        await client.execute_service(test_service, {})

        # Wait for test to complete (100ms delay + 200ms timeout + margins = ~500ms)
        await asyncio.wait_for(test_complete, timeout=5.0)

@@ -74,7 +74,7 @@ async def test_wait_until_on_boot(
        )
        assert set_flag_service is not None, "set_test_flag service not found"

-        client.execute_service(set_flag_service, {})
+        await client.execute_service(set_flag_service, {})

        # If the fix works, wait_until's loop() will check the condition and proceed
        # If the bug exists, wait_until is stuck with disabled loop and will timeout

@@ -71,7 +71,7 @@ async def test_wait_until_fifo_ordering(
        assert test_service is not None, "test_wait_until_fifo service not found"

        # Execute the test
-        client.execute_service(test_service, {})
+        await client.execute_service(test_service, {})

        # Wait for test to complete
        try: