Just disable timing-related assertions in CI
That way there's no risk of periodic non-deterministic test failures.
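In outline: a new `--skip-timing-checks` pytest command-line option, a session-scoped fixture that exposes it, and an `if not skip_timing_checks:` guard around each wall-clock assertion. A minimal, self-contained sketch of the same pattern (the file split and the sleep-based test are illustrative stand-ins, not the repository's actual tests):

# conftest.py -- sketch of the opt-out mechanism
import pytest

def pytest_addoption(parser):
    # Defaults to False, so plain local runs keep the timing assertions.
    parser.addoption("--skip-timing-checks", action="store_true", default=False,
                     help="Skip timing-related assertions in tests")

@pytest.fixture(scope="session")
def skip_timing_checks(pytestconfig):
    """True when the suite was invoked with --skip-timing-checks."""
    return pytestconfig.getoption("--skip-timing-checks")

# test_timing.py -- hypothetical test consuming the fixture
import time

def test_workload_finishes_quickly(skip_timing_checks):
    start = time.time()
    time.sleep(0.1)  # stand-in for the parallel work under test
    elapsed = time.time() - start
    # Functional assertions stay unconditional; only the wall-clock
    # bound is gated behind the CI opt-out.
    if not skip_timing_checks:
        assert elapsed < 0.5, f"took {elapsed:.2f}s, expected < 0.5s"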
.github/workflows/test-execution.yml
@@ -24,7 +24,7 @@ jobs:
           python -m pip install --upgrade pip
           pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cpu
           pip install -r requirements.txt
-          pip install -r tests-unit/requirements.txt
-      - name: Run Execution Tests
+          pip install websocket-client
+      - name: Run Execution Tests (with timing checks disabled)
         run: |
-          python -m pytest tests/execution -v
+          python -m pytest tests/execution -v --skip-timing-checks
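With the flag always passed in CI, a slow runner can no longer turn a timing assertion into a spurious red build, while a plain local `python -m pytest tests/execution -v` still exercises the timing checks. The remaining hunks add the option and fixture to the suite's pytest conftest, then thread the fixture through each timing-sensitive test.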
@@ -6,6 +6,7 @@ def pytest_addoption(parser):
     parser.addoption('--output_dir', action="store", default='tests/inference/samples', help='Output directory for generated images')
     parser.addoption("--listen", type=str, default="127.0.0.1", metavar="IP", nargs="?", const="0.0.0.0", help="Specify the IP address to listen on (default: 127.0.0.1). If --listen is provided without an argument, it defaults to 0.0.0.0. (listens on all)")
     parser.addoption("--port", type=int, default=8188, help="Set the listen port.")
+    parser.addoption("--skip-timing-checks", action="store_true", default=False, help="Skip timing-related assertions in tests (useful for CI environments with variable performance)")
 
 # This initializes args at the beginning of the test session
 @pytest.fixture(scope="session", autouse=True)
@@ -19,6 +20,11 @@ def args_pytest(pytestconfig):
 
     return args
 
+@pytest.fixture(scope="session")
+def skip_timing_checks(pytestconfig):
+    """Fixture that returns whether timing checks should be skipped."""
+    return pytestconfig.getoption("--skip-timing-checks")
+
 def pytest_collection_modifyitems(items):
     # Modifies items so tests run in the correct order
 
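The fixture is session-scoped because a command-line flag cannot change mid-run, so one `getoption` lookup serves the whole session; a test opts in simply by naming `skip_timing_checks` as a parameter, as the hunks below show. For a one-off check, a hypothetical equivalent could read the same option through pytest's built-in `request` fixture instead:

import time

def test_timing_direct(request):
    start = time.time()
    time.sleep(0.05)  # stand-in workload
    # request.config is the same Config object the pytestconfig fixture returns
    if not request.config.getoption("--skip-timing-checks"):
        assert time.time() - start < 0.5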
@@ -81,7 +81,7 @@ class TestAsyncNodes:
         assert len(result_images) == 1, "Should have 1 image"
         assert np.array(result_images[0]).min() == 0 and np.array(result_images[0]).max() == 0, "Image should be black"
 
-    def test_multiple_async_parallel_execution(self, client: ComfyClient, builder: GraphBuilder):
+    def test_multiple_async_parallel_execution(self, client: ComfyClient, builder: GraphBuilder, skip_timing_checks):
         """Test that multiple async nodes execute in parallel."""
         # Warmup execution to ensure server is fully initialized
         run_warmup(client)
@@ -104,6 +104,7 @@ class TestAsyncNodes:
         elapsed_time = time.time() - start_time
 
         # Should take ~0.5s (max duration) not 1.2s (sum of durations)
-        assert elapsed_time < 0.8, f"Parallel execution took {elapsed_time}s, expected < 0.8s"
+        if not skip_timing_checks:
+            assert elapsed_time < 0.8, f"Parallel execution took {elapsed_time}s, expected < 0.8s"
 
         # Verify all nodes executed
@@ -150,7 +151,7 @@ class TestAsyncNodes:
         with pytest.raises(urllib.error.HTTPError):
             client.run(g)
 
-    def test_async_lazy_evaluation(self, client: ComfyClient, builder: GraphBuilder):
+    def test_async_lazy_evaluation(self, client: ComfyClient, builder: GraphBuilder, skip_timing_checks):
         """Test async nodes with lazy evaluation."""
         # Warmup execution to ensure server is fully initialized
         run_warmup(client, prefix="warmup_lazy")
@@ -173,6 +174,7 @@ class TestAsyncNodes:
         elapsed_time = time.time() - start_time
 
         # Should only execute sleep1, not sleep2
-        assert elapsed_time < 0.5, f"Should skip sleep2, took {elapsed_time}s"
+        if not skip_timing_checks:
+            assert elapsed_time < 0.5, f"Should skip sleep2, took {elapsed_time}s"
         assert result.did_run(sleep1), "Sleep1 should have executed"
         assert not result.did_run(sleep2), "Sleep2 should have been skipped"
@@ -310,7 +312,7 @@ class TestAsyncNodes:
         images = result.get_images(output)
         assert len(images) == 1, "Should have blocked second image"
 
-    def test_async_caching_behavior(self, client: ComfyClient, builder: GraphBuilder):
+    def test_async_caching_behavior(self, client: ComfyClient, builder: GraphBuilder, skip_timing_checks):
         """Test that async nodes are properly cached."""
         # Warmup execution to ensure server is fully initialized
         run_warmup(client, prefix="warmup_cache")
@@ -330,9 +332,10 @@ class TestAsyncNodes:
         elapsed_time = time.time() - start_time
 
         assert not result2.did_run(sleep_node), "Should be cached"
-        assert elapsed_time < 0.1, f"Cached run took {elapsed_time}s, should be instant"
+        if not skip_timing_checks:
+            assert elapsed_time < 0.1, f"Cached run took {elapsed_time}s, should be instant"
 
-    def test_async_with_dynamic_prompts(self, client: ComfyClient, builder: GraphBuilder):
+    def test_async_with_dynamic_prompts(self, client: ComfyClient, builder: GraphBuilder, skip_timing_checks):
         """Test async nodes within dynamically generated prompts."""
         # Warmup execution to ensure server is fully initialized
         run_warmup(client, prefix="warmup_dynamic")
@@ -354,6 +357,7 @@ class TestAsyncNodes:
         elapsed_time = time.time() - start_time
 
         # Should execute async nodes in parallel within dynamic prompt
-        assert elapsed_time < 1.0, f"Dynamic async execution took {elapsed_time}s"
+        if not skip_timing_checks:
+            assert elapsed_time < 1.0, f"Dynamic async execution took {elapsed_time}s"
         assert result.did_run(dynamic_async)
 
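Note the shape of every gated hunk above: only the elapsed-time bound moves under `if not skip_timing_checks:`, while the functional assertions (`result.did_run(...)`, image and caching checks) remain unconditional, so CI keeps verifying behavior even when it stops verifying speed. The same treatment is applied in `TestExecution` below.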
@@ -518,7 +518,7 @@ class TestExecution:
         assert numpy.array(images[0]).min() == 63 and numpy.array(images[0]).max() == 63, "Image should have value 0.25"
         assert not result.did_run(test_node), "The execution should have been cached"
 
-    def test_parallel_sleep_nodes(self, client: ComfyClient, builder: GraphBuilder):
+    def test_parallel_sleep_nodes(self, client: ComfyClient, builder: GraphBuilder, skip_timing_checks):
         # Warmup execution to ensure server is fully initialized
         run_warmup(client)
 
@@ -541,6 +541,7 @@ class TestExecution:
 
         # The test should take around 3.0 seconds (the longest sleep duration)
         # plus some overhead, but definitely less than the sum of all sleeps (9.0s)
-        assert elapsed_time < 8.9, f"Parallel execution took {elapsed_time}s, expected less than 8.9s"
+        if not skip_timing_checks:
+            assert elapsed_time < 8.9, f"Parallel execution took {elapsed_time}s, expected less than 8.9s"
 
         # Verify that all nodes executed
@@ -548,7 +549,7 @@ class TestExecution:
         assert result.did_run(sleep_node2), "Sleep node 2 should have run"
         assert result.did_run(sleep_node3), "Sleep node 3 should have run"
 
-    def test_parallel_sleep_expansion(self, client: ComfyClient, builder: GraphBuilder):
+    def test_parallel_sleep_expansion(self, client: ComfyClient, builder: GraphBuilder, skip_timing_checks):
         # Warmup execution to ensure server is fully initialized
         run_warmup(client)
 
@@ -575,6 +576,7 @@ class TestExecution:
         # Similar to the previous test, expect parallel execution of the sleep nodes
         # which should complete in less than the sum of all sleeps
         # Lots of leeway here since Windows CI is slow
-        assert elapsed_time < 13.0, f"Expansion execution took {elapsed_time}s"
+        if not skip_timing_checks:
+            assert elapsed_time < 13.0, f"Expansion execution took {elapsed_time}s"
 
         # Verify the parallel sleep node executed