-
-
Notifications
You must be signed in to change notification settings - Fork 2.2k
adding support for async callbacks and page layouts #3089
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
e1002d5
e24e094
5d492dc
69ee069
950888c
916efdc
0dd778e
2d9a930
b2f9cd6
f1ac667
378099f
52de22e
4189ed6
0ddf254
76a9a56
337ec26
a730b4b
538515d
4f14a1a
96df44e
91400a3
1856f5c
671cb2b
f19fe23
17c824d
fdfd058
fb691c3
2473546
b2e8884
df29ee6
5342573
c79debf
37d649c
eaec04f
68320f4
5b201a1
1aea686
fc6c6bc
503f7c0
c26671e
20dcd67
bb3d881
c6940c7
b7a8c4a
51c1689
eb96c5d
832ab4d
d0b2dd3
e88bc3c
a1bd54f
7f35ecd
28b0693
443f785
07bcd38
e82f561
43fe153
6fd556d
abbfcd2
13a33be
e3a1496
e8bcb22
43d10c4
4b3c011
777a12b
769fc7e
12574d1
3c610b3
d955ce1
513408d
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Large diffs are not rendered by default.
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,5 +1,7 @@ | ||
import traceback | ||
from contextvars import copy_context | ||
import asyncio | ||
from functools import partial | ||
|
||
|
||
from . import BaseBackgroundCallbackManager | ||
|
@@ -16,7 +18,7 @@ class DiskcacheManager(BaseBackgroundCallbackManager): | |
|
||
def __init__(self, cache=None, cache_by=None, expire=None): | ||
""" | ||
Long callback manager that runs callback logic in a subprocess and stores | ||
Background callback manager that runs callback logic in a subprocess and stores | ||
results on disk using diskcache | ||
:param cache: | ||
|
@@ -39,7 +41,7 @@ def __init__(self, cache=None, cache_by=None, expire=None): | |
except ImportError as missing_imports: | ||
raise ImportError( | ||
"""\ | ||
DiskcacheLongCallbackManager requires extra dependencies which can be installed doing | ||
DiskcacheManager requires extra dependencies which can be installed doing | ||
$ pip install "dash[diskcache]"\n""" | ||
) from missing_imports | ||
|
@@ -117,16 +119,52 @@ def clear_cache_entry(self, key): | |
|
||
# noinspection PyUnresolvedReferences | ||
def call_job_fn(self, key, job_fn, args, context): | ||
""" | ||
Call the job function, supporting both sync and async jobs. | ||
Args: | ||
key: Cache key for the job. | ||
job_fn: The job function to execute. | ||
args: Arguments for the job function. | ||
context: Context for the job. | ||
Returns: | ||
The PID of the spawned process or None for async execution. | ||
""" | ||
# pylint: disable-next=import-outside-toplevel,no-name-in-module,import-error | ||
from multiprocess import Process # type: ignore | ||
|
||
# pylint: disable-next=not-callable | ||
proc = Process( | ||
process = Process( | ||
target=job_fn, | ||
args=(key, self._make_progress_key(key), args, context), | ||
) | ||
proc.start() | ||
return proc.pid | ||
process.start() | ||
return process.pid | ||
|
||
@staticmethod | ||
def _run_async_in_process(job_fn, key, args, context): | ||
""" | ||
Helper function to run an async job in a new process. | ||
Args: | ||
job_fn: The async job function. | ||
key: Cache key for the job. | ||
args: Arguments for the job function. | ||
context: Context for the job. | ||
""" | ||
# Create a new event loop for the process | ||
loop = asyncio.new_event_loop() | ||
asyncio.set_event_loop(loop) | ||
|
||
# Wrap the job function to include key and progress | ||
async_job = partial(job_fn, key, args, context) | ||
|
||
try: | ||
# Run the async job and wait for completion | ||
loop.run_until_complete(async_job()) | ||
except Exception as e: | ||
# Handle errors, log them, and cache if necessary | ||
raise Exception(str(e)) from e | ||
finally: | ||
loop.close() | ||
|
||
def get_progress(self, key): | ||
progress_key = self._make_progress_key(key) | ||
|
@@ -169,7 +207,9 @@ def get_updated_props(self, key): | |
return result | ||
|
||
|
||
# pylint: disable-next=too-many-statements | ||
def _make_job_fn(fn, cache, progress): | ||
# pylint: disable-next=too-many-statements | ||
def job_fn(result_key, progress_key, user_callback_args, context): | ||
def _set_progress(progress_value): | ||
if not isinstance(progress_value, (list, tuple)): | ||
|
@@ -216,7 +256,47 @@ def run(): | |
if not errored: | ||
cache.set(result_key, user_callback_output) | ||
|
||
ctx.run(run) | ||
async def async_run(): | ||
c = AttributeDict(**context) | ||
c.ignore_register_page = False | ||
c.updated_props = ProxySetProps(_set_props) | ||
context_value.set(c) | ||
errored = False | ||
try: | ||
if isinstance(user_callback_args, dict): | ||
user_callback_output = await fn( | ||
*maybe_progress, **user_callback_args | ||
) | ||
elif isinstance(user_callback_args, (list, tuple)): | ||
user_callback_output = await fn( | ||
*maybe_progress, *user_callback_args | ||
) | ||
else: | ||
user_callback_output = await fn(*maybe_progress, user_callback_args) | ||
except PreventUpdate: | ||
errored = True | ||
cache.set(result_key, {"_dash_no_update": "_dash_no_update"}) | ||
except Exception as err: # pylint: disable=broad-except | ||
errored = True | ||
cache.set( | ||
result_key, | ||
{ | ||
"background_callback_error": { | ||
"msg": str(err), | ||
"tb": traceback.format_exc(), | ||
} | ||
}, | ||
) | ||
if asyncio.iscoroutine(user_callback_output): | ||
user_callback_output = await user_callback_output | ||
if not errored: | ||
cache.set(result_key, user_callback_output) | ||
|
||
if asyncio.iscoroutinefunction(fn): | ||
func = partial(ctx.run, async_run) | ||
asyncio.run(func()) | ||
else: | ||
ctx.run(run) | ||
Comment on lines
+295
to
+299
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Same comment as per celery_manager, but for this one I am pretty sure there is no loop running. |
||
|
||
return job_fn | ||
|
||
|
Large diffs are not rendered by default.
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -163,6 +163,12 @@ def percy_snapshot( | |
""" | ||
if widths is None: | ||
widths = [1280] | ||
try: | ||
import asgiref # pylint: disable=unused-import, import-outside-toplevel # noqa: F401, C0415 | ||
|
||
name += "_async" | ||
except ImportError: | ||
pass | ||
Comment on lines
+166
to
+171
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. We don't really need the Percy integration if we run the tests separately in a new GitHub Action. |
||
|
||
logger.info("taking snapshot name => %s", name) | ||
try: | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -31,7 +31,7 @@ | |
"private::test.unit-dash": "pytest tests/unit", | ||
"private::test.unit-renderer": "cd dash/dash-renderer && npm run test", | ||
"private::test.unit-generation": "cd @plotly/dash-generator-test-component-typescript && npm ci && npm test", | ||
"private::test.integration-dash": "TESTFILES=$(circleci tests glob \"tests/integration/**/test_*.py\" | circleci tests split --split-by=timings) && pytest --headless --nopercyfinalize --junitxml=test-reports/junit_intg.xml ${TESTFILES}", | ||
"private::test.integration-dash": "TESTFILES=$(circleci tests glob \"tests/integration/**/test_*.py\" | circleci tests split --split-by=timings) && pytest --headless --nopercyfinalize --junitxml=test-reports/junit_intg.xml ${TESTFILES} && python rerun_failed_tests.py", | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. We have a package |
||
"private::test.integration-dash-import": "cd tests/integration/dash && python dash_import_test.py", | ||
"cibuild": "run-s private::cibuild.*", | ||
"build": "run-p private::build.*", | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1 @@ | ||
flask[async] |
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,25 @@ | ||
import xml.etree.ElementTree as ET | ||
import subprocess | ||
|
||
def parse_test_results(file_path):
    """
    Extract the names of non-passing test cases from a junit-style XML report.

    Args:
        file_path: Path to the junit XML file produced by pytest.

    Returns:
        List of test case names that failed or errored.
    """
    root = ET.parse(file_path).getroot()
    # A test that blows up during setup/collection is reported as <error>,
    # not <failure>; include both so the rerun covers every non-passing test.
    return [
        testcase.get('name')
        for testcase in root.iter('testcase')
        if testcase.find('failure') is not None
        or testcase.find('error') is not None
    ]
|
||
def rerun_failed_tests(failed_tests):
    """
    Re-run the given failed tests once with pytest and echo their output.

    Args:
        failed_tests: List of test names extracted from the junit report.
    """
    if not failed_tests:
        print('All tests passed.')
        return
    print("Initial failed tests:", failed_tests)
    # Pass the names as a list with shell=False: a single f-string command
    # run through the shell breaks on pytest's parametrized test names
    # (brackets are shell glob characters) and is injection-prone.
    result = subprocess.run(
        ['pytest', '--headless', *failed_tests],
        capture_output=True,
        text=True,
        check=False,
    )
    print(result.stdout)
    print(result.stderr)
|
||
if __name__ == "__main__":
    # Parse the integration-test junit report and retry anything that failed.
    rerun_failed_tests(parse_test_results('test-reports/junit_intg.xml'))
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,32 @@ | ||
"""Async background-callback fixture app: echoes an input value to a div."""
import time

from dash import Dash, Input, Output, dcc, html

from .utils import get_background_callback_manager

# Backend (celery or diskcache) is selected via environment variables;
# ``handle`` is exposed so a celery worker can target this module.
background_callback_manager = get_background_callback_manager()
handle = background_callback_manager.handle

app = Dash(__name__)
app.layout = html.Div(
    [
        dcc.Input(id="input", value="initial value"),
        # Mixed-type children exercise layout serialization around the output.
        html.Div(html.Div([1.5, None, "string", html.Div(id="output-1")])),
    ]
)


@app.callback(
    Output("output-1", "children"),
    [Input("input", "value")],
    interval=500,
    manager=background_callback_manager,
    background=True,
)
async def update_output(value):
    # NOTE(review): time.sleep blocks the event loop in an async def;
    # presumably intentional here to simulate a slow sync job — confirm.
    time.sleep(0.1)
    return value


if __name__ == "__main__":
    app.run(debug=True)
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,50 @@ | ||
"""Async background-callback fixture app exercising set_props side updates."""
import time

from dash import Dash, Input, Output, html, callback, set_props

from .utils import get_background_callback_manager

# Backend (celery or diskcache) is selected via environment variables;
# ``handle`` is exposed so a celery worker can target this module.
background_callback_manager = get_background_callback_manager()
handle = background_callback_manager.handle

app = Dash(__name__, background_callback_manager=background_callback_manager)

app.layout = html.Div(
    [
        html.Button("start", id="start"),
        html.Div(id="secondary"),
        html.Div(id="no-output"),
        html.Div("initial", id="output"),
        html.Button("start-no-output", id="start-no-output"),
    ]
)


@callback(
    Output("output", "children"),
    Input("start", "n_clicks"),
    prevent_initial_call=True,
    background=True,
    interval=500,
)
async def on_click(_):
    # Push arbitrary props to a second component while the job is running.
    set_props("secondary", {"children": "first"})
    set_props("secondary", {"style": {"background": "red"}})
    time.sleep(2)
    set_props("secondary", {"children": "second"})
    return "completed"


@callback(
    Input("start-no-output", "n_clicks"),
    prevent_initial_call=True,
    background=True,
)
async def on_click2(_):
    # Callback with no Output: every update flows through set_props.
    set_props("no-output", {"children": "started"})
    time.sleep(2)
    set_props("no-output", {"children": "completed"})


if __name__ == "__main__":
    app.run(debug=True)
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,15 @@ | ||
import os | ||
|
||
import pytest | ||
|
||
|
||
# Always exercise the diskcache backend; add celery only when a Redis
# broker is reachable via REDIS_URL.
managers = ["diskcache"]
if "REDIS_URL" in os.environ:
    managers.insert(0, "celery")
else:
    print("Skipping celery tests because REDIS_URL is not defined")


@pytest.fixture(params=managers)
def manager(request):
    """Parametrized fixture yielding each available backend name."""
    return request.param
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,63 @@ | ||
import sys | ||
import time | ||
from multiprocessing import Lock | ||
|
||
import pytest | ||
from flaky import flaky | ||
|
||
from tests.utils import is_dash_async | ||
from .utils import setup_background_callback_app | ||
|
||
|
||
def test_001ab_arbitrary(dash_duo, manager):
    """Async background callbacks can push arbitrary props via set_props."""
    # Async callbacks need asgiref; skip silently when it is not installed.
    if not is_dash_async():
        return
    with setup_background_callback_app(manager, "app_arbitrary_async") as app:
        dash_duo.start_server(app)

        dash_duo.wait_for_text_to_equal("#output", "initial")
        # pause for sync
        time.sleep(0.2)
        dash_duo.find_element("#start").click()

        # Side updates land before the callback's own Output resolves.
        dash_duo.wait_for_text_to_equal("#secondary", "first")
        dash_duo.wait_for_style_to_equal(
            "#secondary", "background-color", "rgba(255, 0, 0, 1)"
        )
        dash_duo.wait_for_text_to_equal("#output", "initial")
        dash_duo.wait_for_text_to_equal("#secondary", "second")
        dash_duo.wait_for_text_to_equal("#output", "completed")

        dash_duo.find_element("#start-no-output").click()

        # A callback with no Output still delivers set_props updates.
        dash_duo.wait_for_text_to_equal("#no-output", "started")
        dash_duo.wait_for_text_to_equal("#no-output", "completed")
||
|
||
@pytest.mark.skipif(
    sys.version_info < (3, 7), reason="Python 3.6 long callbacks tests hangs up"
)
@flaky(max_runs=3)
def test_002ab_basic(dash_duo, manager):
    """
    Make sure that we settle to the correct final value when handling rapid inputs
    """
    # Async callbacks need asgiref; skip silently when it is not installed.
    if not is_dash_async():
        return
    lock = Lock()
    with setup_background_callback_app(manager, "app1_async") as app:
        dash_duo.start_server(app)
        dash_duo.wait_for_text_to_equal("#output-1", "initial value", 15)
        input_ = dash_duo.find_element("#input")
        # pause for sync
        time.sleep(0.2)
        dash_duo.clear_input(input_)

        # Type one character at a time to generate a burst of callbacks.
        for key in "hello world":
            with lock:
                input_.send_keys(key)

        dash_duo.wait_for_text_to_equal("#output-1", "hello world", 8)

        assert not dash_duo.redux_state_is_loading
        assert dash_duo.get_logs() == []
Large diffs are not rendered by default.
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,155 @@ | ||
# pylint: disable=import-outside-toplevel,global-statement,subprocess-popen-preexec-fn,W0201 | ||
|
||
import os | ||
import shutil | ||
import subprocess | ||
import tempfile | ||
import time | ||
from contextlib import contextmanager | ||
|
||
import psutil | ||
import redis | ||
|
||
from dash.background_callback import DiskcacheManager | ||
|
||
manager = None | ||
|
||
|
||
class TestDiskCacheManager(DiskcacheManager):
    """DiskcacheManager that records the PID of every job it spawns."""

    def __init__(self, cache=None, cache_by=None, expire=None):
        super().__init__(cache=cache, cache_by=cache_by, expire=expire)
        # PIDs of spawned job processes, so teardown can terminate leftovers.
        self.running_jobs = []

    def call_job_fn(self, key, job_fn, args, context):
        """Spawn the job via the parent class and remember its PID."""
        job_pid = super().call_job_fn(key, job_fn, args, context)
        self.running_jobs.append(job_pid)
        return job_pid
|
||
|
||
def get_background_callback_manager():
    """
    Get the background callback manager configured by environment variables.

    LONG_CALLBACK_MANAGER selects the backend ("celery" or "diskcache").

    Returns:
        A CeleryManager or TestDiskCacheManager instance, also stored in the
        module-level ``manager`` global so teardown code can reach it.

    Raises:
        ValueError: if LONG_CALLBACK_MANAGER names neither backend.
    """
    global manager

    # Read the selector once instead of repeating the env lookup per branch.
    manager_name = os.environ.get("LONG_CALLBACK_MANAGER", None)
    if manager_name == "celery":
        from dash.background_callback import CeleryManager
        from celery import Celery

        celery_app = Celery(
            __name__,
            broker=os.environ.get("CELERY_BROKER"),
            backend=os.environ.get("CELERY_BACKEND"),
        )
        background_callback_manager = CeleryManager(celery_app)
        # NOTE(review): host/port are hard-coded here while the broker above
        # comes from CELERY_BROKER — confirm both point at the same server.
        redis_conn = redis.Redis(host="localhost", port=6379, db=1)
        background_callback_manager.test_lock = redis_conn.lock("test-lock")
    elif manager_name == "diskcache":
        import diskcache

        cache = diskcache.Cache(os.environ.get("DISKCACHE_DIR"))
        background_callback_manager = TestDiskCacheManager(cache)
        background_callback_manager.test_lock = diskcache.Lock(cache, "test-lock")
    else:
        raise ValueError(
            "Invalid long callback manager specified as LONG_CALLBACK_MANAGER "
            "environment variable"
        )

    manager = background_callback_manager

    return background_callback_manager
|
||
|
||
def kill(proc_pid):
    """
    Best-effort kill of a process and all of its children.

    Args:
        proc_pid: PID of the root process to terminate.
    """
    # Any of these calls can race with the process exiting on its own;
    # psutil raises NoSuchProcess in that case, which previously crashed
    # teardown. Swallow it: the goal (process gone) is already achieved.
    try:
        process = psutil.Process(proc_pid)
    except psutil.NoSuchProcess:
        return
    for proc in process.children(recursive=True):
        try:
            proc.kill()
        except psutil.NoSuchProcess:
            pass
    try:
        process.kill()
    except psutil.NoSuchProcess:
        pass
|
||
|
||
@contextmanager
def setup_background_callback_app(manager_name, app_name):
    """
    Context manager yielding an imported fixture app wired to the named
    background-callback backend, with full environment/process cleanup.

    Args:
        manager_name: "celery" or "diskcache".
        app_name: Module name under tests.async_tests to import as the app.
    """
    from dash.testing.application_runners import import_app

    if manager_name == "celery":
        # Env vars must be set before import_app so get_background_callback_manager
        # in the app module picks the celery backend.
        os.environ["LONG_CALLBACK_MANAGER"] = "celery"
        redis_url = os.environ["REDIS_URL"].rstrip("/")
        os.environ["CELERY_BROKER"] = f"{redis_url}/0"
        os.environ["CELERY_BACKEND"] = f"{redis_url}/1"

        # Clear redis of cached values
        redis_conn = redis.Redis(host="localhost", port=6379, db=1)
        cache_keys = redis_conn.keys()
        if cache_keys:
            redis_conn.delete(*cache_keys)

        # Start a worker against the app module's ``handle`` celery app.
        worker = subprocess.Popen(
            [
                "celery",
                "-A",
                f"tests.async_tests.{app_name}:handle",
                "worker",
                "-P",
                "prefork",
                "--concurrency",
                "2",
                "--loglevel=info",
            ],
            encoding="utf8",
            preexec_fn=os.setpgrp,
            stderr=subprocess.PIPE,
        )
        # Wait for the worker to be ready, if you cancel before it is ready, the job
        # will still be queued.
        lines = []
        for line in iter(worker.stderr.readline, ""):
            if "ready" in line:
                break
            lines.append(line)
        else:
            # stderr closed without the readiness banner: worker never started.
            error = "\n".join(lines)
            raise RuntimeError(f"celery failed to start: {error}")

        try:
            yield import_app(f"tests.async_tests.{app_name}")
        finally:
            # Interval may run one more time after settling on final app state
            # Sleep for 1 interval of time
            time.sleep(0.5)
            os.environ.pop("LONG_CALLBACK_MANAGER")
            os.environ.pop("CELERY_BROKER")
            os.environ.pop("CELERY_BACKEND")
            kill(worker.pid)
            from dash import page_registry

            # Imported apps may register pages; clear global state for the next test.
            page_registry.clear()

    elif manager_name == "diskcache":
        os.environ["LONG_CALLBACK_MANAGER"] = "diskcache"
        # Fresh cache dir per run so stale results never leak between tests.
        cache_directory = tempfile.mkdtemp(prefix="lc-diskcache-")
        print(cache_directory)
        os.environ["DISKCACHE_DIR"] = cache_directory
        try:
            app = import_app(f"tests.async_tests.{app_name}")
            yield app
        finally:
            # Interval may run one more time after settling on final app state
            # Sleep for a couple of intervals
            time.sleep(2.0)

            # ``manager`` global is set by get_background_callback_manager;
            # terminate any job processes it recorded.
            if hasattr(manager, "running_jobs"):
                for job in manager.running_jobs:
                    manager.terminate_job(job)

            shutil.rmtree(cache_directory, ignore_errors=True)
            os.environ.pop("LONG_CALLBACK_MANAGER")
            os.environ.pop("DISKCACHE_DIR")
            from dash import page_registry

            # Imported apps may register pages; clear global state for the next test.
            page_registry.clear()
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,8 @@ | ||
def is_dash_async():
    """Return True when async callback support is available (asgiref installed)."""
    try:
        __import__("asgiref")
    except ImportError:
        return False
    return True
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Are we guaranteed to be out of the asyncio loop? In which case, the run might fail. Otherwise it might be better to only have the async_run definition so there is less code repetition.