diff --git a/.github/workflows/test-e2e-vizro-performance.yml b/.github/workflows/test-e2e-vizro-performance.yml new file mode 100644 index 000000000..f36a10562 --- /dev/null +++ b/.github/workflows/test-e2e-vizro-performance.yml @@ -0,0 +1,48 @@ +name: e2e vizro performance tests + +defaults: + run: + working-directory: vizro-core + +permissions: + contents: read + +on: + push: + branches: + - main + pull_request: + branches: + - main + +env: + PYTHONUNBUFFERED: 1 + FORCE_COLOR: 1 + PYTHON_VERSION: "3.13" + +jobs: + test-e2e-vizro-performance-tests: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v5 + - name: Set up Python + uses: actions/setup-python@v6 + with: + python-version: ${{ env.PYTHON_VERSION }} + - name: Install Hatch + run: pip install hatch + - name: Install Playwright and browser + run: | + hatch run pip install playwright + hatch run playwright install --with-deps chromium + - name: Run tests + run: | + hatch run test-e2e-vizro-performance + + - name: Create artifacts + uses: actions/upload-artifact@v5 + if: failure() + with: + name: performance-artifacts + path: | + /home/runner/work/vizro/vizro/vizro-core/*.png diff --git a/vizro-core/hatch.toml b/vizro-core/hatch.toml index 8ddf5205a..8442d5d31 100644 --- a/vizro-core/hatch.toml +++ b/vizro-core/hatch.toml @@ -91,6 +91,11 @@ test-e2e-vizro-http-requests = [ "tests/tests_utils/e2e/vizro/dashboards/wait-for-it.sh 127.0.0.1:5002 -t 30", "pytest -vs --reruns 1 tests/e2e/vizro/test_http_requests --headless {args}" ] +test-e2e-vizro-performance = [ + "gunicorn dashboard_performance:app -b 0.0.0.0:5002 -w 1 --timeout 90 &", + "tests/tests_utils/e2e/vizro/dashboards/wait-for-it.sh 127.0.0.1:5002 -t 30", + "pytest -vs --reruns 1 tests/e2e/vizro/test_performance --headless {args}" +] test-e2e-vizro-screenshots = [ "gunicorn dashboard:app -b 0.0.0.0:5002 -w 1 --timeout 90 &", "gunicorn dashboard_one_page:app -b 0.0.0.0:5003 -w 1 --timeout 90 &", diff --git 
a/vizro-core/tests/e2e/vizro/dashboards/default/dashboard.py b/vizro-core/tests/e2e/vizro/dashboards/default/dashboard.py index 2a9b54cde..d06c889a0 100644 --- a/vizro-core/tests/e2e/vizro/dashboards/default/dashboard.py +++ b/vizro-core/tests/e2e/vizro/dashboards/default/dashboard.py @@ -178,4 +178,4 @@ app = Vizro(assets_folder="../assets").build(dashboard) if __name__ == "__main__": - app.run(debug=True) + app.run() diff --git a/vizro-core/tests/e2e/vizro/dashboards/default/dashboard_http_requests.py b/vizro-core/tests/e2e/vizro/dashboards/default/dashboard_http_requests.py index fe3ad5038..2ce6b9392 100644 --- a/vizro-core/tests/e2e/vizro/dashboards/default/dashboard_http_requests.py +++ b/vizro-core/tests/e2e/vizro/dashboards/default/dashboard_http_requests.py @@ -304,4 +304,4 @@ def my_custom_action(t: int): app = Vizro().build(dashboard) if __name__ == "__main__": - app.run(debug=True) + app.run() diff --git a/vizro-core/tests/e2e/vizro/dashboards/default/dashboard_navbar_accordions.py b/vizro-core/tests/e2e/vizro/dashboards/default/dashboard_navbar_accordions.py index d4e2afba7..177a686b0 100644 --- a/vizro-core/tests/e2e/vizro/dashboards/default/dashboard_navbar_accordions.py +++ b/vizro-core/tests/e2e/vizro/dashboards/default/dashboard_navbar_accordions.py @@ -37,4 +37,4 @@ ) if __name__ == "__main__": - app.run(debug=True) + app.run() diff --git a/vizro-core/tests/e2e/vizro/dashboards/default/dashboard_navbar_navlink.py b/vizro-core/tests/e2e/vizro/dashboards/default/dashboard_navbar_navlink.py index d55d5833c..7fc5b45f7 100644 --- a/vizro-core/tests/e2e/vizro/dashboards/default/dashboard_navbar_navlink.py +++ b/vizro-core/tests/e2e/vizro/dashboards/default/dashboard_navbar_navlink.py @@ -45,4 +45,4 @@ app = Vizro(assets_folder="../assets").build(dashboard) if __name__ == "__main__": - app.run(debug=True) + app.run() diff --git a/vizro-core/tests/e2e/vizro/dashboards/default/dashboard_navbar_pages.py 
b/vizro-core/tests/e2e/vizro/dashboards/default/dashboard_navbar_pages.py index 13f8ee621..ca19e0c81 100644 --- a/vizro-core/tests/e2e/vizro/dashboards/default/dashboard_navbar_pages.py +++ b/vizro-core/tests/e2e/vizro/dashboards/default/dashboard_navbar_pages.py @@ -44,4 +44,4 @@ ) if __name__ == "__main__": - app.run(debug=True) + app.run() diff --git a/vizro-core/tests/e2e/vizro/dashboards/default/dashboard_one_page.py b/vizro-core/tests/e2e/vizro/dashboards/default/dashboard_one_page.py index 88768bbde..80dcff3f1 100644 --- a/vizro-core/tests/e2e/vizro/dashboards/default/dashboard_one_page.py +++ b/vizro-core/tests/e2e/vizro/dashboards/default/dashboard_one_page.py @@ -12,4 +12,4 @@ app = Vizro(assets_folder="../assets").build(dashboard) if __name__ == "__main__": - app.run(debug=True) + app.run() diff --git a/vizro-core/tests/e2e/vizro/dashboards/default/dashboard_performance.py b/vizro-core/tests/e2e/vizro/dashboards/default/dashboard_performance.py new file mode 100644 index 000000000..62bee826e --- /dev/null +++ b/vizro-core/tests/e2e/vizro/dashboards/default/dashboard_performance.py @@ -0,0 +1,42 @@ +import pandas as pd + +import vizro.models as vm +import vizro.plotly.express as px +from vizro import Vizro + +df_list = [px.data.iris()] +for i in range(1, 33333): + df_list.append(px.data.iris()) # noqa: PERF401 +iris = pd.concat(df_list) + + +page_with_chart_filter_parameter = vm.Page( + title="Page with chart, filter and parameter", + components=[ + vm.Graph( + id="histogram_chart", + figure=px.histogram( + iris, + x="sepal_length", + y="petal_width", + color="sepal_width", + ), + ), + ], + controls=[ + vm.Filter(column="species", selector=vm.Dropdown()), + vm.Parameter( + targets=["histogram_chart.y"], + selector=vm.RadioItems(options=["petal_width", "petal_length"], value="petal_width"), + ), + ], +) + + +dashboard = vm.Dashboard(pages=[page_with_chart_filter_parameter]) + +app = Vizro().build(dashboard) +app.dash.enable_dev_tools(debug=True, 
dev_tools_ui=True, dev_tools_hot_reload=False) + +if __name__ == "__main__": + app.run() diff --git a/vizro-core/tests/e2e/vizro/test_performance/test_performance.py b/vizro-core/tests/e2e/vizro/test_performance/test_performance.py new file mode 100644 index 000000000..94d58a310 --- /dev/null +++ b/vizro-core/tests/e2e/vizro/test_performance/test_performance.py @@ -0,0 +1,80 @@ +import re + +from hamcrest import assert_that +from playwright.sync_api import sync_playwright +from werkzeug.http import parse_options_header + + +def performance(func): + """Decorator for setting up playwright logic.""" + + def wrapper(request): + """Simple test to measure request timings.""" + with sync_playwright() as p: + # selecting the Chromium browser engine which starts a new browser instance + browser = p.chromium.launch() + # creating browser context - clean, isolated browser profile + context = browser.new_context(viewport={"width": 1920, "height": 1080}) + # creating a new page inside the context + page = context.new_page() + response_times = [] + + def on_request(request): + if any(r in request.url for r in ["_dash-update-component"]): + timing = request.timing + request_type = re.findall(r"__(.*?)_[0-9a-f-]{8,}", request.post_data_json["output"]) + request_time = timing["responseEnd"] - timing["requestStart"] + server_timing = parse_options_header(request.response().all_headers().get("server-timing", ""))[ + 1 + ].get("dur", 0) + if request_type: + response_times.append( + { + "request_type": request_type[0], + "request_time": request_time, + "server_timing": int(server_timing), + } + ) + + page.on("requestfinished", on_request) + + page.goto("http://127.0.0.1:5002/") + + func(page, response_times) + + browser.close() + + return wrapper + + +@performance +def test_time(page, response_times): + """Simple test to measure request timings.""" + page.locator(".dash-dropdown .Select-value-icon:nth-of-type(1)").nth(0).click() + page.get_by_text("petal_length").click() + 
page.wait_for_load_state("networkidle") + print(response_times) # noqa + assert_that( + response_times[0]["request_time"] < 1500, + reason=f"request time for {response_times[0]['request_type']} is higher than 1500ms", + ) + assert_that( + response_times[1]["request_time"] < 5000, + reason=f"request time for {response_times[1]['request_type']} is higher than 5000ms", + ) + assert_that( + response_times[2]["request_time"] < 5000, + reason=f"request time for {response_times[2]['request_type']} is higher than 5000ms", + ) + assert_that( + response_times[0]["server_timing"] < 1300, + reason=f"server timing for {response_times[0]['request_type']} is higher than 1300ms", + ) + assert_that( + response_times[1]["server_timing"] < 1300, + reason=f"server timing for {response_times[1]['request_type']} is higher than 1300ms", + ) + assert_that( + response_times[2]["server_timing"] < 1300, + reason=f"server timing for {response_times[2]['request_type']} is higher than 1300ms", + )