Compare commits
39 Commits
imports-fo
...
providers/
Author | SHA1 | Date | |
---|---|---|---|
0f2ddb1997 | |||
b6ab7d9db2 | |||
691c60f4be | |||
56630b0b4c | |||
9503c8d2e2 | |||
cecaea28c8 | |||
e4e2067c94 | |||
b43b9b8682 | |||
0343999e52 | |||
5c856d6d06 | |||
3c92b0e137 | |||
1a39f1902b | |||
e3f5058cbe | |||
ad872379e3 | |||
005c2440de | |||
49f306cd94 | |||
9c394a03e6 | |||
f1755652eb | |||
873dd95641 | |||
0ca4775600 | |||
7e4c9bed3d | |||
573c7a09e6 | |||
bf1d80d265 | |||
107e9b789d | |||
3e2c1a9501 | |||
cefb99ec7d | |||
35391dcd3b | |||
e76071b79f | |||
0184f8cbba | |||
e42c6c6fea | |||
ef779294bf | |||
ad159d9a3f | |||
125cfe938d | |||
ad376de981 | |||
9f1670f965 | |||
8dd4709981 | |||
b25e68486a | |||
80186fc625 | |||
ce19332450 |
56
.github/workflows/ci-main.yml
vendored
56
.github/workflows/ci-main.yml
vendored
@ -226,6 +226,61 @@ jobs:
|
||||
flags: e2e
|
||||
file: unittest.xml
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
test-conformance:
|
||||
name: test-conformance (${{ matrix.job.name }})
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
job:
|
||||
- name: basic
|
||||
glob: tests/openid_conformance/test_basic.py
|
||||
- name: implicit
|
||||
glob: tests/openid_conformance/test_implicit.py
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup authentik env
|
||||
uses: ./.github/actions/setup
|
||||
- name: Setup e2e env (chrome, etc)
|
||||
run: |
|
||||
docker compose -f tests/e2e/docker-compose.yml up -d --quiet-pull
|
||||
- name: Setup conformance suite
|
||||
run: |
|
||||
docker compose -f tests/openid_conformance/compose.yml up -d --quiet-pull
|
||||
- id: cache-web
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: web/dist
|
||||
key: ${{ runner.os }}-web-${{ hashFiles('web/package-lock.json', 'web/src/**', 'web/packages/sfe/src/**') }}-b
|
||||
- name: prepare web ui
|
||||
if: steps.cache-web.outputs.cache-hit != 'true'
|
||||
working-directory: web
|
||||
run: |
|
||||
npm ci
|
||||
make -C .. gen-client-ts
|
||||
npm run build
|
||||
npm run build:sfe
|
||||
- name: run conformance
|
||||
run: |
|
||||
uv run coverage run manage.py test ${{ matrix.job.glob }}
|
||||
uv run coverage xml
|
||||
- if: ${{ always() }}
|
||||
uses: codecov/codecov-action@v5
|
||||
with:
|
||||
flags: conformance
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
- if: ${{ !cancelled() }}
|
||||
uses: codecov/test-results-action@v1
|
||||
with:
|
||||
flags: conformance
|
||||
file: unittest.xml
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
- if: ${{ !cancelled() }}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: conformance-certification-${{ matrix.job.glob }}
|
||||
path: tests/openid_conformance/exports/
|
||||
ci-core-mark:
|
||||
if: always()
|
||||
needs:
|
||||
@ -235,6 +290,7 @@ jobs:
|
||||
- test-unittest
|
||||
- test-integration
|
||||
- test-e2e
|
||||
- test-conformance
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: re-actors/alls-green@release/v1
|
||||
|
1
.gitignore
vendored
1
.gitignore
vendored
@ -217,3 +217,4 @@ source_docs/
|
||||
|
||||
### Docker ###
|
||||
docker-compose.override.yml
|
||||
tests/openid_conformance/exports/*.zip
|
||||
|
@ -104,6 +104,7 @@ def get_logger_config():
|
||||
"hpack": "WARNING",
|
||||
"httpx": "WARNING",
|
||||
"azure": "WARNING",
|
||||
"httpcore": "WARNING",
|
||||
}
|
||||
for handler_name, level in handler_level_map.items():
|
||||
base_config["loggers"][handler_name] = {
|
||||
|
@ -96,6 +96,7 @@ dev = [
|
||||
"pytest-django==4.11.1",
|
||||
"pytest-github-actions-annotate-failures==0.3.0",
|
||||
"pytest-randomly==3.16.0",
|
||||
"pytest-subtests>=0.14.1",
|
||||
"pytest-timeout==2.4.0",
|
||||
"requests-mock==1.12.1",
|
||||
"ruff==0.11.9",
|
||||
|
@ -6,6 +6,8 @@ services:
|
||||
- /dev/shm:/dev/shm
|
||||
network_mode: host
|
||||
restart: always
|
||||
extra_hosts:
|
||||
- "host.docker.internal:host-gateway"
|
||||
mailpit:
|
||||
image: docker.io/axllent/mailpit:v1.26.2
|
||||
ports:
|
||||
|
@ -165,6 +165,7 @@ class SeleniumTestCase(DockerTestCase, StaticLiveServerTestCase):
|
||||
def _get_driver(self) -> WebDriver:
|
||||
count = 0
|
||||
opts = webdriver.ChromeOptions()
|
||||
opts.accept_insecure_certs = True
|
||||
opts.add_argument("--disable-search-engine-choice-screen")
|
||||
# This breaks selenium when running remotely...?
|
||||
# opts.set_capability("goog:loggingPrefs", {"browser": "ALL"})
|
||||
@ -249,7 +250,6 @@ class SeleniumTestCase(DockerTestCase, StaticLiveServerTestCase):
|
||||
|
||||
def login(self, shadow_dom=True):
|
||||
"""Do entire login flow"""
|
||||
|
||||
if shadow_dom:
|
||||
flow_executor = self.get_shadow_root("ak-flow-executor")
|
||||
identification_stage = self.get_shadow_root("ak-stage-identification", flow_executor)
|
||||
|
0
tests/openid_conformance/__init__.py
Normal file
0
tests/openid_conformance/__init__.py
Normal file
156
tests/openid_conformance/base.py
Normal file
156
tests/openid_conformance/base.py
Normal file
@ -0,0 +1,156 @@
|
||||
from json import dumps
|
||||
from os import makedirs
|
||||
from pathlib import Path
|
||||
from time import sleep
|
||||
|
||||
from selenium.webdriver.common.by import By
|
||||
from selenium.webdriver.support import expected_conditions as ec
|
||||
|
||||
from authentik.blueprints.tests import apply_blueprint, reconcile_app
|
||||
from authentik.providers.oauth2.models import OAuth2Provider
|
||||
from tests.e2e.utils import SeleniumTestCase
|
||||
from tests.openid_conformance.conformance import Conformance
|
||||
|
||||
|
||||
class TestOpenIDConformance(SeleniumTestCase):
    """Drive the OpenID Foundation conformance suite against a live authentik.

    Subclasses pick a test plan and call :meth:`run_test`; this base class
    provisions the providers via blueprints, builds the suite configuration,
    and automates the browser interactions the suite asks for.
    """

    # Client for the conformance suite API, created per test run in run_test()
    conformance: Conformance

    @apply_blueprint(
        "default/flow-default-authentication-flow.yaml",
        "default/flow-default-invalidation-flow.yaml",
    )
    @apply_blueprint(
        "default/flow-default-provider-authorization-implicit-consent.yaml",
        "default/flow-default-provider-invalidation.yaml",
    )
    @apply_blueprint("system/providers-oauth2.yaml")
    @reconcile_app("authentik_crypto")
    @apply_blueprint("testing/oidc-conformance.yaml")
    def setUp(self):
        """Apply blueprints and assemble the suite's test-plan configuration."""
        super().setUp()
        makedirs(Path(__file__).parent / "exports", exist_ok=True)
        # Both providers are created by the oidc-conformance blueprint; only
        # their generated client secrets need to be read back at runtime.
        first_provider = OAuth2Provider.objects.get(
            client_id="4054d882aff59755f2f279968b97ce8806a926e1"
        )
        second_provider = OAuth2Provider.objects.get(
            client_id="ad64aeaf1efe388ecf4d28fcc537e8de08bcae26"
        )
        self.test_plan_config = {
            "alias": "authentik",
            "description": "authentik",
            "server": {
                "discoveryUrl": self.url(
                    "authentik_providers_oauth2:provider-info",
                    application_slug="oidc-conformance-1",
                ),
            },
            "client": {
                "client_id": "4054d882aff59755f2f279968b97ce8806a926e1",
                "client_secret": first_provider.client_secret,
            },
            "client_secret_post": {
                "client_id": "4054d882aff59755f2f279968b97ce8806a926e1",
                "client_secret": first_provider.client_secret,
            },
            "client2": {
                "client_id": "ad64aeaf1efe388ecf4d28fcc537e8de08bcae26",
                "client_secret": second_provider.client_secret,
            },
            "consent": {},
        }
        self.test_variant = {
            "server_metadata": "discovery",
            "client_registration": "static_client",
        }

    def run_test(self, test_plan: str, test_plan_config: dict):
        """Create `test_plan` on the suite and run every module it contains."""
        # Create a Conformance instance...
        self.conformance = Conformance(f"https://{self.host}:8443/", None, verify_ssl=False)

        plan = self.conformance.create_test_plan(
            test_plan,
            dumps(test_plan_config),
            self.test_variant,
        )
        plan_id = plan["id"]
        for module in plan["modules"]:
            with self.subTest(module["testModule"], **module["variant"]):
                # Each module is instantiated from the plan with its variant,
                # then driven to completion in the browser.
                instance = self.conformance.create_test_from_plan_with_variant(
                    plan_id, module["testModule"], module["variant"]
                )
                module_id = instance["id"]
                self.run_single_test(module_id)
                self.conformance.wait_for_state(module_id, ["FINISHED"], timeout=self.wait_timeout)
                sleep(2)
        self.conformance.exporthtml(plan_id, Path(__file__).parent / "exports")

    def run_single_test(self, module_id: str):
        """Process instructions for a single test, navigate to browser URLs and take screenshots"""
        handled_urls = 0
        handled_uploads = 0
        cookies_cleared = False
        while True:
            # Fetch all info
            status = self.conformance.get_test_status(module_id)
            log = self.conformance.get_test_log(module_id)
            info = self.conformance.get_module_info(module_id)
            # Check status early, if we're finished already we don't want to do anything extra
            if info["status"] in ["INTERRUPTED", "FINISHED"]:
                return
            # Check if we need to clear cookies - tests only indicates this in their written summary
            # so this check is a bit brittle
            if "cookies" in info["summary"] and not cookies_cleared:
                # Navigate to our origin to delete cookies in the right context
                self.driver.get(self.url("authentik_api:user-me") + "?format=json")
                self.driver.delete_all_cookies()
                cookies_cleared = True
            # Check if we need deal with any browser URLs
            pending_urls = status.get("browser", {}).get("urls", [])
            if len(pending_urls) > handled_urls:
                self.do_browser(pending_urls[handled_urls])
                handled_urls += 1
                continue
            # Check if we need to upload any items
            uploads = [entry for entry in log if "upload" in entry]
            if len(uploads) > handled_uploads:
                screenshot = self.get_screenshot()
                self.conformance.upload_image(
                    module_id, uploads[handled_uploads]["upload"], screenshot
                )
                sleep(3)
                handled_uploads += 1
                continue
            sleep(0.1)

    def get_screenshot(self):
        """Get a screenshot, but resize the window first so we don't exceed 500kb"""
        self.driver.set_window_size(800, 600)
        screenshot = f"data:image/jpeg;base64,{self.driver.get_screenshot_as_base64()}"
        self.driver.maximize_window()
        return screenshot

    def do_browser(self, url):
        """For any specific OpenID Conformance test, execute the operations required"""
        self.driver.get(url)
        should_expect_completion = False
        if "if/flow/default-authentication-flow" in self.driver.current_url:
            self.logger.debug("Logging in")
            self.login()
            should_expect_completion = True
        if "prompt=consent" in url or "offline_access" in url:
            self.logger.debug("Authorizing")
            self.wait.until(ec.presence_of_element_located((By.CSS_SELECTOR, "ak-flow-executor")))
            sleep(1)
            flow_executor = self.get_shadow_root("ak-flow-executor")
            consent_stage = self.get_shadow_root("ak-stage-consent", flow_executor)
            consent_stage.find_element(
                By.CSS_SELECTOR,
                "[type=submit]",
            ).click()
            should_expect_completion = True
        if should_expect_completion:
            self.wait.until(ec.presence_of_element_located((By.CSS_SELECTOR, "#complete")))
|
192
tests/openid_conformance/conformance.py
Normal file
192
tests/openid_conformance/conformance.py
Normal file
@ -0,0 +1,192 @@
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import time
|
||||
import zipfile
|
||||
|
||||
import requests
|
||||
from requests.adapters import HTTPAdapter
|
||||
from urllib3.util.retry import Retry
|
||||
|
||||
|
||||
class Conformance:
    """Thin REST client for the OpenID Foundation conformance suite API.

    Covers the endpoints needed to create test plans, instantiate and run
    individual test modules, poll their state, upload evidence images and
    export results. All requests go through one `requests.Session` that
    retries transient 5xx errors.
    """

    HTTP_OK = 200
    HTTP_CREATED = 201

    def __init__(self, api_url_base, api_token, verify_ssl):
        """Create a client for the suite deployed at `api_url_base`.

        :param api_url_base: Base URL of the suite (a trailing slash is
            appended if missing), e.g. ``https://localhost:8443/``.
        :param api_token: Bearer token, or ``None`` for unauthenticated
            (local) deployments.
        :param verify_ssl: Passed through to requests; ``False`` for
            self-signed local instances.
        """
        if not api_url_base.endswith("/"):
            api_url_base += "/"
        self.api_url_base = api_url_base
        self.session = requests.Session()
        self.session.verify = verify_ssl
        # Retry transient server errors - the suite can briefly 5xx while
        # test modules are starting up.
        retries = Retry(
            total=5,
            backoff_factor=1,
            status_forcelist=[500, 502, 503, 504],
            allowed_methods=["GET", "POST"],
        )
        self.session.mount("https://", HTTPAdapter(max_retries=retries))
        self.session.mount("http://", HTTPAdapter(max_retries=retries))

        self.session.headers.update({"Content-Type": "application/json"})
        if api_token is not None:
            self.session.headers.update({"Authorization": f"Bearer {api_token}"})

    def get_all_test_modules(self):
        """Return the test modules available on the suite."""
        url = f"{self.api_url_base}api/runner/available"
        response = self.session.get(url)
        if response.status_code != Conformance.HTTP_OK:
            raise Exception(
                f"get_all_test_modules failed - HTTP {response.status_code} {response.content}"
            )
        return response.json()

    def get_test_status(self, module_id):
        """Return the runner status (incl. pending browser URLs) for a module."""
        url = f"{self.api_url_base}api/runner/{module_id}"
        response = self.session.get(url)
        if response.status_code != Conformance.HTTP_OK:
            raise Exception(
                f"get_test_status failed - HTTP {response.status_code} {response.content}"
            )
        return response.json()

    def exporthtml(self, plan_id, path):
        """Download the HTML results zip for `plan_id` into directory `path`.

        Retries up to 5 times, since the export can fail or produce a
        truncated archive while results are still being finalized.
        Returns the full path of the downloaded zip.
        """
        for _ in range(5):
            url = f"{self.api_url_base}api/plan/exporthtml/{plan_id}"
            try:
                with self.session.get(url, stream=True) as response:
                    if response.status_code != Conformance.HTTP_OK:
                        raise Exception(
                            f"exporthtml failed - HTTP {response.status_code} {response.content}"
                        )
                    cd = response.headers.get("content-disposition", "")
                    local_filename = re.findall('filename="(.+)"', cd)[0]
                    full_path = os.path.join(path, local_filename)
                    with open(full_path, "wb") as f:
                        for chunk in response.iter_content(chunk_size=8192):
                            f.write(chunk)
                    # Validate the archive; use a context manager so the file
                    # handle isn't leaked on success or across retries.
                    with zipfile.ZipFile(full_path) as zip_file:
                        ret = zip_file.testzip()
                    if ret is not None:
                        raise Exception(f"exporthtml returned corrupt zip file: {ret}")
                    return full_path
            except Exception as e:
                # Broad on purpose: any failure (HTTP error, missing header,
                # corrupt zip) should trigger another attempt.
                print(f"requests {url} exception {e} caught - retrying")
                time.sleep(1)
        raise Exception(f"exporthtml for {plan_id} failed even after retries")

    def create_certification_package(
        self, plan_id, conformance_pdf_path, rp_logs_zip_path=None, output_zip_directory="./"
    ):
        """Build and download the certification package zip for a plan.

        :param conformance_pdf_path: Signed certification-of-conformance PDF.
        :param rp_logs_zip_path: Optional RP-side logs zip; when omitted,
            ``os.devnull`` is uploaded as an empty placeholder.
        :param output_zip_directory: Directory the resulting zip is written to.
        """
        with (
            open(conformance_pdf_path, "rb") as cert_pdf,
            # Parenthesized so the `as` target unambiguously binds the result
            # of the conditional expression.
            (
                open(rp_logs_zip_path, "rb") if rp_logs_zip_path else open(os.devnull, "rb")
            ) as rp_logs,
        ):
            files = {
                "certificationOfConformancePdf": cert_pdf,
                "clientSideData": rp_logs,
            }

            # Drop the JSON Content-Type so requests can set the multipart
            # boundary itself.
            headers = self.session.headers.copy()
            headers.pop("Content-Type", None)

            url = f"{self.api_url_base}api/plan/{plan_id}/certificationpackage"
            response = self.session.post(url, files=files, headers=headers)
            if response.status_code != Conformance.HTTP_OK:
                raise Exception(
                    f"certificationpackage failed - HTTP {response.status_code} {response.content}"
                )

            cd = response.headers.get("content-disposition", "")
            local_filename = re.findall('filename="(.+)"', cd)[0]
            full_path = os.path.join(output_zip_directory, local_filename)
            with open(full_path, "wb") as f:
                f.write(response.content)
            print(f"Certification package zip for plan id {plan_id} written to {full_path}")

    def create_test_plan(self, name, configuration, variant=None):
        """Create a test plan `name` with JSON `configuration` (a string)."""
        url = f"{self.api_url_base}api/plan"
        payload = {"planName": name}
        if variant is not None:
            payload["variant"] = json.dumps(variant)
        response = self.session.post(url, params=payload, data=configuration)
        if response.status_code != Conformance.HTTP_CREATED:
            raise Exception(
                f"create_test_plan failed - HTTP {response.status_code} {response.content}"
            )
        return response.json()

    def create_test(self, test_name, configuration):
        """Create a standalone test module (not attached to a plan)."""
        url = f"{self.api_url_base}api/runner"
        payload = {"test": test_name}
        response = self.session.post(url, params=payload, data=configuration)
        if response.status_code != Conformance.HTTP_CREATED:
            raise Exception(f"create_test failed - HTTP {response.status_code} {response.content}")
        return response.json()

    def create_test_from_plan(self, plan_id, test_name):
        """Instantiate test module `test_name` from plan `plan_id`."""
        url = f"{self.api_url_base}api/runner"
        payload = {"test": test_name, "plan": plan_id}
        response = self.session.post(url, params=payload)
        if response.status_code != Conformance.HTTP_CREATED:
            raise Exception(
                f"create_test_from_plan failed - HTTP {response.status_code} {response.content}"
            )
        return response.json()

    def create_test_from_plan_with_variant(self, plan_id, test_name, variant):
        """Instantiate test module `test_name` from a plan with a variant dict."""
        url = f"{self.api_url_base}api/runner"
        payload = {"test": test_name, "plan": plan_id}
        if variant is not None:
            payload["variant"] = json.dumps(variant)
        response = self.session.post(url, params=payload)
        if response.status_code != Conformance.HTTP_CREATED:
            raise Exception(
                "create_test_from_plan_with_variant failed - "
                f"HTTP {response.status_code} {response.content}"
            )
        return response.json()

    def get_module_info(self, module_id):
        """Return metadata (status, summary, ...) for a test module."""
        url = f"{self.api_url_base}api/info/{module_id}"
        response = self.session.get(url)
        if response.status_code != Conformance.HTTP_OK:
            raise Exception(
                f"get_module_info failed - HTTP {response.status_code} {response.content}"
            )
        return response.json()

    def get_test_log(self, module_id):
        """Return the log entries recorded for a test module."""
        url = f"{self.api_url_base}api/log/{module_id}"
        response = self.session.get(url)
        if response.status_code != Conformance.HTTP_OK:
            raise Exception(f"get_test_log failed - HTTP {response.status_code} {response.content}")
        return response.json()

    def upload_image(self, log_id, placeholder, data):
        """Upload screenshot `data` (a data-URI string) for a log placeholder."""
        url = f"{self.api_url_base}api/log/{log_id}/images/{placeholder}"
        response = self.session.post(url, data=data, headers={"Content-Type": "text/plain"})
        if response.status_code != Conformance.HTTP_OK:
            raise Exception(f"upload_image failed - HTTP {response.status_code} {response.content}")

    def start_test(self, module_id):
        """Start a previously created test module."""
        url = f"{self.api_url_base}api/runner/{module_id}"
        response = self.session.post(url)
        if response.status_code != Conformance.HTTP_OK:
            raise Exception(f"start_test failed - HTTP {response.status_code} {response.content}")
        return response.json()

    def wait_for_state(self, module_id, required_states, timeout=240):
        """Poll a module until it reaches one of `required_states`.

        Raises if the module moves to INTERRUPTED (unless that state was
        requested) or if `timeout` seconds elapse first.
        """
        timeout_at = time.time() + timeout
        while time.time() < timeout_at:
            info = self.get_module_info(module_id)
            status = info.get("status")
            if status in required_states:
                return status
            if status == "INTERRUPTED":
                raise Exception(f"Test module {module_id} has moved to INTERRUPTED")
            time.sleep(1)
        raise Exception(
            f"Timed out waiting for test module {module_id} "
            f"to be in one of states: {required_states}"
        )
|
10
tests/openid_conformance/test_basic.py
Normal file
10
tests/openid_conformance/test_basic.py
Normal file
@ -0,0 +1,10 @@
|
||||
from tests.e2e.utils import retry
|
||||
from tests.openid_conformance.base import TestOpenIDConformance
|
||||
|
||||
|
||||
class TestOpenIDConformanceBasic(TestOpenIDConformance):
    """Run the OIDC basic certification profile of the conformance suite."""

    @retry()
    def test_oidcc_basic_certification_test(self):
        """Execute every module of the basic certification test plan."""
        plan_name = "oidcc-basic-certification-test-plan"
        self.run_test(plan_name, self.test_plan_config)
|
10
tests/openid_conformance/test_implicit.py
Normal file
10
tests/openid_conformance/test_implicit.py
Normal file
@ -0,0 +1,10 @@
|
||||
from tests.e2e.utils import retry
|
||||
from tests.openid_conformance.base import TestOpenIDConformance
|
||||
|
||||
|
||||
class TestOpenIDConformanceImplicit(TestOpenIDConformance):
    """Run the OIDC implicit certification profile of the conformance suite.

    Fixes a copy-paste from test_basic.py: this file previously redeclared
    `TestOpenIDConformanceBasic` and re-ran the basic plan, so the implicit
    profile was never exercised despite the CI matrix targeting this file.
    """

    @retry()
    def test_oidcc_implicit_certification_test(self):
        """Execute every module of the implicit certification test plan."""
        plan_name = "oidcc-implicit-certification-test-plan"
        self.run_test(plan_name, self.test_plan_config)
|
15
uv.lock
generated
15
uv.lock
generated
@ -259,6 +259,7 @@ dev = [
|
||||
{ name = "pytest-django" },
|
||||
{ name = "pytest-github-actions-annotate-failures" },
|
||||
{ name = "pytest-randomly" },
|
||||
{ name = "pytest-subtests" },
|
||||
{ name = "pytest-timeout" },
|
||||
{ name = "requests-mock" },
|
||||
{ name = "ruff" },
|
||||
@ -358,6 +359,7 @@ dev = [
|
||||
{ name = "pytest-django", specifier = "==4.11.1" },
|
||||
{ name = "pytest-github-actions-annotate-failures", specifier = "==0.3.0" },
|
||||
{ name = "pytest-randomly", specifier = "==3.16.0" },
|
||||
{ name = "pytest-subtests", specifier = ">=0.14.1" },
|
||||
{ name = "pytest-timeout", specifier = "==2.4.0" },
|
||||
{ name = "requests-mock", specifier = "==1.12.1" },
|
||||
{ name = "ruff", specifier = "==0.11.9" },
|
||||
@ -2685,6 +2687,19 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/22/70/b31577d7c46d8e2f9baccfed5067dd8475262a2331ffb0bfdf19361c9bde/pytest_randomly-3.16.0-py3-none-any.whl", hash = "sha256:8633d332635a1a0983d3bba19342196807f6afb17c3eef78e02c2f85dade45d6", size = 8396, upload-time = "2024-10-25T15:45:32.78Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pytest-subtests"
|
||||
version = "0.14.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "attrs" },
|
||||
{ name = "pytest" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/c0/4c/ba9eab21a2250c2d46c06c0e3cd316850fde9a90da0ac8d0202f074c6817/pytest_subtests-0.14.1.tar.gz", hash = "sha256:350c00adc36c3aff676a66135c81aed9e2182e15f6c3ec8721366918bbbf7580", size = 17632, upload-time = "2024-12-10T00:21:04.856Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/a9/b7/7ca948d35642ae72500efda6ba6fa61dcb6683feb596d19c4747c63c0789/pytest_subtests-0.14.1-py3-none-any.whl", hash = "sha256:e92a780d98b43118c28a16044ad9b841727bd7cb6a417073b38fd2d7ccdf052d", size = 8833, upload-time = "2024-12-10T00:20:58.873Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pytest-timeout"
|
||||
version = "2.4.0"
|
||||
|
Reference in New Issue
Block a user