From b063078d3dcc78f48e11d48cf0c5dbe6fa6a3e08 Mon Sep 17 00:00:00 2001 From: Chris van Run Date: Tue, 12 Nov 2024 19:54:52 +0100 Subject: [PATCH] More Data Models (#172) This PR does two major things: - Allow object/model retrieval using `api_url` (was done with the direct request, now forced through the APIs) - Fix resulting deprecation warnings on key access (i.e. `object["pk"]` It also makes the recurse_call wrapper more robust by outputting the error (`raise from e`) and allowing for retries when `ObjectNotFound` is thrown. --- gcapi/apibase.py | 30 +++++--- gcapi/client.py | 61 ++++++++-------- tests/async_integration_tests.py | 80 +++++++++++++-------- tests/integration_tests.py | 117 +++++++++++++++++++++++-------- tests/utils.py | 23 ++++-- 5 files changed, 209 insertions(+), 102 deletions(-) diff --git a/gcapi/apibase.py b/gcapi/apibase.py index 20af5d5..46a0338 100644 --- a/gcapi/apibase.py +++ b/gcapi/apibase.py @@ -143,14 +143,21 @@ def iterate_all(self, params=None) -> Iterator[T]: yield from current_list offset += req_count - def detail(self, pk=None, **params) -> Generator[T, dict[Any, Any], T]: - if all((pk, params)): - raise ValueError("Only one of pk or params must be specified") - - if pk is not None: + def detail( + self, pk=None, api_url=None, **params + ) -> Generator[T, dict[Any, Any], T]: + if sum(bool(arg) for arg in [pk, api_url, params]) != 1: + raise ValueError( + "Exactly one of pk, api_url, or params must be specified" + ) + if pk is not None or api_url is not None: + if pk is not None: + request_kwargs = dict(path=urljoin(self.base_path, pk + "/")) + else: + request_kwargs = dict(url=api_url) try: result = yield self.yield_request( - method="GET", path=urljoin(self.base_path, pk + "/") + method="GET", **request_kwargs ) return self.model(**result) except HTTPStatusError as e: @@ -170,6 +177,8 @@ def detail(self, pk=None, **params) -> Generator[T, dict[Any, Any], T]: class ModifiableMixin(Common): + response_model: type + def 
_process_request_arguments(self, data): if data is None: data = {} @@ -188,13 +197,16 @@ def perform_request(self, method, data=None, pk=False): return (yield from self._execute_request(method, data, pk)) def create(self, **kwargs): - return (yield from self.perform_request("POST", data=kwargs)) + result = yield from self.perform_request("POST", data=kwargs) + return self.response_model(**result) def update(self, pk, **kwargs): - return (yield from self.perform_request("PUT", pk=pk, data=kwargs)) + result = yield from self.perform_request("PUT", pk=pk, data=kwargs) + return self.response_model(**result) def partial_update(self, pk, **kwargs): - return (yield from self.perform_request("PATCH", pk=pk, data=kwargs)) + result = yield from self.perform_request("PATCH", pk=pk, data=kwargs) + return result def delete(self, pk): return (yield from self.perform_request("DELETE", pk=pk)) diff --git a/gcapi/client.py b/gcapi/client.py index 5287f89..788f9ef 100644 --- a/gcapi/client.py +++ b/gcapi/client.py @@ -57,11 +57,11 @@ def download( if pk is not None: image = yield from self.detail(pk=pk) elif url is not None: - image = yield self.yield_request(method="GET", url=url) + image = yield from self.detail(api_url=url) else: image = yield from self.detail(**params) - files = image["files"] + files = image.files # Make sure file destination exists p = Path(filename).absolute() @@ -72,16 +72,16 @@ def download( # Download the files downloaded_files = [] for file in files: - if image_type and file["image_type"] != image_type: + if image_type and file.image_type != image_type: continue data = ( yield self.yield_request( - method="GET", url=file["file"], follow_redirects=True + method="GET", url=file.file, follow_redirects=True ) ).content - suffix = file["file"].split(".")[-1] + suffix = file.file.split(".")[-1] local_file = directory / f"{basename}.{suffix}" with local_file.open("wb") as fp: fp.write(data) @@ -96,11 +96,13 @@ class UploadSessionsAPI( ): base_path = 
"cases/upload-sessions/" model = gcapi.models.RawImageUploadSession + response_model = gcapi.models.RawImageUploadSession class WorkstationSessionsAPI(APIBase[gcapi.models.Session]): base_path = "workstations/sessions/" model = gcapi.models.Session + response_model = gcapi.models.Session class ReaderStudyQuestionsAPI(APIBase[gcapi.models.Question]): @@ -113,11 +115,13 @@ class ReaderStudyMineAnswersAPI( ): base_path = "reader-studies/answers/mine/" model = gcapi.models.ReaderStudy + response_model = gcapi.models.Answer class ReaderStudyAnswersAPI(ModifiableMixin, APIBase[gcapi.models.Answer]): base_path = "reader-studies/answers/" model = gcapi.models.Answer + response_model = gcapi.models.Answer sub_apis = {"mine": ReaderStudyMineAnswersAPI} @@ -142,6 +146,7 @@ class ReaderStudyDisplaySetsAPI( ): base_path = "reader-studies/display-sets/" model = gcapi.models.DisplaySet + response_model = gcapi.models.DisplaySetPost class ReaderStudiesAPI(APIBase[gcapi.models.ReaderStudy]): @@ -177,6 +182,7 @@ class AlgorithmsAPI(APIBase[gcapi.models.Algorithm]): class AlgorithmJobsAPI(ModifiableMixin, APIBase[gcapi.models.HyperlinkedJob]): base_path = "algorithms/jobs/" model = gcapi.models.HyperlinkedJob + response_model = gcapi.models.JobPost @mark_generator def by_input_image(self, pk): @@ -191,6 +197,7 @@ class ArchivesAPI(APIBase[gcapi.models.Archive]): class ArchiveItemsAPI(ModifiableMixin, APIBase[gcapi.models.ArchiveItem]): base_path = "archives/items/" model = gcapi.models.ArchiveItem + response_model = gcapi.models.ArchiveItemPost class ComponentInterfacesAPI(APIBase[gcapi.models.ComponentInterface]): @@ -207,13 +214,12 @@ class UploadsAPI(APIBase[gcapi.models.UserUpload]): max_retries = 10 def create(self, *, filename): - return ( - yield self.yield_request( - method="POST", - path=self.base_path, - json={"filename": str(filename)}, - ) + result = yield self.yield_request( + method="POST", + path=self.base_path, + json={"filename": str(filename)}, ) + 
return self.model(**result) def generate_presigned_urls(self, *, pk, s3_upload_id, part_numbers): url = urljoin( @@ -248,8 +254,8 @@ def list_parts(self, *, pk, s3_upload_id): def upload_fileobj(self, *, fileobj, filename): user_upload = yield from self.create(filename=filename) - pk = user_upload["pk"] - s3_upload_id = user_upload["s3_upload_id"] + pk = user_upload.pk + s3_upload_id = user_upload.s3_upload_id try: parts = yield from self._put_fileobj( @@ -261,11 +267,10 @@ def upload_fileobj(self, *, fileobj, filename): ) raise - return ( # noqa: B901 - yield from self.complete_multipart_upload( - pk=pk, s3_upload_id=s3_upload_id, parts=parts - ) + result = yield from self.complete_multipart_upload( + pk=pk, s3_upload_id=s3_upload_id, parts=parts ) + return self.model(**result) # noqa: B901 def _put_fileobj(self, *, fileobj, pk, s3_upload_id): part_number = 1 # s3 uses 1-indexed chunks @@ -502,7 +507,7 @@ def _upload_image_files(self, *, files, **kwargs): return ( yield from self.__org_api_meta.raw_image_upload_sessions.create( - uploads=[u["api_url"] for u in uploads], **kwargs + uploads=[u.api_url for u in uploads], **kwargs ) ) @@ -677,7 +682,7 @@ def run_external_job(self, *, algorithm: str, inputs: dict[str, Any]): raw_image_upload_session = ( yield from self._upload_image_files(files=value) ) - i["upload_session"] = raw_image_upload_session["api_url"] + i["upload_session"] = raw_image_upload_session.api_url elif isinstance(value, str): i["image"] = value elif ci["super_kind"].lower() == "file": # type: ignore @@ -686,7 +691,7 @@ def run_external_job(self, *, algorithm: str, inputs: dict[str, Any]): f"Only a single file can be provided for {ci['title']}." 
# type: ignore ) upload = yield from self._upload_file(value) - i["user_upload"] = upload["api_url"] + i["user_upload"] = upload.api_url else: i["value"] = value job["inputs"].append(i) # type: ignore @@ -703,13 +708,13 @@ def update_archive_item( First, retrieve the archive items from your archive: archive = next(client.archives.iterate_all(params={"slug": "..."})) items = list( - client.archive_items.iterate_all(params={"archive": archive["pk"]}) + client.archive_items.iterate_all(params={"archive": archive.pk}) ) To then add, for example, a PDF report and a lung volume value to the first archive item , provide the interface slugs together with the respective value or file path as follows: client.update_archive_item( - archive_item_pk=items[0]['id'], + archive_item_pk=items[0].id, values={ "report": [...], "lung-volume": 1.9, @@ -754,7 +759,7 @@ def update_archive_item( raw_image_upload_session = ( yield from self._upload_image_files(files=value) ) - i["upload_session"] = raw_image_upload_session["api_url"] + i["upload_session"] = raw_image_upload_session.api_url elif isinstance(value, str): i["image"] = value elif ci.super_kind.lower() == "file": @@ -764,7 +769,7 @@ def update_archive_item( f"to a {ci.slug} interface." 
) upload = yield from self._upload_file(value) - i["user_upload"] = upload["api_url"] + i["user_upload"] = upload.api_url else: i["value"] = value civs["values"].append(i) @@ -870,19 +875,19 @@ def add_cases_to_reader_study( super_kind = interface.super_kind.casefold() if super_kind == "image": yield from self._upload_image_files( - display_set=ds["pk"], interface=slug, files=value + display_set=ds.pk, interface=slug, files=value ) data = {} elif super_kind == "file": upload = yield from self._upload_file(value) - data["user_upload"] = upload["api_url"] + data["user_upload"] = upload.api_url else: data["value"] = value if data: values.append(data) yield from self.__org_api_meta.reader_studies.display_sets.partial_update( - pk=ds["pk"], values=values + pk=ds.pk, values=values ) - res.append(ds["pk"]) + res.append(ds.pk) return res # noqa: B901 diff --git a/tests/async_integration_tests.py b/tests/async_integration_tests.py index 92026ca..2bde8c5 100644 --- a/tests/async_integration_tests.py +++ b/tests/async_integration_tests.py @@ -1,3 +1,4 @@ +from functools import partial from io import BytesIO from pathlib import Path @@ -24,8 +25,8 @@ async def get_upload_session(client, upload_pk): @async_recurse_call -async def get_file(client, url): - return await client(url=url, follow_redirects=True) +async def get_image(client, url): + return await client.images.detail(api_url=url) @async_recurse_call @@ -37,6 +38,26 @@ async def get_archive_items(client, archive_pk, min_size): return il +@async_recurse_call +async def get_complete_civ_set(get_func, complete_num_civ): + civ_set = await get_func() + num_civ = len(civ_set.values) + if num_civ != complete_num_civ: + raise ValueError( + f"Found {num_civ}, expected {complete_num_civ} values" + ) + for civ in civ_set.values: + if all( + [ + civ.file is None, + civ.image is None, + civ.value is None, + ] + ): + raise ValueError(f"Null values: {civ}") + return civ_set + + @pytest.mark.anyio async def 
test_raw_image_and_upload_session(local_grand_challenge): async with AsyncClient( @@ -131,18 +152,18 @@ async def test_upload_cases_to_archive( files=[Path(__file__).parent / "testdata" / f for f in files], ) - us = await get_upload_session(c, us["pk"]) + us = await get_upload_session(c, us.pk) # Check that only one image was created assert len(us.image_set) == 1 - image = await get_file(c, us.image_set[0]) + image = await get_image(c, us.image_set[0]) # And that it was added to the archive archive = await c.archives.iterate_all( params={"slug": "archive"} ).__anext__() archive_images = c.images.iterate_all(params={"archive": archive.pk}) - assert image["pk"] in [im.pk async for im in archive_images] + assert image.pk in [im.pk async for im in archive_images] archive_items = c.archive_items.iterate_all( params={"archive": archive.pk} ) @@ -156,17 +177,15 @@ async def test_upload_cases_to_archive( } if interface: - assert image_url_to_interface_slug[image["api_url"]] == interface + assert image_url_to_interface_slug[image.api_url] == interface else: assert ( - image_url_to_interface_slug[image["api_url"]] + image_url_to_interface_slug[image.api_url] == "generic-medical-image" ) # And that we can download it - response = await c( - url=image["files"][0]["file"], follow_redirects=True - ) + response = await c(url=image.files[0].file, follow_redirects=True) assert response.status_code == 200 @@ -229,20 +248,20 @@ async def test_upload_cases_to_archive_item_with_existing_interface( files=[Path(__file__).parent / "testdata" / "image10x10x101.mha"], ) - us = await get_upload_session(c, us["pk"]) + us = await get_upload_session(c, us.pk) # Check that only one image was created assert len(us.image_set) == 1 - image = await get_file(c, us.image_set[0]) + image = await get_image(c, us.image_set[0]) # And that it was added to the archive item item = await c.archive_items.detail(pk=items_list[-1].pk) - assert image["api_url"] in [civ.image for civ in item.values] + assert 
image.api_url in [civ.image for civ in item.values] # with the correct interface im_to_interface = { civ.image: civ.interface.slug for civ in item.values } - assert im_to_interface[image["api_url"]] == "generic-medical-image" + assert im_to_interface[image.api_url] == "generic-medical-image" @pytest.mark.anyio @@ -274,20 +293,20 @@ async def test_upload_cases_to_archive_item_with_new_interface( files=[Path(__file__).parent / "testdata" / "image10x10x101.mha"], ) - us = await get_upload_session(c, us["pk"]) + us = await get_upload_session(c, us.pk) # Check that only one image was created assert len(us.image_set) == 1 - image = await get_file(c, us.image_set[0]) + image = await get_image(c, us.image_set[0]) # And that it was added to the archive item item = await c.archive_items.detail(pk=items_list[-1].pk) - assert image["api_url"] in [civ.image for civ in item.values] + assert image.api_url in [civ.image for civ in item.values] # with the correct interface im_to_interface = { civ.image: civ.interface.slug for civ in item.values } - assert im_to_interface[image["api_url"]] == "generic-overlay" + assert im_to_interface[image.api_url] == "generic-overlay" @pytest.mark.parametrize("files", (["image10x10x101.mha"],)) @@ -301,7 +320,7 @@ async def test_download_cases(local_grand_challenge, files, tmpdir): files=[Path(__file__).parent / "testdata" / f for f in files], ) - us = await get_upload_session(c, us["pk"]) + us = await get_upload_session(c, us.pk) # Check that we can download the uploaded image tmpdir = Path(tmpdir) @@ -358,8 +377,8 @@ async def run_job(): # algorithm might not be ready yet job = await run_job() - assert job["status"] == "Validating inputs" - job = await c.algorithm_jobs.detail(job["pk"]) + assert job.status == "Validating inputs" + job = await c.algorithm_jobs.detail(job.pk) assert job.status in {"Validating inputs", "Queued", "Started"} @@ -748,27 +767,28 @@ async def test_add_cases_to_reader_study(display_sets, local_grand_challenge): 
@async_recurse_call async def check_image(interface_value, expected_name): - image = await get_file(c, interface_value.image) - assert image["name"] == expected_name + image = await get_image(c, interface_value.image) + assert image.name == expected_name def check_annotation(interface_value, expected): assert interface_value.value == expected @async_recurse_call async def check_file(interface_value, expected_name): - response = await get_file(c, interface_value.file) + response = await c(url=interface_value.file, follow_redirects=True) assert response.url.path.endswith(expected_name) # Check for each display set that the values are added for display_set_pk, display_set in zip( added_display_sets, display_sets ): - ds = await c.reader_studies.display_sets.detail(pk=display_set_pk) - # may take a while for the images to be added - while len(ds.values) != len(display_set): - ds = await c.reader_studies.display_sets.detail( - pk=display_set_pk - ) + + ds = await get_complete_civ_set( + partial( + c.reader_studies.display_sets.detail, pk=display_set_pk + ), + complete_num_civ=len(display_set), + ) for interface, value in display_set.items(): civ = [ diff --git a/tests/integration_tests.py b/tests/integration_tests.py index 410aa2f..4d880cb 100644 --- a/tests/integration_tests.py +++ b/tests/integration_tests.py @@ -1,3 +1,4 @@ +from functools import partial from io import BytesIO from pathlib import Path @@ -24,8 +25,8 @@ def get_upload_session(client, upload_pk): @recurse_call -def get_file(client, image_url): - return client(url=image_url, follow_redirects=True) +def get_image(client, image_url): + return client.images.detail(api_url=image_url) @recurse_call @@ -38,6 +39,26 @@ def get_archive_items(client, archive_pk, min_size): return items +@recurse_call +def get_complete_civ_set(get_func, complete_num_civ): + civ_set = get_func() + num_civ = len(civ_set.values) + if num_civ != complete_num_civ: + raise ValueError( + f"Found {num_civ}, expected {complete_num_civ} 
values" + ) + for civ in civ_set.values: + if all( + [ + civ.file is None, + civ.image is None, + civ.value is None, + ] + ): + raise ValueError(f"Null values: {civ}") + return civ_set + + @pytest.mark.parametrize( "files", ( @@ -123,17 +144,17 @@ def test_upload_cases_to_archive(local_grand_challenge, files, interface): files=[Path(__file__).parent / "testdata" / f for f in files], ) - us = get_upload_session(c, us["pk"]) + us = get_upload_session(c, us.pk) # Check that only one image was created assert len(us.image_set) == 1 - image = get_file(c, us.image_set[0]) + image = get_image(c, image_url=us.image_set[0]) # And that it was added to the archive archive = next(c.archives.iterate_all(params={"slug": "archive"})) archive_images = c.images.iterate_all(params={"archive": archive.pk}) - assert image["pk"] in [im.pk for im in archive_images] + assert image.pk in [im.pk for im in archive_images] archive_items = c.archive_items.iterate_all(params={"archive": archive.pk}) # with the correct interface image_url_to_interface_slug_dict = { @@ -143,15 +164,15 @@ def test_upload_cases_to_archive(local_grand_challenge, files, interface): if value.image } if interface: - assert image_url_to_interface_slug_dict[image["api_url"]] == interface + assert image_url_to_interface_slug_dict[image.api_url] == interface else: assert ( - image_url_to_interface_slug_dict[image["api_url"]] + image_url_to_interface_slug_dict[image.api_url] == "generic-medical-image" ) # And that we can download it - response = c(url=image["files"][0]["file"], follow_redirects=True) + response = c(url=image.files[0].file, follow_redirects=True) assert response.status_code == 200 @@ -200,19 +221,19 @@ def test_upload_cases_to_archive_item_with_existing_interface( files=[Path(__file__).parent / "testdata" / "image10x10x101.mha"], ) - us = get_upload_session(c, us["pk"]) + us = get_upload_session(c, us.pk) # Check that only one image was created assert len(us.image_set) == 1 - image = get_file(c, 
us.image_set[0]) + image = get_image(c, us.image_set[0]) # And that it was added to the archive item item = c.archive_items.detail(pk=item.pk) - assert image["api_url"] in [civ.image for civ in item.values if civ.image] + assert image.api_url in [civ.image for civ in item.values if civ.image] # with the correct interface im_to_interface = {civ.image: civ.interface.slug for civ in item.values} - assert im_to_interface[image["api_url"]] == "generic-medical-image" + assert im_to_interface[image.api_url] == "generic-medical-image" def test_upload_cases_to_archive_item_with_new_interface( @@ -231,18 +252,18 @@ def test_upload_cases_to_archive_item_with_new_interface( files=[Path(__file__).parent / "testdata" / "image10x10x101.mha"], ) - us = get_upload_session(c, us["pk"]) + us = get_upload_session(c, us.pk) # Check that only one image was created assert len(us.image_set) == 1 - image = get_file(c, us.image_set[0]) + image = get_image(c, us.image_set[0]) # And that it was added to the archive item item = c.archive_items.detail(pk=item.pk) - assert image["api_url"] in [civ.image for civ in item.values if civ.image] + assert image.api_url in [civ.image for civ in item.values if civ.image] # with the correct interface im_to_interface = {civ.image: civ.interface.slug for civ in item.values} - assert im_to_interface[image["api_url"]] == "generic-overlay" + assert im_to_interface[image.api_url] == "generic-overlay" @pytest.mark.parametrize("files", (["image10x10x101.mha"],)) @@ -256,7 +277,7 @@ def test_download_cases(local_grand_challenge, files, tmpdir): files=[Path(__file__).parent / "testdata" / f for f in files], ) - us = get_upload_session(c, us["pk"]) + us = get_upload_session(c, us.pk) # Check that we can download the uploaded image tmpdir = Path(tmpdir) @@ -316,8 +337,8 @@ def run_job(): # algorithm might not be ready yet job = run_job() - assert job["status"] == "Validating inputs" - job = c.algorithm_jobs.detail(job["pk"]) + assert job.status == "Validating inputs" 
+ job = c.algorithm_jobs.detail(job.pk) assert job.status in {"Validating inputs", "Queued", "Started"} @@ -330,8 +351,9 @@ def test_get_algorithm_by_slug(local_grand_challenge): by_slug = c.algorithms.detail(slug="test-algorithm-evaluation-image-0") by_pk = c.algorithms.detail(pk=by_slug.pk) + by_api_url = c.algorithms.detail(api_url=by_slug.api_url) - assert by_pk == by_slug + assert by_pk == by_slug == by_api_url def test_get_reader_study_by_slug(local_grand_challenge): @@ -341,18 +363,48 @@ by_slug = c.reader_studies.detail(slug="reader-study") by_pk = c.reader_studies.detail(pk=by_slug.pk) + by_api_url = c.reader_studies.detail(api_url=by_slug.api_url) - assert by_pk == by_slug + assert by_pk == by_slug == by_api_url -@pytest.mark.parametrize("key", ["slug", "pk"]) -def test_detail_no_objects(local_grand_challenge, key): +@pytest.mark.parametrize( + "keys", + [ + {"slug": "foo"}, + {"pk": "foo"}, + {"api_url": "foo"}, + ], +) +def test_detail_no_objects(local_grand_challenge, keys): c = Client( base_url=local_grand_challenge, verify=False, token=READERSTUDY_TOKEN ) + if "api_url" in keys: + keys = { + "api_url": f"{local_grand_challenge}/reader-studies/{keys['api_url']}" + } with pytest.raises(ObjectNotFound): - c.reader_studies.detail(**{key: "foo"}) + c.reader_studies.detail(**keys) + + +@pytest.mark.parametrize( + "keys", + ( + ("api_url", "pk", "slug"), + ("api_url", "pk"), + ("api_url", "slug"), + ("pk", "slug"), + ), +) +def test_detail_multiple_args(local_grand_challenge, keys): + c = Client( + base_url=local_grand_challenge, verify=False, token=READERSTUDY_TOKEN + ) + + with pytest.raises(ValueError): + c.reader_studies.detail(**{k: "foo" for k in keys}) def test_detail_multiple_objects(local_grand_challenge): @@ -662,22 +714,25 @@ def test_add_cases_to_reader_study(display_sets, local_grand_challenge): @recurse_call def check_image(interface_value, expected_name): - image = get_file(c, 
interface_value.image) - assert image["name"] == expected_name + image = get_image(c, interface_value.image) + assert image.name == expected_name def check_annotation(interface_value, expected): assert interface_value.value == expected @recurse_call def check_file(interface_value, expected_name): - response = get_file(c, interface_value.file) + response = c(url=interface_value.file, follow_redirects=True) assert response.url.path.endswith(expected_name) for display_set_pk, display_set in zip(added_display_sets, display_sets): - ds = c.reader_studies.display_sets.detail(pk=display_set_pk) - # may take a while for the images to be added - while len(ds.values) != len(display_set): - ds = c.reader_studies.display_sets.detail(pk=display_set_pk) + + ds = get_complete_civ_set( + get_func=partial( + c.reader_studies.display_sets.detail, pk=display_set_pk + ), + complete_num_civ=len(display_set), + ) for interface, value in display_set.items(): civ = [ diff --git a/tests/utils.py b/tests/utils.py index cfc8ad9..0bb074c 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -3,6 +3,7 @@ from httpx import AsyncHTTPTransport, HTTPStatusError, HTTPTransport +from gcapi.exceptions import ObjectNotFound from tests.scripts.constants import USER_TOKENS ADMIN_TOKEN = USER_TOKENS["admin"] @@ -13,14 +14,21 @@ def recurse_call(func): def wrapper(*args, **kwargs): + last_error = None for _ in range(60): try: result = func(*args, **kwargs) break - except (HTTPStatusError, ValueError): + except ( + HTTPStatusError, + ValueError, + # Permissions are sometimes delayed, shows as ObjectNotFound + ObjectNotFound, + ) as e: + last_error = e sleep(0.5) else: - raise TimeoutError + raise TimeoutError from last_error return result return wrapper @@ -28,14 +36,21 @@ def wrapper(*args, **kwargs): def async_recurse_call(func): async def wrapper(*args, **kwargs): + last_error = None for _ in range(60): try: result = await func(*args, **kwargs) break - except (HTTPStatusError, ValueError): + except ( + 
HTTPStatusError, + ValueError, + # Permissions are sometimes delayed, shows as ObjectNotFound + ObjectNotFound, + ) as e: + last_error = e sleep(0.5) else: - raise TimeoutError + raise TimeoutError from last_error return result return wrapper