aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorJason Gunthorpe <jgg@nvidia.com>2020-08-07 10:42:02 -0300
committerJason Gunthorpe <jgg@nvidia.com>2020-08-31 10:59:00 -0300
commitfc68b784c7c41c820dcffb59778db7511fe29047 (patch)
treefdef15db9bfde49c82fe1579a1a05793c222b58a
parent4bde2de9cb6e9c2753c6eb18a523b2c92f341827 (diff)
downloadcloud_mdir_sync-fc68b784c7c41c820dcffb59778db7511fe29047.tar.gz
cloud_mdir_sync-fc68b784c7c41c820dcffb59778db7511fe29047.tar.bz2
cloud_mdir_sync-fc68b784c7c41c820dcffb59778db7511fe29047.zip
O365: Protect the page fetches in get_json_paged() with _retry_protect
Getting a 404 or other error here would explode the whole thing. Instead the fetch of the next URL should be re-issued. Signed-off-by: Jason Gunthorpe <jgg@nvidia.com>
-rw-r--r--cloud_mdir_sync/office365.py8
1 file changed, 6 insertions, 2 deletions
diff --git a/cloud_mdir_sync/office365.py b/cloud_mdir_sync/office365.py
index 32ae45a..fa130a8 100644
--- a/cloud_mdir_sync/office365.py
+++ b/cloud_mdir_sync/office365.py
@@ -378,6 +378,11 @@ class GraphAPI(oauth.Account):
async for _ in op.content.iter_any():
pass
+ @_retry_protect
+ async def __get_json_paged_next(self, uri):
+ async with self.session.get(uri, headers=self.headers) as op:
+ return await self._check_json(op)
+
async def get_json_paged(self, ver, path, params=None):
"""Return an iterator that iterates over every JSON element in a paged
result"""
@@ -389,8 +394,7 @@ class GraphAPI(oauth.Account):
uri = resp.get("@odata.nextLink")
if uri is None:
break
- async with self.session.get(uri, headers=self.headers) as op:
- resp = await self._check_json(op)
+ resp = await self.__get_json_paged_next(uri)
async def _execute_batch(self, batch):
resp = await self.post_json("v1.0", "/$batch", batch)