
Commit 9342e27

chore: update api_core submodule (#897)
1 parent: 285cdd3

21 files changed: +142 additions, -317 deletions

google/__init__.py

Lines changed: 0 additions & 6 deletions
This file was deleted.

google/cloud/__init__.py

Lines changed: 0 additions & 6 deletions
This file was deleted.

google/cloud/bigtable/data/__init__.py

Lines changed: 0 additions & 2 deletions
@@ -32,7 +32,6 @@
 from google.cloud.bigtable.data.mutations import DeleteAllFromFamily
 from google.cloud.bigtable.data.mutations import DeleteAllFromRow
 
-from google.cloud.bigtable.data.exceptions import IdleTimeout
 from google.cloud.bigtable.data.exceptions import InvalidChunk
 from google.cloud.bigtable.data.exceptions import FailedMutationEntryError
 from google.cloud.bigtable.data.exceptions import FailedQueryShardError
@@ -63,7 +62,6 @@
     "DeleteAllFromRow",
     "Row",
     "Cell",
-    "IdleTimeout",
     "InvalidChunk",
     "FailedMutationEntryError",
     "FailedQueryShardError",

google/cloud/bigtable/data/_async/_mutate_rows.py

Lines changed: 11 additions & 22 deletions
@@ -15,17 +15,16 @@
 from __future__ import annotations
 
 from typing import Sequence, TYPE_CHECKING
-import asyncio
 from dataclasses import dataclass
 import functools
 
 from google.api_core import exceptions as core_exceptions
-from google.api_core import retry_async as retries
+from google.api_core import retry as retries
 import google.cloud.bigtable_v2.types.bigtable as types_pb
 import google.cloud.bigtable.data.exceptions as bt_exceptions
 from google.cloud.bigtable.data._helpers import _make_metadata
-from google.cloud.bigtable.data._helpers import _convert_retry_deadline
 from google.cloud.bigtable.data._helpers import _attempt_timeout_generator
+from google.cloud.bigtable.data._helpers import _retry_exception_factory
 
 # mutate_rows requests are limited to this number of mutations
 from google.cloud.bigtable.data.mutations import _MUTATE_ROWS_REQUEST_MUTATION_LIMIT
@@ -101,17 +100,13 @@ def __init__(
             # Entry level errors
             bt_exceptions._MutateRowsIncomplete,
         )
-        # build retryable operation
-        retry = retries.AsyncRetry(
-            predicate=self.is_retryable,
-            timeout=operation_timeout,
-            initial=0.01,
-            multiplier=2,
-            maximum=60,
-        )
-        retry_wrapped = retry(self._run_attempt)
-        self._operation = _convert_retry_deadline(
-            retry_wrapped, operation_timeout, is_async=True
+        sleep_generator = retries.exponential_sleep_generator(0.01, 2, 60)
+        self._operation = retries.retry_target_async(
+            self._run_attempt,
+            self.is_retryable,
+            sleep_generator,
+            operation_timeout,
+            exception_factory=_retry_exception_factory,
         )
         # initialize state
         self.timeout_generator = _attempt_timeout_generator(
@@ -130,7 +125,7 @@ async def start(self):
         """
         try:
             # trigger mutate_rows
-            await self._operation()
+            await self._operation
         except Exception as exc:
             # exceptions raised by retryable are added to the list of exceptions for all unfinalized mutations
             incomplete_indices = self.remaining_indices.copy()
@@ -180,6 +175,7 @@ async def _run_attempt(self):
             result_generator = await self._gapic_fn(
                 timeout=next(self.timeout_generator),
                 entries=request_entries,
+                retry=None,
             )
             async for result_list in result_generator:
                 for result in result_list.entries:
@@ -195,13 +191,6 @@ async def _run_attempt(self):
                         self._handle_entry_error(orig_idx, entry_error)
                     # remove processed entry from active list
                     del active_request_indices[result.index]
-        except asyncio.CancelledError:
-            # when retry wrapper timeout expires, the operation is cancelled
-            # make sure incomplete indices are tracked,
-            # but don't record exception (it will be raised by wrapper)
-            # TODO: remove asyncio.wait_for in retry wrapper. Let grpc call handle expiration
-            self.remaining_indices.extend(active_request_indices.values())
-            raise
         except Exception as exc:
             # add this exception to list for each mutation that wasn't
             # already handled, and update remaining_indices if mutation is retryable
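
The shape of this change recurs throughout the commit: the retries.AsyncRetry wrapper plus _convert_retry_deadline is replaced by a direct call to api_core's retry_target_async, which produces a coroutine rather than a wrapped callable; that is why start() now awaits self._operation without calling it. A minimal standalone sketch of the new pattern, assuming google-api-core >= 2.16 (the attempt coroutine and the literal timeouts here are illustrative, not from this commit):

import asyncio

from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries


async def attempt() -> str:
    """One attempt of the underlying RPC; raising triggers a retry."""
    raise core_exceptions.ServiceUnavailable("transient failure")


async def main() -> None:
    # Calling retry_target_async creates a coroutine; awaiting it runs the
    # retry loop. Nothing executes until the await.
    operation = retries.retry_target_async(
        attempt,
        retries.if_exception_type(core_exceptions.ServiceUnavailable),
        retries.exponential_sleep_generator(0.01, 60, multiplier=2),
        timeout=2.0,  # overall operation deadline, in seconds
    )
    try:
        print(await operation)
    except core_exceptions.RetryError as exc:
        # the default exception_factory raises RetryError once the deadline
        # passes; the commit swaps in _retry_exception_factory to reshape it
        print(f"gave up: {exc}")


asyncio.run(main())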

google/cloud/bigtable/data/_async/_read_rows.py

Lines changed: 4 additions & 38 deletions
@@ -31,15 +31,13 @@
 from google.cloud.bigtable.data.row import Row, Cell
 from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery
 from google.cloud.bigtable.data.exceptions import InvalidChunk
-from google.cloud.bigtable.data.exceptions import RetryExceptionGroup
 from google.cloud.bigtable.data.exceptions import _RowSetComplete
 from google.cloud.bigtable.data._helpers import _attempt_timeout_generator
 from google.cloud.bigtable.data._helpers import _make_metadata
+from google.cloud.bigtable.data._helpers import _retry_exception_factory
 
-from google.api_core import retry_async as retries
-from google.api_core.retry_streaming_async import retry_target_stream
+from google.api_core import retry as retries
 from google.api_core.retry import exponential_sleep_generator
-from google.api_core import exceptions as core_exceptions
 
 if TYPE_CHECKING:
     from google.cloud.bigtable.data._async.client import TableAsync
@@ -107,12 +105,12 @@ def start_operation(self) -> AsyncGenerator[Row, None]:
         """
         Start the read_rows operation, retrying on retryable errors.
         """
-        return retry_target_stream(
+        return retries.retry_target_stream_async(
             self._read_rows_attempt,
             self._predicate,
             exponential_sleep_generator(0.01, 60, multiplier=2),
             self.operation_timeout,
-            exception_factory=self._build_exception,
+            exception_factory=_retry_exception_factory,
         )
 
     def _read_rows_attempt(self) -> AsyncGenerator[Row, None]:
@@ -343,35 +341,3 @@ def _revise_request_rowset(
             # this will avoid an unwanted full table scan
             raise _RowSetComplete()
         return RowSetPB(row_keys=adjusted_keys, row_ranges=adjusted_ranges)
-
-    @staticmethod
-    def _build_exception(
-        exc_list: list[Exception], is_timeout: bool, timeout_val: float
-    ) -> tuple[Exception, Exception | None]:
-        """
-        Build retry error based on exceptions encountered during operation
-
-        Args:
-          - exc_list: list of exceptions encountered during operation
-          - is_timeout: whether the operation failed due to timeout
-          - timeout_val: the operation timeout value in seconds, for constructing
-              the error message
-        Returns:
-          - tuple of the exception to raise, and a cause exception if applicable
-        """
-        if is_timeout:
-            # if failed due to timeout, raise deadline exceeded as primary exception
-            source_exc: Exception = core_exceptions.DeadlineExceeded(
-                f"operation_timeout of {timeout_val} exceeded"
-            )
-        elif exc_list:
-            # otherwise, raise non-retryable error as primary exception
-            source_exc = exc_list.pop()
-        else:
-            source_exc = RuntimeError("failed with unspecified exception")
-        # use the retry exception group as the cause of the exception
-        cause_exc: Exception | None = (
-            RetryExceptionGroup(exc_list) if exc_list else None
-        )
-        source_exc.__cause__ = cause_exc
-        return source_exc, cause_exc
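
The deleted _build_exception staticmethod is not gone so much as generalized: its logic moves into the shared _retry_exception_factory helper imported at the top of the file. A hedged reconstruction of that helper, adapted to api_core's RetryFailureReason enum (the exact body lives in google/cloud/bigtable/data/_helpers.py and may differ in details):

from __future__ import annotations

from google.api_core import exceptions as core_exceptions
from google.api_core.retry import RetryFailureReason

from google.cloud.bigtable.data.exceptions import RetryExceptionGroup


def _retry_exception_factory(
    exc_list: list[Exception],
    reason: RetryFailureReason,
    timeout_val: float | None,
) -> tuple[Exception, Exception | None]:
    """Convert the errors seen during retries into a (primary, cause) pair."""
    if reason == RetryFailureReason.TIMEOUT:
        # operation hit its deadline: surface DeadlineExceeded
        source_exc: Exception = core_exceptions.DeadlineExceeded(
            f"operation_timeout of {timeout_val} exceeded"
        )
    elif exc_list:
        # otherwise the final non-retryable error is the primary exception
        source_exc = exc_list.pop()
    else:
        source_exc = RuntimeError("failed with unspecified exception")
    # chain any remaining transient errors as the cause
    cause_exc: Exception | None = RetryExceptionGroup(exc_list) if exc_list else None
    source_exc.__cause__ = cause_exc
    return source_exc, cause_exc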

google/cloud/bigtable/data/_async/client.py

Lines changed: 25 additions & 46 deletions
@@ -33,6 +33,7 @@
 import random
 import os
 
+from functools import partial
 
 from google.cloud.bigtable_v2.services.bigtable.client import BigtableClientMeta
 from google.cloud.bigtable_v2.services.bigtable.async_client import BigtableAsyncClient
@@ -43,9 +44,8 @@
 )
 from google.cloud.bigtable_v2.types.bigtable import PingAndWarmRequest
 from google.cloud.client import ClientWithProject
-from google.api_core.exceptions import GoogleAPICallError
 from google.cloud.environment_vars import BIGTABLE_EMULATOR  # type: ignore
-from google.api_core import retry_async as retries
+from google.api_core import retry as retries
 from google.api_core.exceptions import DeadlineExceeded
 from google.api_core.exceptions import ServiceUnavailable
 from google.api_core.exceptions import Aborted
@@ -65,7 +65,7 @@
 from google.cloud.bigtable.data._helpers import _WarmedInstanceKey
 from google.cloud.bigtable.data._helpers import _CONCURRENCY_LIMIT
 from google.cloud.bigtable.data._helpers import _make_metadata
-from google.cloud.bigtable.data._helpers import _convert_retry_deadline
+from google.cloud.bigtable.data._helpers import _retry_exception_factory
 from google.cloud.bigtable.data._helpers import _validate_timeouts
 from google.cloud.bigtable.data._helpers import _get_retryable_errors
 from google.cloud.bigtable.data._helpers import _get_timeouts
@@ -223,7 +223,7 @@ async def close(self, timeout: float = 2.0):
 
     async def _ping_and_warm_instances(
         self, channel: grpc.aio.Channel, instance_key: _WarmedInstanceKey | None = None
-    ) -> list[GoogleAPICallError | None]:
+    ) -> list[BaseException | None]:
         """
         Prepares the backend for requests on a channel
 
@@ -578,7 +578,6 @@ async def read_rows_stream(
                 will be chained with a RetryExceptionGroup containing GoogleAPIError exceptions
                 from any retries that failed
             - GoogleAPIError: raised if the request encounters an unrecoverable error
-            - IdleTimeout: if iterator was abandoned
         """
         operation_timeout, attempt_timeout = _get_timeouts(
             operation_timeout, attempt_timeout, self
@@ -761,6 +760,9 @@ async def read_rows_sharded(
             for result in batch_result:
                 if isinstance(result, Exception):
                     error_dict[shard_idx] = result
+                elif isinstance(result, BaseException):
+                    # BaseException not expected; raise immediately
+                    raise result
                 else:
                     results_list.extend(result)
                 shard_idx += 1
@@ -872,22 +874,8 @@ async def sample_row_keys(
         # prepare retryable
         retryable_excs = _get_retryable_errors(retryable_errors, self)
         predicate = retries.if_exception_type(*retryable_excs)
-        transient_errors = []
 
-        def on_error_fn(exc):
-            # add errors to list if retryable
-            if predicate(exc):
-                transient_errors.append(exc)
-
-        retry = retries.AsyncRetry(
-            predicate=predicate,
-            timeout=operation_timeout,
-            initial=0.01,
-            multiplier=2,
-            maximum=60,
-            on_error=on_error_fn,
-            is_stream=False,
-        )
+        sleep_generator = retries.exponential_sleep_generator(0.01, 2, 60)
 
         # prepare request
         metadata = _make_metadata(self.table_name, self.app_profile_id)
@@ -902,10 +890,13 @@ async def execute_rpc():
             )
             return [(s.row_key, s.offset_bytes) async for s in results]
 
-        wrapped_fn = _convert_retry_deadline(
-            retry(execute_rpc), operation_timeout, transient_errors, is_async=True
+        return await retries.retry_target_async(
+            execute_rpc,
+            predicate,
+            sleep_generator,
+            operation_timeout,
+            exception_factory=_retry_exception_factory,
         )
-        return await wrapped_fn()
 
     def mutations_batcher(
         self,
@@ -1014,37 +1005,25 @@ async def mutate_row(
         # mutations should not be retried
        predicate = retries.if_exception_type()
 
-        transient_errors = []
-
-        def on_error_fn(exc):
-            if predicate(exc):
-                transient_errors.append(exc)
+        sleep_generator = retries.exponential_sleep_generator(0.01, 2, 60)
 
-        retry = retries.AsyncRetry(
-            predicate=predicate,
-            on_error=on_error_fn,
-            timeout=operation_timeout,
-            initial=0.01,
-            multiplier=2,
-            maximum=60,
-        )
-        # wrap rpc in retry logic
-        retry_wrapped = retry(self.client._gapic_client.mutate_row)
-        # convert RetryErrors from retry wrapper into DeadlineExceeded errors
-        deadline_wrapped = _convert_retry_deadline(
-            retry_wrapped, operation_timeout, transient_errors, is_async=True
-        )
-        metadata = _make_metadata(self.table_name, self.app_profile_id)
-        # trigger rpc
-        await deadline_wrapped(
+        target = partial(
+            self.client._gapic_client.mutate_row,
             row_key=row_key.encode("utf-8") if isinstance(row_key, str) else row_key,
             mutations=[mutation._to_pb() for mutation in mutations_list],
             table_name=self.table_name,
             app_profile_id=self.app_profile_id,
             timeout=attempt_timeout,
-            metadata=metadata,
+            metadata=_make_metadata(self.table_name, self.app_profile_id),
             retry=None,
         )
+        return await retries.retry_target_async(
+            target,
+            predicate,
+            sleep_generator,
+            operation_timeout,
+            exception_factory=_retry_exception_factory,
+        )
 
     async def bulk_mutate_rows(
         self,
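
One detail worth noting in mutate_row: retry_target_async expects a zero-argument target, so the rpc's arguments are bound once with functools.partial and the same bound call is re-invoked on every attempt (retry=None disables the gapic layer's own retry so the two loops don't nest). A standalone sketch of the pattern; fake_rpc is a made-up stand-in for client._gapic_client.mutate_row:

import asyncio
from functools import partial

from google.api_core import retry as retries


async def fake_rpc(*, row_key: bytes, timeout: float) -> str:
    """Stand-in for the gapic mutate_row call."""
    return f"mutated {row_key!r} with attempt timeout {timeout}s"


async def main() -> None:
    # bind the arguments once; each retry attempt re-awaits the same call
    target = partial(fake_rpc, row_key=b"row-1", timeout=5.0)
    result = await retries.retry_target_async(
        target,
        retries.if_exception_type(),  # empty predicate: mutations never retry
        retries.exponential_sleep_generator(0.01, 60, multiplier=2),
        timeout=20.0,  # operation-level deadline
    )
    print(result)


asyncio.run(main())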

google/cloud/bigtable/data/_async/mutations_batcher.py

Lines changed: 3 additions & 0 deletions
@@ -489,6 +489,9 @@ async def _wait_for_batch_results(
             if isinstance(result, Exception):
                 # will receive direct Exception objects if request task fails
                 found_errors.append(result)
+            elif isinstance(result, BaseException):
+                # BaseException not expected from grpc calls. Raise immediately
+                raise result
             elif result:
                 # completed requests will return a list of FailedMutationEntryError
                 for e in result:
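
The new BaseException branch (mirrored in read_rows_sharded above) exists because asyncio.gather(..., return_exceptions=True) surfaces failures as values typed BaseException, and asyncio.CancelledError no longer subclasses Exception on Python 3.8+, so such values are re-raised rather than recorded. A small illustration of the triage (the coroutine names are made up):

import asyncio


async def ok() -> str:
    return "done"


async def boom() -> str:
    raise ValueError("request failed")


async def main() -> None:
    results = await asyncio.gather(ok(), boom(), return_exceptions=True)
    found_errors: list[Exception] = []
    for result in results:
        if isinstance(result, Exception):
            found_errors.append(result)  # recoverable: record it, as above
        elif isinstance(result, BaseException):
            raise result  # e.g. CancelledError: propagate immediately
        else:
            print("success:", result)
    print("recorded:", found_errors)


asyncio.run(main())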
