9 changes: 7 additions & 2 deletions google/cloud/bigquery/retry.py
@@ -34,7 +34,12 @@
     auth_exceptions.TransportError,
 )
 
-_DEFAULT_JOB_DEADLINE = 60.0 * 10.0  # seconds
+_DEFAULT_RETRY_DEADLINE = 10.0 * 60.0  # 10 minutes
+
+# Allow for a few retries after the API request times out. This is relevant for
+# rateLimitExceeded errors, which can be raised either by the Google load
+# balancer or the BigQuery job server.
+_DEFAULT_JOB_DEADLINE = 3.0 * _DEFAULT_RETRY_DEADLINE
 
 
 def _should_retry(exc):
@@ -51,7 +56,7 @@ def _should_retry(exc):
     return reason in _RETRYABLE_REASONS
 
 
-DEFAULT_RETRY = retry.Retry(predicate=_should_retry, deadline=600.0)
+DEFAULT_RETRY = retry.Retry(predicate=_should_retry, deadline=_DEFAULT_RETRY_DEADLINE)
 """The default retry object.
 
 Any method with a ``retry`` parameter will be retried automatically,
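
For context, a minimal sketch of how the retuned DEFAULT_RETRY (now carrying the 10-minute _DEFAULT_RETRY_DEADLINE) can be passed to, or customized for, a client call. This is illustrative only and not part of the change; the public table name and the 30-second override are placeholder values.

from google.cloud import bigquery
from google.cloud.bigquery.retry import DEFAULT_RETRY

client = bigquery.Client()

# Passing DEFAULT_RETRY explicitly is equivalent to the library default for
# per-request retries (predicate=_should_retry, 10-minute deadline).
rows = client.list_rows(
    "bigquery-public-data.usa_names.usa_1910_2013",  # placeholder public table
    retry=DEFAULT_RETRY,
    max_results=10,
)

# Callers that need a tighter bound can derive a copy with a shorter deadline.
quick_retry = DEFAULT_RETRY.with_deadline(30.0)
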
5 changes: 3 additions & 2 deletions tests/unit/test_retry.py
@@ -125,6 +125,7 @@ def test_DEFAULT_JOB_RETRY_predicate():


 def test_DEFAULT_JOB_RETRY_deadline():
-    from google.cloud.bigquery.retry import DEFAULT_JOB_RETRY
+    from google.cloud.bigquery.retry import DEFAULT_JOB_RETRY, DEFAULT_RETRY
 
-    assert DEFAULT_JOB_RETRY._deadline == 600
+    # Make sure we can retry the job at least once.
+    assert DEFAULT_JOB_RETRY._deadline > DEFAULT_RETRY._deadline
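
The updated assertion checks the relationship rather than a hard-coded 600 seconds: DEFAULT_JOB_RETRY is meant to outlast at least one full DEFAULT_RETRY window. A hedged usage sketch of the two retries together, assuming the job_retry parameter of Client.query; the query text is only a placeholder.

from google.cloud import bigquery
from google.cloud.bigquery.retry import DEFAULT_JOB_RETRY, DEFAULT_RETRY

client = bigquery.Client()

# retry bounds each individual API request; job_retry bounds re-issuing the
# whole query job (e.g. after a rateLimitExceeded failure), so its deadline
# is the longer of the two.
job = client.query(
    "SELECT 1",  # placeholder query
    retry=DEFAULT_RETRY,
    job_retry=DEFAULT_JOB_RETRY,
)
print(list(job.result()))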