Skip to content
Draft
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
116 changes: 89 additions & 27 deletions shotgun_api3/shotgun.py
Original file line number Diff line number Diff line change
Expand Up @@ -495,8 +495,6 @@ class Shotgun(object):
)

_MULTIPART_UPLOAD_CHUNK_SIZE = 20000000
MAX_ATTEMPTS = 3 # Retries on failure
BACKOFF = 0.75 # Seconds to wait before retry, times the attempt number

def __init__(
self,
Expand Down Expand Up @@ -3757,8 +3755,16 @@ def _call_rpc(
if self.config.localized is True:
req_headers["locale"] = "auto"

attempt = 1
while attempt <= self.MAX_ATTEMPTS:
max_rpc_attempts = self.config.max_rpc_attempts
rpc_attempt_interval = self.config.rpc_attempt_interval / 1000.0

attempt = 0
while attempt < max_rpc_attempts:
if attempt:
time.sleep(attempt * rpc_attempt_interval)

attempt += 1

Comment on lines +3761 to +3767
Copy link

Copilot AI Nov 14, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The sleep duration calculation multiplies attempt * rpc_attempt_interval, but attempt starts at 0 and increments to 1 before the first iteration's logic. This means the first retry (after a failure) will sleep for 1 * rpc_attempt_interval, the second retry for 2 * rpc_attempt_interval, etc. However, in the old code, the sleep was attempt * BACKOFF where attempt started at 1, giving the same progression. The issue is that when attempt reaches max_rpc_attempts, the loop exits, but the last attempt number logged will be max_rpc_attempts. If max_rpc_attempts is 3, you'll see attempts 1, 2, 3, but only 3 actual iterations (0->1, 1->2, 2->3 before exit). This is correct behavior, but the new loop condition attempt < max_rpc_attempts with attempt starting at 0 means if max_rpc_attempts is 3, you get attempts numbered 1, 2, 3 (three attempts total), which matches the old behavior where MAX_ATTEMPTS = 3 gave attempts 1, 2, 3. No issue here on second thought, but worth verifying the intent is the same number of attempts.

Suggested change
attempt = 0
while attempt < max_rpc_attempts:
if attempt:
time.sleep(attempt * rpc_attempt_interval)
attempt += 1
for attempt in range(1, max_rpc_attempts + 1):
if attempt > 1:
time.sleep((attempt - 1) * rpc_attempt_interval)

Copilot uses AI. Check for mistakes.
http_status, resp_headers, body = self._make_call(
"POST",
self.config.api_path,
Expand All @@ -3776,10 +3782,8 @@ def _call_rpc(
# We've seen some rare instances of PTR returning 502 for issues that
# appear to be caused by something internal to PTR. We're going to
# allow for limited retries for those specifically.
if attempt != self.MAX_ATTEMPTS and e.errcode in [502, 504]:
if attempt < max_rpc_attempts and e.errcode in [502, 504]:
LOG.debug("Got a 502 or 504 response. Waiting and retrying...")
time.sleep(float(attempt) * self.BACKOFF)
attempt += 1
continue
elif e.errcode == 403:
# 403 is returned with custom error page when api access is blocked
Expand Down Expand Up @@ -3919,6 +3923,9 @@ def _make_call(
rpc_attempt_interval = self.config.rpc_attempt_interval / 1000.0

while attempt < max_rpc_attempts:
if attempt:
time.sleep(attempt * rpc_attempt_interval)

attempt += 1
try:
return self._http_request(verb, path, body, req_headers)
Expand All @@ -3938,6 +3945,7 @@ def _make_call(
if attempt == max_rpc_attempts:
LOG.debug("Request failed. Giving up after %d attempts." % attempt)
raise
# TODO create only one attempt for SSL errors.
Copy link

Copilot AI Nov 14, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This TODO comment is unclear about what 'create only one attempt' means. Consider rephrasing to something like 'TODO: Skip retries for SSL errors' or 'TODO: Fail immediately on SSL errors without retrying' to clarify the intended behavior.

Suggested change
# TODO create only one attempt for SSL errors.
# TODO: Fail immediately on SSL errors without retrying.

Copilot uses AI. Check for mistakes.
except Exception as e:
self._close_connection()
LOG.debug(f"Request failed. Reason: {e}", exc_info=True)
Expand All @@ -3947,7 +3955,6 @@ def _make_call(
"Request failed, attempt %d of %d. Retrying in %.2f seconds..."
% (attempt, max_rpc_attempts, rpc_attempt_interval)
)
time.sleep(rpc_attempt_interval)

def _http_request(
self, verb: str, path: str, body, headers: dict[str, Any]
Expand Down Expand Up @@ -4480,24 +4487,41 @@ def _upload_data_to_storage(
:rtype: str
"""

attempt = 1
while attempt <= self.MAX_ATTEMPTS:
## TODO - add unittests for those cases
Copy link

Copilot AI Nov 14, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Corrected spelling of 'unitests' to 'unittests'.

Suggested change
## TODO - add unitests for those cases
## TODO - add unittests for those cases

Copilot uses AI. Check for mistakes.
# storage_url = "https://untrusted-root.badssl.com/"
# storage_url = "https://wrong.host.badssl.com/"
# storage_url = "https://expired.badssl.com/"

attempt = 0
max_rpc_attempts = self.config.max_rpc_attempts
rpc_attempt_interval = self.config.rpc_attempt_interval / 1000.0

while attempt <= max_rpc_attempts:
Copy link

Copilot AI Nov 14, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Inconsistent loop condition: this uses attempt <= max_rpc_attempts while other retry loops in the same file use attempt < max_rpc_attempts (lines 3762, 3925, 4655). If max_rpc_attempts is 3, this loop will execute 4 times (attempt values 1, 2, 3, 4), while the others execute 3 times. This should be attempt < max_rpc_attempts for consistency.

Suggested change
while attempt <= max_rpc_attempts:
while attempt < max_rpc_attempts:

Copilot uses AI. Check for mistakes.
if attempt:
time.sleep(attempt * rpc_attempt_interval)

attempt += 1

try:
opener = self._build_opener(urllib.request.HTTPHandler)

request = urllib.request.Request(storage_url, data=data)
request.add_header("Content-Type", content_type)
request.add_header("Content-Length", size)
request.get_method = lambda: "PUT"
request = urllib.request.Request(
storage_url,
method="PUT",
headers={
"Content-Type": content_type,
"Content-Length": size,
},
data=data,
)

result = self._make_upload_request(request, opener)

LOG.debug("Completed request to %s" % request.get_method())
LOG.debug("Completed request to %s", safe_short_url(storage_url))

except urllib.error.HTTPError as e:
if attempt != self.MAX_ATTEMPTS and e.code in [500, 503]:
if attempt < max_rpc_attempts and e.code in [500, 503]:
LOG.debug("Got a %s response. Waiting and retrying..." % e.code)
time.sleep(float(attempt) * self.BACKOFF)
attempt += 1
continue
elif e.code in [500, 503]:
raise ShotgunError(
Expand All @@ -4510,9 +4534,20 @@ def _upload_data_to_storage(
% (storage_url, e)
)
except urllib.error.URLError as e:
if isinstance(e.reason, ssl.SSLError):
ssl_err = e.reason

LOG.debug(
f"Received an SSL error during request to {safe_short_url(storage_url)}"
)

if isinstance(ssl_err, ssl.SSLCertVerificationError):
LOG.debug(f"SSL certificate error occurred: {ssl_err}")
else:
LOG.debug(f"SSL error occurred: {ssl_err}")
raise

LOG.debug("Got a '%s' response. Waiting and retrying..." % e)
time.sleep(float(attempt) * self.BACKOFF)
attempt += 1
continue
else:
break
Expand Down Expand Up @@ -4613,19 +4648,22 @@ def _send_form(self, url: str, params: dict[str, Any]) -> str:

params.update(self._auth_params())

attempt = 1
while attempt <= self.MAX_ATTEMPTS:
max_rpc_attempts = self.config.max_rpc_attempts
rpc_attempt_interval = self.config.rpc_attempt_interval / 1000.0

attempt = 0
while attempt < max_rpc_attempts:
if attempt:
time.sleep(attempt * rpc_attempt_interval)

attempt += 1

# Perform the request
try:
opener = self._build_opener(FormPostHandler)
resp = opener.open(url, params)
result = resp.read()
# response headers are in str(resp.info()).splitlines()
except urllib.error.URLError as e:
LOG.debug("Got a %s response. Waiting and retrying..." % e)
time.sleep(float(attempt) * self.BACKOFF)
attempt += 1
continue
except urllib.error.HTTPError as e:
if e.code == 500:
raise ShotgunError(
Expand All @@ -4636,6 +4674,10 @@ def _send_form(self, url: str, params: dict[str, Any]) -> str:
else:
raise ShotgunError("Unanticipated error occurred %s" % (e))

except urllib.error.URLError as e:
LOG.debug("Got a %s response. Waiting and retrying...", e)
continue

if isinstance(result, bytes):
result = result.decode("utf-8")

Expand Down Expand Up @@ -4823,3 +4865,23 @@ def _get_type_and_id_from_value(value):
LOG.debug(f"Could not optimize entity value {value}")

return value


def safe_short_url(url: str, max_path_length: int = 80) -> str:
    """
    Sanitize a URL so it is safe to include in log messages.

    Removes credentials that may be embedded in the URL (HTTP Basic Auth
    ``user:password@`` userinfo), drops the query string and fragment (which
    frequently carry API keys or signed tokens), and truncates an overly long
    path in the middle with ``[...]`` so log lines stay readable.

    Unlike a plain ``hostname`` substitution, the port (e.g. ``:8443``) is
    preserved, since it is not sensitive and helps identify the endpoint.

    :param str url: The URL to sanitize.
    :param int max_path_length: Maximum length of the path component before it
        is truncated in the middle. Defaults to ``80``.

    :returns: The sanitized, shortened URL, safe for logging.
    :rtype: str
    """
    parts = urllib.parse.urlparse(url)

    # If the path is longer than max_path_length, truncate it in the middle,
    # keeping both ends, which usually carry the most identifying information.
    if len(parts.path) > max_path_length:
        half_length = max_path_length // 2

        parts = parts._replace(
            path=parts.path[: half_length - 3] + "[...]" + parts.path[-half_length + 3 :]
        )

    # Strip the "user:password@" userinfo prefix, if any, while keeping the
    # host and port. rpartition returns the whole netloc unchanged when there
    # is no "@", so credential-free URLs pass through untouched.
    sanitized_netloc = parts.netloc.rpartition("@")[2]

    return urllib.parse.urlunparse(
        parts._replace(
            netloc=sanitized_netloc,  # Sanitize possible in-URL credentials - HTTP Basic Auth
            query="",  # Sanitize possible in-URL credentials - API keys
            fragment="",
        )
    )
Loading