sdk/python: ObjectFileReader minor follow-up fixes · NVIDIA/aistore@197f866 · GitHub
[go: up one dir, main page]
Skip to content

Commit 197f866

Browse files
committed
sdk/python: ObjectFileReader minor follow-up fixes
Signed-off-by: Ryan Koo <rbk65@cornell.edu>
1 parent 011ff05 commit 197f866

File tree

5 files changed

+13
-10
lines changed

5 files changed

+13
-10
lines changed

python/CHANGELOG.md

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,11 @@ We structure this changelog in accordance with [Keep a Changelog](https://keepac
88

99
## Unreleased
1010

11-
## [1.13.7] - 2025-05-15
11+
### Changed
12+
13+
- Improve `ObjectFileReader` logging to include the full exception details and traceback when retrying and resuming.
14+
15+
## [1.13.8] - 2025-05-15
1216

1317
### Added
1418

python/aistore/sdk/obj/obj_file/utils.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -59,12 +59,13 @@ def handle_broken_stream(
5959
resume_total = increment_resume(resume_total, max_resume, err)
6060
obj_path = content_iterator.client.path
6161
logger.warning(
62-
"Chunked encoding error while reading '%s': (%s), retrying %d/%d",
62+
"Error while reading '%s', retrying %d/%d",
6363
obj_path,
64-
err,
6564
resume_total,
6665
max_resume,
66+
exc_info=err,
6767
)
68+
6869
# Create a new iterator from the last read position
6970
new_iter = content_iterator.iter(offset=resume_position)
7071
return new_iter, resume_total

python/aistore/sdk/obj/object_reader.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -98,8 +98,8 @@ def as_file(
9898
"""
9999
Create a read-only, non-seekable `ObjectFileReader` instance for streaming object data in chunks.
100100
This file-like object primarily implements the `read()` method to retrieve data sequentially,
101-
with automatic retry/resumption in case of stream interruptions (e.g. `ChunkedEncodingError`,
102-
`ConnectionError`) or timeouts (e.g. `ReadTimeout`, `ConnectTimeout`).
101+
with automatic retry/resumption in case of unexpected stream interruptions (e.g. `ChunkedEncodingError`,
102+
`ConnectionError`) or timeouts (e.g. `ReadTimeout`).
103103
104104
Args:
105105
buffer_size (int, optional): Currently unused; retained for backward compatibility and future

python/tests/perf/object_file/obj-file-reader-stress-test.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -59,7 +59,7 @@ def test_with_interruptions(
5959

6060
# Perform ObjectFileReader read
6161
downloaded_data, resume_total = obj_file_reader_read(
62-
object_reader, READ_SIZE, BUFFER_SIZE, MAX_RESUME
62+
object_reader, READ_SIZE, MAX_RESUME
6363
)
6464

6565
end_time = time.time()

python/tests/perf/object_file/utils.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -48,13 +48,11 @@ def create_and_put_objects(bucket: Bucket, obj_size: int, num_objects: int) -> N
4848

4949

5050
def obj_file_reader_read(
51-
object_reader: ObjectReader, read_size: int, buffer_size: int, max_resume: int
51+
object_reader: ObjectReader, read_size: int, max_resume: int
5252
) -> Tuple[bytes, int]:
5353
"""Reads via ObjectFileReader instantiated from provided ObjectReader. Returns the downloaded data and total number of resumes."""
5454
result = bytearray()
55-
with object_reader.as_file(
56-
buffer_size=buffer_size, max_resume=max_resume
57-
) as obj_file:
55+
with object_reader.as_file(max_resume=max_resume) as obj_file:
5856
while True:
5957
data = obj_file.read(read_size)
6058
if not data:

0 commit comments

Comments (0)