Be sure to invalidate cache for both branches of pipe_file (#904)
martindurant authored Oct 20, 2024
1 parent 7fea0f5 · commit 1ddf5e6
Showing 2 changed files with 14 additions and 2 deletions.
s3fs/core.py: 4 additions & 2 deletions

@@ -1141,9 +1141,11 @@ async def _pipe_file(
         size = len(data)
         # 5 GB is the limit for an S3 PUT
         if size < min(5 * 2**30, 2 * chunksize):
-            return await self._call_s3(
+            out = await self._call_s3(
                 "put_object", Bucket=bucket, Key=key, Body=data, **kwargs
             )
+            self.invalidate_cache(path)
+            return out
         else:
 
             mpu = await self._call_s3(
@@ -1177,7 +1179,7 @@ async def _pipe_file(
                 UploadId=mpu["UploadId"],
                 MultipartUpload={"Parts": parts},
             )
-        self.invalidate_cache(path)
+            self.invalidate_cache(path)
 
     async def _put_file(
         self,
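
Note: invalidate_cache here clears the directory-listing cache that ls and info consult. Before this change, the early return in the small-object branch skipped the invalidation, so a listing cached before pipe_file could keep omitting the newly written key. A minimal sketch of the user-visible behaviour (not part of this commit), assuming configured credentials and a hypothetical writable bucket named my-bucket:

    import s3fs

    fs = s3fs.S3FileSystem()                      # assumes AWS credentials are configured
    fs.ls("my-bucket/a/")                         # populates the directory-listing cache
    fs.pipe_file("my-bucket/a/new.txt", b"abc")   # small payload, so the put_object branch runs
    # With this fix, the cached listing for my-bucket/a/ is invalidated, so the next
    # ls reflects the new key instead of returning the stale cached result.
    assert "my-bucket/a/new.txt" in fs.ls("my-bucket/a/")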
s3fs/tests/test_s3fs.py: 10 additions & 0 deletions

@@ -2802,3 +2802,13 @@ def test_upload_part_with_prime_pads(s3_fixed_upload_size):
 
     with s3_fixed_upload_size.open(a, "r") as f:
         assert len(f.read()) == 2 * block + pad1 + pad2
+
+
+@pytest.mark.asyncio
+async def test_invalidate_cache(s3: s3fs.S3FileSystem) -> None:
+    await s3._call_s3("put_object", Bucket=test_bucket_name, Key="a/b.txt", Body=b"abc")
+    before = await s3._ls(f"{test_bucket_name}/a/")
+    assert sorted(before) == ["test/a/b.txt"]
+    await s3._pipe_file(f"{test_bucket_name}/a/c.txt", data=b"abc")
+    after = await s3._ls(f"{test_bucket_name}/a/")
+    assert sorted(after) == ["test/a/b.txt", "test/a/c.txt"]
