Skip to content

Commit

Permalink
increase chunk size to deal with larger files
Browse files Browse the repository at this point in the history
  • Loading branch information
bennybp committed Feb 6, 2025
1 parent f511950 commit b4f65db
Showing 1 changed file with 5 additions and 2 deletions.
7 changes: 5 additions & 2 deletions qcfractal/qcfractal/components/external_files/socket.py
Original file line number Diff line number Diff line change
Expand Up @@ -132,7 +132,9 @@ def add_data(
session.flush()

try:
while chunk := file_data.read(10 * 1024 * 1024):
# Chunk size = 256 MiB. There is a limit of 10,000 chunks (the S3
# multipart-upload part limit), so the maximum file size is
# 10,000 * 256 MiB = 2,560,000 MiB, i.e. roughly 2.44 TiB.
while chunk := file_data.read(256 * 1024 * 1024):
if job_progress is not None:
job_progress.raise_if_cancelled()

Expand Down Expand Up @@ -192,7 +194,8 @@ def add_file(
ID of the external file (which is also set in the given ORM object)
"""

self._logger.info(f"Uploading {file_path} to S3. File size: {os.path.getsize(file_path)/1048576} MiB")
file_size = os.path.getsize(file_path)
self._logger.info(f"Uploading {file_path} to S3. File size: {file_size/1048576} MiB")

with open(file_path, "rb") as f:
return self.add_data(f, file_orm, job_progress=job_progress, session=session)
Expand Down

0 comments on commit b4f65db

Please sign in to comment.