Skip to content

Commit 1f396da

Browse files
committed
Fixed the LFU task, making it possible to upload files to Drive in chunks;
the CAE problem is still pending.
1 parent 93909f9 commit 1f396da

File tree

2 files changed

+22
-15
lines changed

2 files changed

+22
-15
lines changed

src/msgraph_core/models/large_file_upload_session.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ def __init__(
2020
):
2121
self._upload_url = upload_url
2222
self._expiration_date_time = expiration_date_time
23-
self.additional_data = additional_data if additional_data is not None else []
23+
self.additional_data = additional_data if additional_data is not None else {}
2424
self.is_cancelled = is_cancelled
2525
self.next_expected_ranges = next_expected_ranges if next_expected_ranges is not None else []
2626

@@ -66,8 +66,10 @@ def next_expected_ranges(self, value):
6666

6767
@staticmethod
6868
def create_from_discriminator_value(
69-
parse_node: ParseNode
69+
parse_node: Optional[ParseNode] = None
7070
) -> Optional['LargeFileUploadSession']:
71+
if not parse_node:
72+
return None
7173
return LargeFileUploadSession()
7274

7375
def serialize(self, writer: SerializationWriter) -> None:

src/msgraph_core/tasks/large_file_upload.py

Lines changed: 18 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
import os
12
from typing import Callable, Optional, List, Tuple, Any, Dict
23
from io import BytesIO
34
from asyncio import Future
@@ -29,7 +30,10 @@ def __init__(
2930
self._upload_session = upload_session
3031
self._request_adapter = request_adapter
3132
self.stream = stream
32-
self.file_size = stream.getbuffer().nbytes
33+
try:
34+
self.file_size = stream.getbuffer().nbytes
35+
except AttributeError:
36+
self.file_size = os.stat(stream.name).st_size
3337
self.max_chunk_size = max_chunk_size
3438
cleaned_value = self.check_value_exists(
3539
upload_session, 'get_next_expected_range', ['next_expected_range', 'NextExpectedRange']
@@ -98,7 +102,7 @@ def upload_session_expired(
98102
interval = now - then
99103
if not isinstance(interval, timedelta):
100104
raise ValueError("Interval is not a timedelta")
101-
if interval.total_seconds() <= 0:
105+
if interval.total_seconds() >= 0:
102106
return True
103107
return False
104108

@@ -115,13 +119,15 @@ async def upload(self, after_chunk_upload: Optional[Callable] = None):
115119
process_next = session
116120
# determine the range to be uploaded
117121
# even when resuming existing upload sessions.
118-
range_parts = self.next_range[0].split("-") if self.next_range else ['0']
122+
#range_parts = self.next_range[0].split("-") if self.next_range else ['0']
123+
124+
range_parts = self.next_range[0].split("-") if self.next_range else ['0', '0']
119125
end = min(int(range_parts[0]) + self.max_chunk_size - 1, self.file_size)
120126
uploaded_range = [range_parts[0], end]
121127
while self.chunks > 0:
122128
session = process_next
123129
try:
124-
lfu_session: Optional[LargeFileUploadSession] = await session
130+
lfu_session: LargeFileUploadSession = session
125131
if lfu_session is None:
126132
continue
127133
next_range = lfu_session.next_expected_ranges
@@ -137,9 +143,9 @@ async def upload(self, after_chunk_upload: Optional[Callable] = None):
137143
self.next_range = next_range[0] + "-"
138144
process_next = await self.next_chunk(self.stream)
139145
except Exception as error:
140-
logging.error(f"Error uploading chunk {error}")
141-
raise # remove after manual testing
142-
self.chunks -= 1
146+
logging.error("Error uploading chunk %s", error)
147+
finally:
148+
self.chunks -= 1
143149
return session
144150

145151
@property
@@ -152,7 +158,6 @@ def next_range(self, value: Optional[str]) -> None:
152158

153159
async def next_chunk(self, file: BytesIO, range_start: int = 0, range_end: int = 0) -> Future:
154160
upload_url = self.get_validated_upload_url(self.upload_session)
155-
156161
if not upload_url:
157162
raise ValueError('The upload session URL must not be empty.')
158163
info = RequestInformation()
@@ -177,15 +182,15 @@ async def next_chunk(self, file: BytesIO, range_start: int = 0, range_end: int =
177182
end = min(end, self.max_chunk_size + start)
178183
chunk_data = file.read(end - start + 1)
179184
info.headers = HeadersCollection()
185+
access_token = "<place_holder_pending CAE fix>"
180186

181187
info.headers.try_add('Content-Range', f'bytes {start}-{end}/{self.file_size}')
182-
# info.headers.try_add(**info.request_headers) what do we do if headers need to be passed
183188
info.headers.try_add('Content-Length', str(len(chunk_data)))
184-
info.set_stream_content(BytesIO(chunk_data))
189+
info.headers.try_add("Content-Type", "application/octet-stream")
190+
info.headers.try_add("Authorization", f"Bearer {access_token}")
191+
info.set_stream_content(bytes(chunk_data)) # Convert chunk_data to bytes
185192
error_map: Dict[str, int] = {}
186-
187-
parsable_factory: LargeFileUploadSession = self.upload_session
188-
193+
parsable_factory = LargeFileUploadSession
189194
return await self.request_adapter.send_async(info, parsable_factory, error_map)
190195

191196
def get_file(self) -> BytesIO:

0 commit comments

Comments (0)