@@ -33,44 +33,50 @@ async def download_chunk(
     ):
         range_header = {"Range": f"bytes={start}-{stop}"}
         headers.update(range_header)
-        async with session.get(url, headers=headers) as response:
-            response.raise_for_status()
-            content = await response.read()
-
-            async with aiofiles.open(filename, "r+b") as f:
-                await f.seek(start)
-                await f.write(content)
+        try:
+            async with session.get(url, headers=headers) as response:
+                response.raise_for_status()
+                content = await response.read()
+
+                async with aiofiles.open(filename, "r+b") as f:
+                    await f.seek(start)
+                    await f.write(content)
+        except Exception as e:
+            print(f"Error in download_chunk: {e}")

     async def download_file(
         self, url: str, filename: str, max_files: int, chunk_size: int, headers: Optional[Dict[str, str]] = None,
         parallel_failures: int = 0, max_retries: int = 0, callback: Optional[Any] = None
     ):
         headers = headers or {}
-        async with aiohttp.ClientSession() as session:
-            async with session.head(url) as resp:
-                file_size = int(resp.headers['Content-Length'])
-            chunks = range(0, file_size, chunk_size)
-
-            # Create an empty file
-            async with aiofiles.open(filename, "wb") as f:
-                await f.seek(file_size - 1)
-                await f.write(b"\0")
-
-            semaphore = asyncio.Semaphore(max_files)
-            tasks = []
-            for start in chunks:
-                stop = min(start + chunk_size - 1, file_size - 1)
-                tasks.append(self.download_chunk_with_retries(
-                    session, url, filename, start, stop, headers, semaphore, parallel_failures, max_retries
-                ))
-
-            progress_bar = tqdm(total=file_size, unit="B", unit_scale=True, desc="Downloading on 🔥")
-            for chunk_result in asyncio.as_completed(tasks):
-                downloaded = await chunk_result
-                progress_bar.update(downloaded)
-                if callback:
-                    await callback(downloaded)
-            progress_bar.close()
+        try:
+            async with aiohttp.ClientSession() as session:
+                async with session.head(url) as resp:
+                    file_size = int(resp.headers['Content-Length'])
+                chunks = range(0, file_size, chunk_size)
+
+                # Create an empty file
+                async with aiofiles.open(filename, "wb") as f:
+                    await f.seek(file_size - 1)
+                    await f.write(b"\0")
+
+                semaphore = asyncio.Semaphore(max_files)
+                tasks = []
+                for start in chunks:
+                    stop = min(start + chunk_size - 1, file_size - 1)
+                    tasks.append(self.download_chunk_with_retries(
+                        session, url, filename, start, stop, headers, semaphore, parallel_failures, max_retries
+                    ))
+
+                progress_bar = tqdm(total=file_size, unit="B", unit_scale=True, desc="Downloading on 🔥")
+                for chunk_result in asyncio.as_completed(tasks):
+                    downloaded = await chunk_result
+                    progress_bar.update(downloaded)
+                    if callback:
+                        await callback(downloaded)
+                progress_bar.close()
+        except Exception as e:
+            print(f"Error in download_file: {e}")

     async def download_chunk_with_retries(
         self, session: ClientSession, url: str, filename: str, start: int, stop: int, headers: Dict[str, str],
@@ -95,21 +101,23 @@ async def upload_file(
         file_size = os.path.getsize(file_path)
         tasks = []
         semaphore = asyncio.Semaphore(max_files)
-
-        async with aiohttp.ClientSession() as session:
-            for part_number, part_url in enumerate(parts_urls):
-                start = part_number * chunk_size
-                tasks.append(self.upload_chunk_with_retries(
-                    session, part_url, file_path, start, chunk_size, semaphore, parallel_failures, max_retries
-                ))
-
-            progress_bar = tqdm(total=file_size, unit="B", unit_scale=True, desc="Uploading on 🔥")
-            for chunk_result in asyncio.as_completed(tasks):
-                uploaded = await chunk_result
-                progress_bar.update(uploaded)
-                if callback:
-                    await callback(uploaded)
-            progress_bar.close()
+        try:
+            async with aiohttp.ClientSession() as session:
+                for part_number, part_url in enumerate(parts_urls):
+                    start = part_number * chunk_size
+                    tasks.append(self.upload_chunk_with_retries(
+                        session, part_url, file_path, start, chunk_size, semaphore, parallel_failures, max_retries
+                    ))
+
+                progress_bar = tqdm(total=file_size, unit="B", unit_scale=True, desc="Uploading on 🔥")
+                for chunk_result in asyncio.as_completed(tasks):
+                    uploaded = await chunk_result
+                    progress_bar.update(uploaded)
+                    if callback:
+                        await callback(uploaded)
+                progress_bar.close()
+        except Exception as e:
+            print(f"Error in upload_file: {e}")

     async def upload_chunk_with_retries(
         self, session: ClientSession, url: str, file_path: str, start: int, chunk_size: int,
@@ -129,13 +137,16 @@ async def upload_chunk_with_retries(
     async def upload_chunk(
         self, session: ClientSession, url: str, file_path: str, start: int, chunk_size: int
     ):
-        async with aiofiles.open(file_path, 'rb') as f:
-            await f.seek(start)
-            chunk = await f.read(chunk_size)
-            headers = {'Content-Length': str(len(chunk))}
-            async with session.put(url, data=chunk, headers=headers) as response:
-                response.raise_for_status()
-                return len(chunk)
+        try:
+            async with aiofiles.open(file_path, 'rb') as f:
+                await f.seek(start)
+                chunk = await f.read(chunk_size)
+                headers = {'Content-Length': str(len(chunk))}
+                async with session.put(url, data=chunk, headers=headers) as response:
+                    response.raise_for_status()
+                    return len(chunk)
+        except Exception as e:
+            print(f"Error in upload_chunk: {e}")

     def download(self, url: str, filename: str, max_files: int, chunk_size: int):
         asyncio.run(self.download_file(url, filename, max_files, chunk_size))
@@ -154,18 +165,21 @@ def normal_download(self, url: str, filename: str):
         progress_bar.close()

     def compare_speed(self, url: str, filename: str):
-        start_time = time.time()
-        self.normal_download(url, filename)
-        normal_time = time.time() - start_time
-
-        os.remove(filename)
-
-        start_time = time.time()
-        asyncio.run(self.download_file(url, filename, max_files=10, chunk_size=2 * 1024 * 1024))
-        fire_time = time.time() - start_time
-
-        print(f"\n🐌 Download Time: {normal_time:.2f} seconds")
-        print(f"🔥 Download Time: {fire_time:.2f} seconds\n")
+        try:
+            start_time = time.time()
+            self.normal_download(url, filename)
+            normal_time = time.time() - start_time
+
+            os.remove(filename)
+
+            start_time = time.time()
+            asyncio.run(self.download_file(url, filename, max_files=10, chunk_size=2 * 1024 * 1024))
+            fire_time = time.time() - start_time
+
+            print(f"\n🐌 Download Time: {normal_time:.2f} seconds")
+            print(f"🔥 Download Time: {fire_time:.2f} seconds\n")
+        except Exception as e:
+            print(f"Error in compare_speed: {e}")


 if __name__ == "__main__":
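
For context, a minimal usage sketch of the methods this commit touches. The import path and class name (`firerequests`, `FireRequests`), the URL, and the filename are assumptions for illustration only; none of them appear in these hunks.

```python
# Hypothetical usage sketch: module/class names and URL are assumed, not taken
# from this diff. Adjust to the actual layout of the repository.
from firerequests import FireRequests  # assumed import path

fr = FireRequests()

# Chunked parallel download: up to 10 concurrent range requests, 2 MB per chunk,
# mirroring the values used by compare_speed() above.
fr.download(
    "https://example.com/big-file.bin",  # placeholder URL
    "big-file.bin",
    max_files=10,
    chunk_size=2 * 1024 * 1024,
)

# Benchmark the sequential download path against the chunked one.
fr.compare_speed("https://example.com/big-file.bin", "big-file.bin")
```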