@@ -1,5 +1,6 @@
 import asyncio
+import dataclasses
 import hashlib
 import hmac
 import io
@@ -13,7 +14,7 @@
 import socketio
 from tqdm import tqdm
 from core.config import Config
-from core import certificate, unit
+from core import certificate, system, unit
 from core.timer import Task, Timer
 import pyzstd as zstd
 import core.utils as utils
@@ -25,6 +26,7 @@
 from core.api import (
     File,
     BMCLAPIFile,
+    StatsCache,
     Storage,
     get_hash,
 )
@@ -54,6 +56,7 @@ async def fetchToken(self):
         async with aiohttp.ClientSession(
             headers={"User-Agent": USER_AGENT}, base_url=BASE_URL
         ) as session:
+            logger.info("Fetching token")
             try:
                 async with session.get(
                     "/openbmclapi-agent/challenge", params={"clusterId": CLUSTER_ID}
@@ -80,15 +83,14 @@ async def fetchToken(self):
                     Timer.delay(
                         self.fetchToken, delay=float(content["ttl"]) / 1000.0 - 600
                     )
+                    logger.info("Fetched token")

             except aiohttp.ClientError as e:
                 logger.error(f"Error fetching token: {e}.")

     async def getToken(self) -> str:
         if not self.token:
-            logger.info("Fetching token")
             await self.fetchToken()
-            logger.info("Fetched token")
         return self.token or ""

 class ParseFileList:
@@ -152,6 +154,8 @@ async def _download(self, pbar: tqdm, session: aiohttp.ClientSession):
             except:
                 pbar.update(-size)
                 await self.queues.put(file)
+            if cluster:
+                await cluster._check_files_sync_status("下载文件中", pbar, unit.format_more_bytes)
         await session.close()

     async def _mount_file(self, file: BMCLAPIFile):
@@ -166,9 +170,12 @@ async def _mount_file(self, file: BMCLAPIFile):
                 logger.error(traceback.format_exc())
             if result != file.size:
                 logger.error(f"Download file error: File {file.hash} ({unit.format_bytes(file.size)}) copy to target file error: {file.hash} ({unit.format_bytes(result)})")
-                os.remove(f"./cache/download/{file.hash[:2]}/{file.hash}")
+                try:
+                    os.remove(f"./cache/download/{file.hash[:2]}/{file.hash}")
+                except:
+                    ...
     async def download(self, storages: list['Storage'], miss: list[BMCLAPIFile]):
-        with tqdm(desc="Downloading files", unit="bytes", unit_divisor=1024, total=sum((file.size for file in miss)), unit_scale=True) as pbar:
+        with tqdm(desc="Downloading files", unit="b", unit_divisor=1024, total=sum((file.size for file in miss)), unit_scale=True) as pbar:
             self.storages = storages
             for file in miss:
                 await self.queues.put(file)
@@ -197,13 +204,12 @@ def __init__(self, dir: Path) -> None:
             raise FileExistsError("The path is file.")
         self.dir.mkdir(exist_ok=True, parents=True)
         self.cache: dict[str, File] = {}
-        self.stats: stats.StorageStats = stats.get_storage(f"File_{self.dir}")
         self.timer = Timer.repeat(self.clear_cache, (), CHECK_CACHE, CHECK_CACHE)
     async def get(self, hash: str) -> File:
         if hash in self.cache:
             file = self.cache[hash]
             file.last_access = time.time()
-            self.stats.hit(file, cache=True)
+            file.cache = True
             return file
         path = Path(str(self.dir) + f"/{hash[:2]}/{hash}")
         buf = io.BytesIO()
@@ -213,12 +219,17 @@ async def get(self, hash: str) -> File:
         file = File(path, hash, buf.tell(), time.time(), time.time())
         file.set_data(buf.getbuffer())
         self.cache[hash] = file
-        self.stats.hit(file)
+        file.cache = False
         return file
     async def exists(self, hash: str) -> bool:
         return os.path.exists(str(self.dir) + f"/{hash[:2]}/{hash}")
     async def get_size(self, hash: str) -> int:
         return os.path.getsize(str(self.dir) + f"/{hash[:2]}/{hash}")
+    async def copy(self, origin: Path, hash: str):
+        Path(str(self.dir) + f"/{hash[:2]}/{hash}").parent.mkdir(exist_ok=True, parents=True)
+        async with aiofiles.open(str(self.dir) + f"/{hash[:2]}/{hash}", "wb") as w, aiofiles.open(origin, "rb") as r:
+            await w.write(await r.read())
+        return origin.stat().st_size
     async def write(self, hash: str, io: io.BytesIO) -> int:
         Path(str(self.dir) + f"/{hash[:2]}/{hash}").parent.mkdir(exist_ok=True, parents=True)
         async with aiofiles.open(str(self.dir) + f"/{hash[:2]}/{hash}", "wb") as w:
@@ -258,9 +269,10 @@ async def clear_cache(self):
         logger.info(f"Outdate caches: {unit.format_number(len(old_keys))} ({unit.format_bytes(old_size)})")
     async def get_files(self, dir: str) -> list[str]:
         files = []
-        with os.scandir(str(self.dir) + f"/{dir}") as session:
-            for file in session:
-                files.append(file.name)
+        if os.path.exists(str(self.dir) + f"/{dir}"):
+            with os.scandir(str(self.dir) + f"/{dir}") as session:
+                for file in session:
+                    files.append(file.name)
         return files
     async def removes(self, hashs: list[str]) -> int:
         success = 0
@@ -272,14 +284,25 @@ async def removes(self, hashs: list[str]) -> int:
         return success
     async def get_files_size(self, dir: str) -> int:
         size = 0
-        with os.scandir(str(self.dir) + f"/{dir}") as session:
-            for file in session:
-                size += file.stat().st_size
+        if os.path.exists(str(self.dir) + f"/{dir}"):
+            with os.scandir(str(self.dir) + f"/{dir}") as session:
+                for file in session:
+                    size += file.stat().st_size
         return size
+    async def get_cache_stats(self) -> StatsCache:
+        stat = StatsCache()
+        for file in self.cache.values():
+            stat.total += 1
+            stat.bytes += file.size
+        return stat
+class WebDav(Storage):
+    def __init__(self) -> None:
+        super().__init__()
 class Cluster:
     def __init__(self) -> None:
         self.sio = socketio.AsyncClient()
         self.storages: list[Storage] = []
+        self.storage_stats: dict[Storage, stats.StorageStats] = {}
         self.started = False
         self.sio.on("message", self._message)
         self.cur_storage: Optional[stats.SyncStorage] = None
@@ -293,13 +316,21 @@ def _message(self, message):
         logger.info(f"[Remote] {message}")
         if "信任度过低" in message:
             self.trusted = False
+    def get_storages(self):
+        return self.storages.copy()
     def add_storage(self, storage):
         self.storages.append(storage)
+        type = "Unknown"
+        key = time.time()
+        if isinstance(storage, FileStorage):
+            type = "File"
+            key = storage.dir
+        self.storage_stats[storage] = stats.get_storage(f"{type}_{key}")

-    async def _check_files_sync_status(self, text: str, pbar: tqdm):
+    async def _check_files_sync_status(self, text: str, pbar: tqdm, format=unit.format_numbers):
         if self.check_files_timer:
             return
-        n, total = unit.format_numbers(pbar.n, pbar.total)
+        n, total = format(pbar.n, pbar.total)
         await set_status(f"{text} ({n}/{total})")

     async def check_files(self):
@@ -377,7 +408,6 @@ async def check_files(self):
             if paths:
                 for path in paths:
                     os.remove(path)
-                pbar.disable()
             pbar.update(1)
             if dir:
                 for d in dir:
@@ -402,8 +432,19 @@ async def start(self, ):
         await self.check_files()
         await set_status("启动服务")
         await self.enable()
-    async def get(self, hash):
-        return await self.storages[0].get(hash)
+    async def get(self, hash) -> File:
+        storage = self.storages[0]
+        stat = self.storage_stats[storage]
+        file = await storage.get(hash)
+        stat.hit(file)
+        return file
+    async def get_cache_stats(self) -> StatsCache:
+        stat = StatsCache()
+        for storage in self.storages:
+            t = await storage.get_cache_stats()
+            stat.total += t.total
+            stat.bytes += t.bytes
+        return stat
     async def exists(self, hash):
         return await self.storages[0].exists(hash)
     async def enable(self) -> None:
@@ -486,14 +527,16 @@ async def _keepalive(self):
486527 "bytes" : storage .get_total_bytes () - storage .get_last_bytes (),
487528 })
488529 await self .start_keepalive (300 )
489- async def _keepalive_timeout (self ):
490- logger .warn ("Failed to keepalive? Reconnect the main" )
530+ async def reconnect (self ):
491531 try :
492532 await self .disable ()
493533 except :
494534 ...
495535 await self .cert ()
496536 await self .enable ()
537+ async def _keepalive_timeout (self ):
538+ logger .warn ("Failed to keepalive? Reconnect the main" )
539+ await self .reconnect ()
497540 async def cert (self ):
498541 if Path ("./.ssl/cert" ).exists () == Path ("./.ssl/key" ).exists () == True :
499542 return
@@ -574,6 +617,13 @@ async def process(type: str, data: Any):
         async with aiohttp.ClientSession(BASE_URL) as session:
             async with session.get(data) as resp:
                 return resp.json()
+    if type == "system":
+        return {
+            "memory": system.get_used_memory(),
+            "connections": system.get_connections(),
+            "cpu": system.get_cpus(),
+            "cache": dataclasses.asdict(await cluster.get_cache_stats()) if cluster else StatsCache()
+        }

 token = TokenManager()
 cluster: Optional[Cluster] = None
@@ -592,6 +642,7 @@ async def set_status(text: str):
 async def init():
     global cluster
     cluster = Cluster()
+    system.init()
     plugins.load_plugins()
     for plugin in plugins.get_plugins():
         await plugin.init()
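
For context, a minimal sketch of the cache-reporting path added above. It assumes core.api.StatsCache (not shown in this diff) is a plain dataclass with total/bytes counters, which is what the added code implies via StatsCache(), stat.total += 1, stat.bytes += file.size, and dataclasses.asdict(); the merge step mirrors Cluster.get_cache_stats feeding the "system" payload in process():

import dataclasses

@dataclasses.dataclass
class StatsCache:          # assumed shape; the real class lives in core.api
    total: int = 0         # number of files held in the in-memory cache
    bytes: int = 0         # combined size of those cached files

def merge(per_storage: list[StatsCache]) -> dict:
    # Sum the per-storage counters, then serialize with dataclasses.asdict,
    # matching how the "system" handler returns the aggregated cache stats.
    stat = StatsCache()
    for t in per_storage:
        stat.total += t.total
        stat.bytes += t.bytes
    return dataclasses.asdict(stat)

print(merge([StatsCache(2, 4096), StatsCache(1, 1024)]))  # {'total': 3, 'bytes': 5120}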