import oss2
import traceback
import hashlib
import time
import os

from utils.Utils import log


def does_bucket_exist(bucket):
    # Return True if the bucket exists, False if OSS reports NoSuchBucket;
    # any other exception propagates to the caller.
    try:
        bucket.get_bucket_info()
    except oss2.exceptions.NoSuchBucket:
        return False
    return True


def getMDFFromFile(path):
    # Compute the MD5 hex digest and byte length of a file, reading in 4 KB chunks.
    # Returns (None, bytes_read_so_far) if the file cannot be read.
    _length = 0
    try:
        _md5 = hashlib.md5()
        with open(path, "rb") as f:
            while True:
                data = f.read(4096)
                if not data:
                    break
                _length += len(data)
                _md5.update(data)
        return _md5.hexdigest(), _length
    except Exception as e:
        traceback.print_exc()
        return None, _length


def uploadFileByPath(bucket, filepath, uploadpath, headers=None):
    # Upload a local file to the bucket under uploadpath; returns True on success.
    try:
        start_time = time.time()
        log("uploading file of %s" % filepath)
        with open(filepath, "rb") as f:
            bucket.put_object(uploadpath, f, headers=headers)
        log("upload file of %s takes %ds" % (filepath, time.time() - start_time))
        return True
    except Exception as e:
        traceback.print_exc()
        log("upload object failed of %s" % filepath)
        return False


def deleteObject(bucket, objectName):
    # Delete a single object from the bucket; failures are logged but not raised.
    try:
        bucket.delete_object(objectName)
    except Exception as e:
        log("delete object failed of %s" % objectName)


def downloadFile(bucket, objectPath, localPath):
    # Download an object with a resumable (multipart) download; checkpoint files
    # are kept in a tmp directory one level above this module.
    try:
        # bucket.get_object_to_file(objectPath, localPath)
        oss2.resumable_download(bucket, objectPath, localPath,
                                store=oss2.ResumableDownloadStore(root=os.path.join(os.path.dirname(__file__), "..", "tmp")),
                                multiget_threshold=200 * 1024,
                                part_size=200 * 1024,
                                num_threads=5)
        return True
    except Exception as e:
        log("download object failed of %s" % str(objectPath))
        return False


if __name__ == "__main__":
    print(getMDFFromFile('1578298842064.doc'))
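
    # A minimal usage sketch (an assumption, not part of the original module):
    # the access keys, endpoint and bucket/object names below are placeholders.
    # auth = oss2.Auth("<access_key_id>", "<access_key_secret>")
    # bucket = oss2.Bucket(auth, "https://oss-cn-hangzhou.aliyuncs.com", "<bucket_name>")
    # if does_bucket_exist(bucket):
    #     uploadFileByPath(bucket, "1578298842064.doc", "docs/1578298842064.doc")
    #     downloadFile(bucket, "docs/1578298842064.doc", "downloaded.doc")
    #     deleteObject(bucket, "docs/1578298842064.doc")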