-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathutils.py
124 lines (96 loc) · 3.06 KB
/
utils.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
import vercel_blob
import os
import logging
import json
import time
from datetime import datetime
import pprint
# Module-wide file logging: append to app.log with name/time/level/message columns.
logging.basicConfig(filename='app.log', filemode='a+', format='%(name)s - %(asctime)s - %(levelname)-8s - %(message)s')
# Named logger for this module; INFO and above are recorded.
logger = logging.getLogger("utils")
logger.setLevel(logging.INFO)
def _convert_timestamp_to_date(timestamp_str):
timestamp = int(timestamp_str)
time_struct = time.gmtime(timestamp)
formatted_date = time.strftime('%d-%m-%Y', time_struct)
return formatted_date
def _iso_to_ddmmyy(iso_timestamp: str) -> str:
# Parse the ISO timestamp
dt_obj = datetime.strptime(iso_timestamp, "%Y-%m-%dT%H:%M:%S.%fZ")
# Format to ddmmyy
return dt_obj.strftime("%d-%m-%Y")
def list_all_blobs():
    '''
    List all blobs under the 'cache/' prefix in the Blob storage.

    Pages through the Vercel Blob listing API, decorates each blob dict
    with a 1-based id, a human-readable filename/size and a dd-mm-YYYY
    upload date, persists the result to blobs.json, and downloads any
    blob missing from the local 'cache' directory.

    Returns:
        list[dict]: the decorated blob records.
    '''
    # Page through the listing. The original looped on hasMore without
    # passing the cursor back, so a multi-page listing re-fetched the
    # first page forever.
    all_blobs = []
    cursor = None
    while True:
        resp = vercel_blob.list({'cursor': cursor} if cursor else {})
        all_blobs.extend(resp.get('blobs') or [])
        if not resp.get('hasMore'):
            break
        cursor = resp.get('cursor')

    # Keep only blobs stored under the cache/ prefix.
    blobs = [b for b in all_blobs if b.get('pathname', '').startswith('cache/')]

    for i, blob in enumerate(blobs, start=1):
        blob['id'] = i
        # contentDisposition looks like 'attachment; filename="x.pdf"'.
        blob['filename'] = blob.get('contentDisposition').split('filename=')[1].replace('"', '')
        blob['uploadedAt'] = _iso_to_ddmmyy(blob.get('uploadedAt'))
        blob['size'] = _format_size(blob.get('size', 0))

    pprint.pprint(blobs)
    with open('blobs.json', 'w') as f:
        json.dump(blobs, f)

    # Download blobs missing from the local cache directory.
    os.makedirs('cache', exist_ok=True)  # original crashed if 'cache' was absent
    cached = set(os.listdir('cache'))    # O(1) membership instead of per-file listdir scan
    for blob in blobs:
        filename = blob.get('filename')
        if filename not in cached:
            logger.info(f"Downloading {filename} from Vercel Blob")
            vercel_blob.download_file(blob.get('url'), 'cache/')
        else:
            logger.info(f"{filename} already exists in cache")
    return blobs


def _format_size(num_bytes):
    """Render a byte count as 'N B', 'X.XX KB' or 'X.XX MB'."""
    if num_bytes < 1024:
        return f"{num_bytes} B"
    if num_bytes < 1024 * 1024:
        return f"{num_bytes / 1024:.2f} KB"
    return f"{num_bytes / (1024 * 1024):.2f} MB"
def get_all_blobs_from_cache():
    '''
    Load the cached blob listing from blobs.json.

    Returns:
        list: the parsed records, or an empty list when blobs.json is absent.
    '''
    if not os.path.exists('blobs.json'):
        return []
    with open('blobs.json', 'r') as f:
        return json.load(f)
def upload_file(file):
    '''
    Upload a file to the Blob storage under the 'cache/' prefix.

    The argument is assumed to be a Flask/werkzeug upload object exposing
    .filename and .save() — TODO confirm against the caller.

    The original saved and uploaded every file to the literal path
    'cache/(unknown)' (broken interpolation), so all uploads collided on
    one bogus name; the actual filename is now interpolated.

    Args:
        file: uploaded file object with .filename and .save(path).

    Returns:
        bool: True on completion.
    '''
    filename = file.filename
    logger.info(f"Uploading file: {filename}")
    # Save to the local cache first, then stream the bytes to Blob storage.
    os.makedirs('cache', exist_ok=True)
    local_path = f'cache/{filename}'
    file.save(local_path)
    with open(local_path, 'rb') as f:
        vercel_blob.put(path=f'cache/{filename}', data=f.read(), options={
            # Keep the stored pathname stable (no random suffix appended).
            'addRandomSuffix': 'false',
        })
    return True
def delete_blob(url):
    '''
    Remove a single blob from the Blob storage.

    Args:
        url: the blob's URL, as returned by the listing API.

    Returns:
        bool: True on completion.
    '''
    logger.info(f"Deleting blob: {url}")
    vercel_blob.delete(url)
    return True
def refresh_cache():
    '''
    Rebuild blobs.json and the local cache directory by re-listing
    every blob in storage.

    Returns:
        bool: True on completion.
    '''
    logger.info("Refreshing cache")
    list_all_blobs()
    return True