-
Notifications
You must be signed in to change notification settings - Fork 37
Expand file tree
/
Copy pathmain.py
More file actions
247 lines (216 loc) · 8.99 KB
/
main.py
File metadata and controls
247 lines (216 loc) · 8.99 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
"""
Date: 2023-10-23 18:24:31
LastEditors: Kumo
LastEditTime: 2024-09-28 17:21:58
Description:
"""
from auto_score.deploy_stragegies import *
from auto_score.utils.proxy_decorator import IS_AUTHOR_ENV
from auto_score.utils.singleton import get_instance, get_handler, GetHandlers
from auto_score.utils.logger import LoggerManager
from auto_score.directly_request.mms import MMS
from auto_score.ms_auth import MSAuth
from auto_score.onedrive.patch import upload_large_file
from auto_score.email import EmailHandler
import hashlib
import os
import base64
log_manager = LoggerManager(f"log/{__name__}.log")
logger = log_manager.logger
ERROR_MSGS = []
def parse_last_download(lines):
    """Parse persisted download-signal lines into a lookup dict.

    Each meaningful line has the form ``"<md5> <number>"`` (the format
    written back by the ``## 6`` step in ``__main__``).

    Args:
        lines: iterable of strings read from the signal file.

    Returns:
        dict mapping md5 hex digest (str) -> last recorded value (float).

    Robustness fix vs. the original: ``line.strip().split(" ")`` raised
    ValueError on blank lines (e.g. a trailing newline in the file) and on
    runs of multiple spaces; ``str.split()`` with no argument handles both,
    and blank lines are skipped.
    """
    last_download = {}
    for line in lines:
        fields = line.split()
        if not fields:
            continue  # tolerate blank / whitespace-only lines
        md5, timestamp = fields[0], fields[1]
        last_download[md5] = float(timestamp)
    return last_download
def generate_xoauth2(username, token):
    """Build the base64-encoded SASL XOAUTH2 string used for SMTP auth.

    The wire format is ``user=<username>^Aauth=Bearer <token>^A^A`` where
    ``^A`` is the control byte ``\\x01``, base64-encoded as ASCII.

    Args:
        username: the sender account (e.g. an Outlook address).
        token: a valid OAuth2 access token for that account.

    Returns:
        str: the base64 XOAUTH2 payload ready to hand to the SMTP server.
    """
    sasl_string = f"user={username}\x01auth=Bearer {token}\x01\x01"
    return base64.b64encode(sasl_string.encode("ascii")).decode("ascii")
def collect_errors(err):
    # Log *err* at ERROR level and also stash it in the module-level
    # ERROR_MSGS list, so the end-of-run summary (email body / final
    # error check in __main__) can report every failure, not just the
    # most recent one.
    logger.error(err)
    ERROR_MSGS.append(err)
if __name__ == "__main__":
    ## 0. get config: pick the deployment strategy from the environment
    env = os.environ.get("AUTO_HALCYON_ENV")
    if not env and IS_AUTHOR_ENV:
        env = "LOCAL"
    assert env
    if env == "LOCAL":
        strategy = LocalStrategy()
    elif env == "DOCKER":
        strategy = DockerStrategy()
    elif env == "GITHUB_ACTION":
        strategy = GithubActionStrategy()
    else:
        collect_errors(f"env error, not support env: {env}")
        os._exit(-1)
    ## 1. Init
    ### website handlers — constructing MMS presumably registers it in the
    ### singleton registry consulted by get_handler/GetHandlers (TODO confirm)
    mms_handler = MMS(strategy.email, strategy.password, strategy.savefolder_path)
    ### messy params
    sub_filename = "./subscriptions"  # source | user
    last_download_filename = "./_last_download_signal"
    ## 2. load subscriptions and last downloading times
    assert os.path.exists(sub_filename)
    with open(sub_filename, "r", encoding="utf-8") as file:
        subscriptions = file.readlines()
    if os.path.exists(last_download_filename):
        with open(last_download_filename, "r") as file:
            last_download_lines = file.readlines()
        last_downloads = (
            parse_last_download(last_download_lines) if last_download_lines else {}
        )
    else:
        # BUGFIX: this branch previously set `last_download_lines = {}` and
        # left `last_downloads` undefined, so the very first run (before the
        # signal file exists) crashed with a NameError below.
        last_downloads = {}
    ## 3. for each sub, try to find new links in rss data and call cloudreve offline download
    all_tasks_success = True
    num_newly_downloads = 0
    titles_newly_download = []
    latest_downloads = {}
    for sub in subscriptions:
        parts = sub.strip().split("|")
        assert len(parts) >= 2
        source_name = parts[0]
        user = parts[1]
        # md5 of the whole entry is the stable key used in the signal file
        full_description = "".join(parts)
        md5 = hashlib.md5(full_description.encode("utf-8")).hexdigest()
        last_sheetnum = last_downloads.get(md5, -1)  # -1 -> never downloaded
        latest_downloads[md5] = last_sheetnum
        # NOTE(review): `parser` is unused, but get_instance() may register a
        # singleton as a side effect — kept deliberately; confirm before removing.
        parser = get_instance(source_name)
        # if parser and parser.is_available:
        handler = get_handler(source_name)
        links, max_sheetnum, titles = handler.get_recent_sheets(
            user, int(last_sheetnum)
        )
        if len(links) > 0:  # only call downloading when having something new
            # if cloudreve.create_directory(folder_to_root_dir): # also ok when folder exists
            print(f"{source_name}_handler")
            is_download_success = handler.download_sheets(links)
            # is_download_success = cloudreve.add_offline_download_task(links, folder_to_root_dir)
            if is_download_success:
                num_newly_downloads += len(links)
                latest_downloads[md5] = max_sheetnum
                titles_newly_download.extend(titles)
                logger.info(
                    f"Successfully download {len(links)} links into {strategy.savefolder_path}."
                )
            else:  # failed when downloading
                all_tasks_success = False
                collect_errors(
                    f"Failed when downloading {user}'s sheets in RSS source {source_name}."
                )
        else:  # nothing new
            logger.warning("No new link found")
        # else: # failed when getting parser
        #     all_tasks_success = False
        #     collect_errors(f"RSS source {source_name} is not available.")
    ### 4. collect all sheets downloaded by every registered handler
    all_sheets_dir = []
    for handler in GetHandlers():
        all_sheets_dir.extend([path for path in handler.file_paths])
    ## 5. upload to onedrive
    if strategy.enable_od_upload:
        od_ms_auth = MSAuth(
            strategy.od_client_id,
            strategy.od_client_secret,
            strategy.od_redirect_uri,
            ["files.readwrite", "user.read", "offline_access"],
            "_od_refresh_token",
        )
        all_od_upload_success = True
        for path in all_sheets_dir:
            # refresh per file: tokens can expire during long upload batches
            access_token = od_ms_auth.get_access_token()
            if access_token:
                # get filename from path with extension
                upload_target = os.path.join(
                    strategy.od_upload_dir, os.path.basename(path)
                ).replace("\\", "/")
                logger.debug(upload_target)
                if upload_large_file(access_token, path, upload_target):
                    logger.info("Upload to onedrive successfully")
                else:
                    all_od_upload_success = False
                    collect_errors("Failed to upload to onedrive")
            else:
                collect_errors("cannot refresh onedrive token")
    ## 6. update download data: persist the newest sheet number per subscription
    with open(last_download_filename, "w") as file:
        for md5, max_sheetnum in latest_downloads.items():
            if max_sheetnum:
                file.write(f"{md5} {max_sheetnum}\n")
    logger.info("Update last download signal successfully.")
    ## 7. send email
    has_error_prefix = "[ERROR] " if len(ERROR_MSGS) > 0 else ""
    if strategy.enable_email_notify:
        ### check result and prepare mail data
        logger.info("=" * 50)
        logger.info("summary: ")
        if all_tasks_success:
            # BUGFIX: the old condition also tested `max_sheetnum > last_sheetnum`,
            # reading variables leaked from the *last* iteration of the
            # subscription loop (NameError if `subscriptions` was empty, and
            # wrong for multi-subscription runs). `num_newly_downloads` already
            # counts only successful new downloads, so it is sufficient alone.
            if num_newly_downloads > 0:
                subject = f"{has_error_prefix}Successfully downloading sheets."
                content = "Success downloading the following sheet(s):\n{}".format(
                    "\n".join([title for title in titles_newly_download])
                )
                logger.info("All sheets start to download successfully.")
            else:  # nothing new
                subject = f"{has_error_prefix}There's no new sheet or old sheet redownloaded to update."
                content = "There's no new sheet!"
                logger.info("There's no new sheet")
        else:  # download error
            subject = f"{has_error_prefix}Failed to download all sheets."
            content = "Failed..."
            collect_errors("Failed to download all sheets.")
        if has_error_prefix:
            content += "ERROR msgs: \n{}".format("\n".join([err for err in ERROR_MSGS]))
        logger.info("=" * 50)
        if strategy.use_oauth2_outlook:
            ms_auther = MSAuth(
                strategy.outlook_client_id,
                strategy.outlook_client_secret,
                strategy.outlook_redirect_uri,
                ["https://outlook.office.com/SMTP.Send", "offline_access"],
                "_outlook_refresh_token",
            )
            ms_access_token = ms_auther.get_access_token()
            if not ms_access_token:
                os._exit(-1)
            else:
                xoauth = generate_xoauth2(strategy.sender, ms_access_token)
                email_handler = EmailHandler(
                    strategy.sender,
                    strategy.smtp_host,
                    strategy.smtp_port,
                    strategy.mail_license,
                    strategy.receivers,
                    xoauth=xoauth,
                )
        else:
            email_handler = EmailHandler(
                strategy.sender,
                strategy.smtp_host,
                strategy.smtp_port,
                strategy.mail_license,
                strategy.receivers,
            )
        # all_sheets_dir.extend(LoggerManager.get_all_log_filenames())
        if not email_handler.perform_sending(
            subject,
            content,
            sheet_files=all_sheets_dir,
            log_files=(
                LoggerManager.get_all_log_filenames() if strategy.send_logs else []
            ),
        ):
            os._exit(-1)
    else:
        logger.info("Skip sending email.")
    # Fail the run loudly whenever any error was collected, so schedulers
    # (e.g. GitHub Actions) mark the job as failed regardless of whether the
    # email notification path was taken.
    if has_error_prefix:
        logger.error(
            "ERROR msgs: \n{}".format("\n".join([err for err in ERROR_MSGS]))
        )
        raise Exception("Error occurred, please check log.")