-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathintegration.py
More file actions
153 lines (122 loc) · 4.92 KB
/
integration.py
File metadata and controls
153 lines (122 loc) · 4.92 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
import requests
from fake_useragent import UserAgent
import os
import sys
# Working directory for all downloaded/generated files.
TMP_DIR = "./tmp"
# First line of this file holds the user's subscription URL.
SUB_URL_FILE = "./tmp/sub_url.txt"
SUB_CONF = "./tmp/sub.conf"
# Final integrated Surge config written by main().
FULL_CONF = "./tmp/intel.conf"
# Locally saved copy of the subscription config (used instead of downloading).
LOCAL_CONF = "./tmp/SSR Cloud.conf"
# Upstream Surge 3 ruleset; everything from "[Host]" onward is appended.
RULES_URL_FULL = "https://raw.githubusercontent.com/ConnersHua/Profiles/master/Surge/Surge3.conf"
def file_dl(url):
    """Download *url* and return its body decoded as UTF-8.

    Exits the program (SystemExit) on any network failure or non-2xx
    HTTP status, so callers never receive an error page as content.
    """
    # Fixed desktop-Chrome UA; some subscription hosts reject the
    # default python-requests User-Agent. (The original instantiated
    # fake_useragent.UserAgent but never used it for the request.)
    header = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36"}
    print("visiting {}".format(url))
    try:
        # timeout keeps an unreachable host from hanging the script forever.
        resp = requests.get(url, headers=header, timeout=30)
        # Raise on HTTP error statuses (404, 500, ...) — HTTPError is a
        # RequestException subclass, so the handler below catches it too.
        resp.raise_for_status()
    except requests.exceptions.RequestException as e:
        print("Request error, url not found, please check your link, exiting...")
        raise SystemExit(e)
    return resp.content.decode("utf-8")
def rules_dl():
    """Download the upstream Surge ruleset and return everything from
    the [Host] section onward.

    Raises SystemExit when the [Host] marker is missing — previously a
    missing marker made str.find return -1 and a bogus one-character
    tail slice was returned silently.
    """
    print("Fetching rules...")
    # strip("\r\n") removes any mix of CR/LF at both ends in one pass;
    # the old strip("\n").strip("\r") missed a leading "\r\n" pair.
    full_str = file_dl(RULES_URL_FULL).strip("\r\n") + "\n\n\n"
    where_rule = full_str.find("[Host]")
    if where_rule == -1:
        raise SystemExit("Rules parsing error, [Host] not found in upstream ruleset, exiting...")
    return full_str[where_rule:]
def sub_dl(url):
    """Download the subscription config from *url* and sanity-check it.

    Exits via SystemExit when the server returned an HTML error page or
    the mandatory [Proxy]/[Proxy Group] sections are missing.
    """
    print("Fetching sub...")
    sub_conf_content = file_dl(url)
    # BUG FIX: the original condition was `if "<html>" or "DOCTYPE" in ...`,
    # which is always true (the literal "<html>" is truthy), so every
    # download was rejected. Test each marker's membership explicitly.
    if "<html>" in sub_conf_content or "DOCTYPE" in sub_conf_content:
        raise SystemExit("Route error, page not found, please check your link, exiting...")
    if "[Proxy]" not in sub_conf_content or "[Proxy Group]" not in sub_conf_content:
        raise SystemExit("File parsing error, [Proxy]/[Proxy Group] not found, please check your link, exiting...")
    return sub_conf_content
def local_dl():
    """Read and return the locally saved subscription config (LOCAL_CONF)."""
    with open(LOCAL_CONF, "r") as conf_file:
        return conf_file.read()
def sub_url_reader():
    """Return the subscription URL stored on the first line of SUB_URL_FILE.

    Creates TMP_DIR and an empty SUB_URL_FILE on first run, then exits
    with instructions so the user can fill the URL in. Raises SystemExit
    when the file is unreadable or empty.
    """
    if not os.path.isdir(TMP_DIR):
        print("TMP dir not found, creating...")
        os.makedirs(TMP_DIR)
    else:
        print("TMP dir found")
    if not os.access(SUB_URL_FILE, os.F_OK):
        print("SUB_URL_FILE not found, creating...")
        # Create an empty placeholder for the user to edit, closing it
        # deterministically (the original left the handle unclosed).
        with open(SUB_URL_FILE, 'w'):
            pass
        print("SUB_URL_FILE created at \"{}\"".format(SUB_URL_FILE))
        raise SystemExit("Pls add your sub link to the first line of SUB_FILE, then restart this script.\nExiting...")
    print("SUB_URL_FILE found")
    if not os.access(SUB_URL_FILE, os.R_OK):
        raise SystemExit("SUB_FILE read error")
    print("SUB_URL_FILE reading...")
    # BUG FIX: the original opened the file for reading and never closed it,
    # and built the URL via a pointless "".join(...) round-trip.
    with open(SUB_URL_FILE, "r") as f:
        sub_url = f.readline().strip("\r\n")
    if len(sub_url) == 0:
        raise SystemExit("SUB_URL_FILE no content, pls check your file")
    print("SUB_URL_FILE url is: {}".format(sub_url))
    return sub_url
def parse_sub(sub_conf_content: str):
    """Extract the [Proxy] section text and the first `select` node list
    from a Surge config string.

    Returns a ``(proxy_section, node_list)`` tuple; exits via SystemExit
    when either [Proxy] or [Proxy Group] cannot be located.
    """
    section_names = (
        "[General]", "[Replica]", "[Proxy]", "[Proxy Group]", "[Rule]",
        "[Host]", "[URL Rewrite]", "[Header Rewrite]", "[MITM]", "[Script]",
    )
    # Offsets of every section header that actually occurs, in file order.
    offsets = sorted(
        ((name, sub_conf_content.find(name)) for name in section_names
         if sub_conf_content.find(name) != -1),
        key=lambda pair: pair[1],
    )

    def section_text(index):
        # Slice from this header up to the next header (or end of string).
        start = offsets[index][1]
        if index + 1 < len(offsets):
            return sub_conf_content[start:offsets[index + 1][1]]
        return sub_conf_content[start:]

    proxy_text = ""
    group_text = ""
    for idx, (name, _pos) in enumerate(offsets):
        if name == "[Proxy]":
            proxy_text = section_text(idx)
        elif name == "[Proxy Group]":
            group_text = section_text(idx)
    if proxy_text == "" or group_text == "":
        raise SystemExit("SUB_FILE parse error, no [Proxy] or [Proxy Group], exiting...")
    # Keep only the node list following the first "select," keyword,
    # trimmed to a single line.
    node_list = group_text[group_text.find("select,") + 7:].strip(" ")
    node_list = node_list[:node_list.find("\n")]
    return proxy_text.strip("\n"), node_list.strip("\n")
def main():
    """Assemble the final Surge config from the local templates, the
    subscription config, and the upstream ruleset, then write FULL_CONF.
    """
    # Local template pieces shipped alongside this script.
    with open("general.conf", "r") as f:
        General = f.read().strip("\n")
    # Uses the locally saved subscription copy; switch to the commented
    # lines to re-enable downloading from the stored sub URL.
    # sub_url = sub_url_reader()
    # sub_conf_content = sub_dl(sub_url)
    sub_conf_content = local_dl()
    Proxy, proxy_list = parse_sub(sub_conf_content)
    # Inject the discovered node list into the proxy-group template.
    with open("proxy_group.conf", "r") as f:
        ProxyGroup = f.read().replace("{%%}", proxy_list)
    with open("rule.conf", "r") as f:
        Rule = f.read()
    After = rules_dl().strip("\n")
    print("Integrating...")
    Final = "\n\n\n".join([General, Proxy, ProxyGroup, Rule, After]) + "\n"
    # BUG FIX: the original wrote through an anonymous, never-closed file
    # object; a context manager guarantees the config is flushed to disk.
    with open(FULL_CONF, 'w') as out:
        out.write(Final)
    print("Done @ {}".format(FULL_CONF))


if __name__ == '__main__':
    main()