-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathget_proxy.py
More file actions
46 lines (27 loc) · 1004 Bytes
/
get_proxy.py
File metadata and controls
46 lines (27 loc) · 1004 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
import requests
from bs4 import BeautifulSoup
from random import choice
def get_proxy():
    """Scrape free-proxy-list.net and return one proxy chosen at random.

    Returns:
        dict: ``{'schema': 'http' | 'https', 'address': 'ip:port'}``.

    Raises:
        RuntimeError: if the proxy table is missing or yields no usable rows.
        requests.RequestException: on network failure or timeout.
    """
    # Timeout added: requests.get() without one can block forever.
    html = requests.get('https://free-proxy-list.net/', timeout=10).text
    # 'html.parser' is in the standard library; the original hard-required
    # the third-party lxml parser for no benefit on this simple page.
    soup = BeautifulSoup(html, 'html.parser')
    table = soup.find('table', id='proxylisttable')
    if table is None:
        # The site layout changes periodically; fail with a clear message
        # instead of an opaque AttributeError on None.
        raise RuntimeError('proxy table not found on free-proxy-list.net')
    proxies = []
    # [1:11]: skip the header row, keep the first 10 data rows.
    for tr in table.find_all('tr')[1:11]:
        tds = tr.find_all('td')
        if len(tds) < 7:
            # Footer/section rows lack the full column set; skip them
            # rather than raising IndexError below.
            continue
        ip = tds[0].text.strip()
        port = tds[1].text.strip()
        # Column 6 is the "Https" flag; 'yes' means the proxy supports HTTPS.
        schema = 'https' if 'yes' in tds[6].text.strip() else 'http'
        proxies.append({'schema': schema, 'address': ip + ':' + port})
    if not proxies:
        # choice([]) would raise a confusing IndexError.
        raise RuntimeError('no proxies could be parsed from the page')
    return choice(proxies)
def get_html(url):
    """Fetch *url* through a randomly chosen proxy and return the origin IP.

    The target is expected to answer with httpbin-style JSON containing an
    ``'origin'`` key (e.g. http://httpbin.org/ip).

    Returns:
        str: the ``'origin'`` field of the JSON response.

    Raises:
        requests.RequestException: on connection errors, timeouts, or
            HTTP error status codes.
    """
    p = get_proxy()  # {'schema': 'http'|'https', 'address': 'ip:port'}
    # Per the requests "Proxies" documentation the dict value must be a
    # full proxy URL including its scheme, e.g.
    # {'https': 'https://1.2.3.4:8080'} — the original passed a bare
    # 'ip:port', relying on requests to guess the scheme.
    proxy = {p['schema']: '{0}://{1}'.format(p['schema'], p['address'])}
    r = requests.get(url, proxies=proxy, timeout=5)
    # Surface 4xx/5xx explicitly instead of failing later on JSON decode.
    r.raise_for_status()
    return r.json()['origin']
def main():
    """Entry point: print the IP address httpbin sees for our proxied request."""
    target = 'http://httpbin.org/ip'
    origin = get_html(target)
    print(origin)


if __name__ == '__main__':
    main()