20
20
import requests
21
21
import urllib3
22
22
23
- USER_AGENT = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) " \
24
- "Chrome/77.0.3865.75 Safari/537.36 "
25
23
26
24
# 去除警告
25
+ import constants
26
+
27
27
requests .packages .urllib3 .disable_warnings ()
28
28
# 如果请求失败默认重试次数
29
29
requests .adapters .DEFAULT_RETRIES = 5
30
30
31
31
32
def get_session():
    """
    Build a pre-configured requests session.

    The session sends the shared User-Agent header, disables TLS
    certificate verification and does not keep connections alive.

    :return: a configured ``requests.Session`` instance
    """
    session = requests.sessions.Session()
    # Close the connection after each request instead of pooling it
    session.keep_alive = False
    session.headers["User-Agent"] = constants.USER_AGENT
    session.verify = False
    # NOTE(review): requests does not honor a ``timeout`` attribute on
    # Session objects — callers must still pass ``timeout=`` per request.
    # The attribute is kept so existing callers that read it keep working.
    session.timeout = 600
    return session
32
48
def get(url, data=None):
    """
    Send an HTTP GET request.

    :param url: request URL
    :param data: query parameters, map/dict format (or None)
    :return: the ``requests.Response`` object
    """
    # Use the session as a context manager so the underlying connection
    # pool is released when the request finishes (the original leaked it)
    with requests.sessions.Session() as session:
        # Do not keep the connection alive once the request is done
        session.keep_alive = False
        return session.get(url, params=data, headers={"User-Agent": constants.USER_AGENT}, verify=False, timeout=600)
def post(url, data):
    """
    Send an HTTP POST request.

    :param url: request URL
    :param data: form payload, map/dict format
    :return: the ``requests.Response`` object
    """
    headers = {"User-Agent": constants.USER_AGENT}
    return requests.post(url, data, headers=headers, verify=False, timeout=600)
def delete(url, data):
    """
    Send an HTTP DELETE request.

    :param url: request URL
    :param data: query parameters, map/dict format
    :return: the ``requests.Response`` object
    """
    headers = {"User-Agent": constants.USER_AGENT}
    return requests.delete(url=url, params=data, headers=headers, verify=False, timeout=600)
def get_json (url , data ):
@@ -101,7 +117,7 @@ def download_big_file_urlib(url, mkdir, name=""):
101
117
102
118
req = Request (url )
103
119
# 增加header头信息
104
- req .add_header ('User-Agent' , USER_AGENT )
120
+ req .add_header ('User-Agent' , constants . USER_AGENT )
105
121
106
122
response = urlopen (req )
107
123
while True :
@@ -143,7 +159,7 @@ def download_big_file(url, mkdir, name=""):
143
159
name = os .path .join (mkdir , name )
144
160
145
161
start_time = time .time ()
146
- req = requests .get (url , stream = True , headers = {"User-Agent" : USER_AGENT }, verify = False )
162
+ req = requests .get (url , stream = True , headers = {"User-Agent" : constants . USER_AGENT }, verify = False )
147
163
with req as r :
148
164
content_length = int (r .headers ['content-length' ])
149
165
print (name , 'content-length: %dB/%.2fKB/%.2fMB' % (
@@ -190,7 +206,7 @@ def download_file(url, mkdir, name=""):
190
206
# 判断文件是否存在
191
207
# if not os.path.exists(name):
192
208
if not os .path .isfile (name ):
193
- with requests .get (url , headers = {"User-Agent" : USER_AGENT }, verify = False , timeout = 600 ) as req :
209
+ with requests .get (url , headers = {"User-Agent" : constants . USER_AGENT }, verify = False , timeout = 600 ) as req :
194
210
with open (name , "wb" ) as f :
195
211
f .write (req .content )
196
212
return name
0 commit comments