设为首页 加入收藏

TOP

爬虫必备—requests(三)
2017-09-30 15:29:49 】 浏览:6123
Tags:爬虫 必备 requests
# NOTE(review): this region was reconstructed from a page scrape that collapsed
# the code onto a few lines and fused the original line numbers into the text.
# The first function's header was truncated to "s():"; its name below is a
# best-effort reconstruction — confirm against the original article.
def param_files():
    """Examples of uploading files via the ``files=`` parameter (all commented out)."""
    # send a file
    # file_dict = {
    #     'f1': open('readme', 'rb')
    # }
    # requests.request(method='POST',
    #                  url='http://127.0.0.1:8000/test/',
    #                  files=file_dict)

    # send a file with a custom filename
    # file_dict = {
    #     'f1': ('test.txt', open('readme', 'rb'))
    # }
    # requests.request(method='POST',
    #                  url='http://127.0.0.1:8000/test/',
    #                  files=file_dict)

    # send in-memory content with a custom filename
    # file_dict = {
    #     'f1': ('test.txt', "hahsfaksfa9kasdjflaksdjf")
    # }
    # requests.request(method='POST',
    #                  url='http://127.0.0.1:8000/test/',
    #                  files=file_dict)

    # send in-memory content with filename, content type and extra headers
    # file_dict = {
    #     'f1': ('test.txt', "hahsfaksfa9kasdjflaksdjf", 'application/text', {'k1': '0'})
    # }
    # requests.request(method='POST',
    #                  url='http://127.0.0.1:8000/test/',
    #                  files=file_dict)

    pass


def param_auth():
    """HTTP Basic / Digest authentication examples via the ``auth=`` parameter."""
    from requests.auth import HTTPBasicAuth, HTTPDigestAuth

    ret = requests.get('https://api.github.com/user', auth=HTTPBasicAuth('wupeiqi', 'sdfasdfasdf'))
    print(ret.text)

    # ret = requests.get('http://192.168.1.1',
    #                    auth=HTTPBasicAuth('admin', 'admin'))
    # ret.encoding = 'gbk'
    # print(ret.text)

    # ret = requests.get('http://httpbin.org/digest-auth/auth/user/pass', auth=HTTPDigestAuth('user', 'pass'))
    # print(ret)
    #


def param_timeout():
    """Connect/read timeout examples via the ``timeout=`` parameter (commented out)."""
    # ret = requests.get('http://google.com/', timeout=1)
    # print(ret)

    # ret = requests.get('http://google.com/', timeout=(5, 1))
    # print(ret)
    pass


def param_allow_redirects():
    """Disable automatic redirect following with ``allow_redirects=False``."""
    ret = requests.get('http://127.0.0.1:8000/test/', allow_redirects=False)
    print(ret.text)


def param_proxies():
    """Proxy configuration examples via the ``proxies=`` parameter (commented out)."""
    # proxies = {
    #     "http": "61.172.249.96:80",
    #     "https": "http://61.185.219.126:3128",
    # }

    # proxies = {'http://10.20.1.128': 'http://10.10.1.10:5323'}

    # ret = requests.get("http://www.proxy360.cn/Proxy", proxies=proxies)
    # print(ret.headers)

    # from requests.auth import HTTPProxyAuth
    #
    # proxyDict = {
    #     'http': '77.75.105.165',
    #     'https': '77.75.105.165'
    # }
    # auth = HTTPProxyAuth('username', 'mypassword')
    #
    # r = requests.get("http://www.google.com", proxies=proxyDict, auth=auth)
    # print(r.text)

    pass


def param_stream():
    """Streaming download example via ``stream=True``."""
    ret = requests.get('http://127.0.0.1:8000/test/', stream=True)
    print(ret.content)
    ret.close()

    # from contextlib import closing
    # with closing(requests.get('http://httpbin.org/get', stream=True)) as r:
    #     # process the response here
    #     for i in r.iter_content():
    #         print(i)


def requests_session():
    """Use a Session so cookies persist across requests (chouti.com login demo)."""
    import requests

    session = requests.Session()

    # 1. Hit any page first to obtain the cookie
    i1 = session.get(url="http://dig.chouti.com/help/service")

    # 2. Log in carrying the previous cookie; the backend authorizes the
    #    'gpsd' value inside it
    i2 = session.post(
        url="http://dig.chouti.com/login",
        data={
            'phone': "8615131255089",
            'password': "xxxxxx",
            'oneMonth': ""
        }
    )

    i3 = session.post(
        url="http://dig.chouti.com/link/vote?linksId=8589623",
    )
    # NOTE(review): the scrape truncated the original after a lone "p" here —
    # presumably "print(i3.text)"; confirm against the original article.
    print(i3.text)
首页 上一页 1 2 3 4 下一页 尾页 3/4/4
】【打印繁体】【投稿】【收藏】 【推荐】【举报】【评论】 【关闭】 【返回顶部
上一篇Python网络爬虫与信息提取(一) 下一篇32、进程池与回调函数

最新文章

热门文章

Hot 文章

Python

C 语言

C++基础

大数据基础

linux编程基础

C/C++面试题目