1. Install the proxy opener globally with install_opener()
import urllib2

if __name__ == '__main__':
    url = 'http://www.baidu.com'
    proxy = {'http': '124.235.181.175:80'}
    proxy_support = urllib2.ProxyHandler(proxy)
    opener = urllib2.build_opener(proxy_support)
    opener.addheaders = [('User-Agent', 'Mozilla/5.0 (Linux; Android 4.0.4; Galaxy Nexus Build/IMM76B) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.133 Mobile Safari/535.19')]
    urllib2.install_opener(opener)  # install_opener() installs the configured opener globally, so every subsequent urllib2.urlopen() call automatically goes through the proxy
    response = urllib2.urlopen(url)
    html = response.read().decode('utf-8')  # Baidu's homepage is UTF-8 encoded
    print html
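Note that urllib2 exists only in Python 2; in Python 3 the same handler classes live in urllib.request under the same names. A minimal sketch of the equivalent global-proxy setup in Python 3, assuming the same example proxy address (which may no longer be reachable):

import urllib.request

if __name__ == '__main__':
    url = 'http://www.baidu.com'
    proxy_support = urllib.request.ProxyHandler({'http': '124.235.181.175:80'})
    opener = urllib.request.build_opener(proxy_support)
    opener.addheaders = [('User-Agent', 'Mozilla/5.0 (Linux; Android 4.0.4; Galaxy Nexus Build/IMM76B) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.133 Mobile Safari/535.19')]
    urllib.request.install_opener(opener)  # from here on, urllib.request.urlopen() goes through the proxy
    html = urllib.request.urlopen(url).read().decode('utf-8')
    print(html)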
2. Use the opener directly, without installing it globally
import urllib2

if __name__ == '__main__':
    url = 'http://www.baidu.com'
    proxy = {'http': '124.235.181.175:80'}
    proxy_handler = urllib2.ProxyHandler(proxy)
    opener = urllib2.build_opener(proxy_handler)
    req = opener.open(url)  # only requests sent through opener.open() use the custom proxy; a plain urllib2.urlopen() does not, because the opener was never installed globally
    html = req.read().decode('utf-8')
    print html
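To see the difference described in the comment above, you can fetch a page that echoes the client IP with both methods. This is only a sketch: httpbin.org/ip is assumed here as a convenient echo service, and the example proxy may no longer work.

import urllib2

if __name__ == '__main__':
    url = 'http://httpbin.org/ip'
    opener = urllib2.build_opener(urllib2.ProxyHandler({'http': '124.235.181.175:80'}))
    print opener.open(url).read()      # reported origin should be the proxy's IP
    print urllib2.urlopen(url).read()  # reported origin should be your own IP (no opener installed)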
3. Choose a proxy at random from a proxy list
import urllib2
import random

if __name__ == '__main__':
    url = 'http://www.baidu.com/'
    proxy_list = [
        {'http': '124.88.67.81:80'},
        {'http': '124.235.181.175:80'},
        {'http': '123.53.134.254:8010'},
        {'http': '61.135.217.7:80'},
        {'http': '116.77.204.2:80'}
    ]
    proxy = random.choice(proxy_list)  # pick one proxy at random
    proxy_handler = urllib2.ProxyHandler(proxy)  # build a proxy handler from the chosen proxy
    opener = urllib2.build_opener(proxy_handler)
    opener.addheaders = [('User-Agent', 'Mozilla/5.0 (Linux; Android 4.0.4; Galaxy Nexus Build/IMM76B) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.133 Mobile Safari/535.19')]
    urllib2.install_opener(opener)
    req = urllib2.Request(url)
    response = urllib2.urlopen(req)
    html = response.read().decode('utf-8')
    print html
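Free proxies fail often, so a common extension of the random-choice approach is to fall back to the remaining proxies when the chosen one errors out. A minimal sketch, assuming the proxy list above; the retry-on-failure helper is an addition for illustration, not part of the original example:

import random
import urllib2

def fetch_with_random_proxy(url, proxy_list, timeout=10):
    # Try the proxies in random order and return the first successful response body.
    candidates = proxy_list[:]
    random.shuffle(candidates)
    for proxy in candidates:
        try:
            opener = urllib2.build_opener(urllib2.ProxyHandler(proxy))
            return opener.open(url, timeout=timeout).read()
        except urllib2.URLError:
            continue  # this proxy is dead or refused the connection; try the next one
    raise urllib2.URLError('all proxies in the list failed')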