• requests
  • urllib
  • Scrapy
  • aiohttp

    import requests

    # Target page used to verify the outgoing IP.
    target_url = "http://httpbin.org/ip"
    # Tunnel proxy endpoint.
    proxy_host = 'http-dynamic.xiaoxiangdaili.com'
    proxy_port = 10030
    # Tunnel credentials (app id / app secret, shown in the vendor console).
    proxy_username = '应用id(后台-产品管理-隧道代理页面可查)'
    proxy_pwd = '应用密码(后台-产品管理-隧道代理页面可查)'

    # Build "http://user:pass@host:port" so requests authenticates the tunnel.
    proxyMeta = "http://%(user)s:%(pass)s@%(host)s:%(port)s" % {
        "host": proxy_host,
        "port": proxy_port,
        "user": proxy_username,
        "pass": proxy_pwd,
    }

    # Route both plain-HTTP and HTTPS traffic through the same tunnel.
    proxies = {
        'http': proxyMeta,
        'https': proxyMeta,
    }

    try:
        # timeout keeps the request from hanging forever on a dead proxy.
        resp = requests.get(url=target_url, proxies=proxies, timeout=10)
        print(resp.text)
    except Exception as e:
        # Fix: original used Python 2 `print e`, a SyntaxError under Python 3.
        print(e)


    from urllib import request

    # Target page used to verify the outgoing IP.
    targetUrl = "http://httpbin.org/ip"

    # Proxy server (tunnel endpoint).
    proxyHost = "http-dynamic.xiaoxiangdaili.com"
    proxyPort = "10030"

    # Tunnel authentication (app id / app secret, shown in the vendor console).
    proxyUser = "应用id(后台-产品管理-隧道代理页面可查)"
    proxyPass = "应用密码(后台-产品管理-隧道代理页面可查)"

    # Build "http://user:pass@host:port" so the handler authenticates the tunnel.
    proxyMeta = "http://%(user)s:%(pass)s@%(host)s:%(port)s" % {
        "host": proxyHost,
        "port": proxyPort,
        "user": proxyUser,
        "pass": proxyPass,
    }

    # Route both plain-HTTP and HTTPS traffic through the same tunnel.
    proxy_handler = request.ProxyHandler({
        "http": proxyMeta,
        "https": proxyMeta,
    })

    opener = request.build_opener(proxy_handler)

    # Install globally so every urlopen() call uses the proxy.
    request.install_opener(opener)
    # Fix: context manager closes the HTTP response even if read() raises
    # (the original leaked the response object).
    with request.urlopen(targetUrl) as r:
        resp = r.read()

    print(resp)


    ### This example sets the proxy IP through a custom downloader middleware.
    ### Note: the middleware must be enabled in the project's settings.py.

    import base64  # NOTE(review): unused in this snippet — likely a leftover; kept to preserve behavior.

    proxyUser = "应用id(后台-产品管理-隧道代理页面可查)"
    proxyPass = "应用密码(用户中心-隧道代理订单页面可查)"
    proxyHost = "http-dynamic.xiaoxiangdaili.com"
    proxyPort = "10030"

    # Credentials for the tunnel, keyed to match the URL template below.
    _tunnel_auth = {
        "host": proxyHost,
        "port": proxyPort,
        "user": proxyUser,
        "pass": proxyPass,
    }

    # Full proxy URL in "http://user:pass@host:port" form.
    proxyServer = "http://%(user)s:%(pass)s@%(host)s:%(port)s" % _tunnel_auth


    class ProxyMiddleware(object):
        """Scrapy downloader middleware that routes every request through the tunnel proxy."""

        def process_request(self, request, spider):
            # NOTE(review): "Connection: close" presumably forces one connection per
            # request so the dynamic tunnel can rotate exit IPs — confirm with vendor docs.
            request.headers["Connection"] = "close"
            request.meta["proxy"] = proxyServer

    """
    使用aiohttp请求隧道服务器
    请求http和https网页均适用
    
    """
    import asyncio
    import aiohttp
    import requests
    
    # 应用账号(请替换为真实账号)
    app_key = ''
    # 应用密码(请替换为真实密码)
    app_secret = ''
    # 用户名密码认证
    proxy_auth = aiohttp.BasicAuth(app_key, app_secret)
    # 要访问的目标网页
    target_url = "http://httpbin.org/ip"
    # 隧道域名
    proxy_host = 'http-dynamic.xiaoxiangdaili.com'
    # 端口号
    proxy_port = '10030'
    
    
    async def fetch(session, url):
    	async with session.get(url, proxy="http://" + proxy_host + ":" + proxy_port, proxy_auth=proxy_auth,) as response:
    		return await response.text()
    
    
    async def run():
    	# aiohttp默认使用严格的HTTPS协议检查。可以通过将ssl设置为False来放松认证检查
    	# async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl=False)) as session:
    	async with aiohttp.ClientSession() as session:
    		html = await fetch(session, target_url)
    		print(html)
    
    
    if __name__ == '__main__':
    	loop = asyncio.get_event_loop()
    	loop.run_until_complete(run())