HTTP Tunnel (Dynamic Edition)
Python Integration Guide

Endpoint

Host: http-dyn.abuyun.com, Port: 9020

Tunnel Authentication

The HTTP tunnel supports two authorization modes:

  • Request header (default)

Authentication uses a username/password pair; the credentials are ultimately converted into a 『Proxy-Authorization』 header that is sent along with every request (see the sketch after this list).

To ease integration from certain languages, the platform also accepts tunnel authentication via the 『Authorization』 header.

  • IP binding (not yet enabled)

Simply bind the IP of the server from which requests are sent.

A proxy tunnel can be bound to only one IP, while the same IP may be bound to one Professional, one Dynamic, and one Classic tunnel each.
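
In the request-header mode, the 『Proxy-Authorization』 value is simply the string "Basic " followed by the Base64 encoding of "user:pass". As a minimal sketch using only the Python standard library (host, port, and credentials are the placeholders used throughout this guide), the header can also be set by hand when talking to the proxy directly:

    # Hand-rolled request through the tunnel (plain-HTTP target only)
    import base64
    from http.client import HTTPConnection

    proxyHost = "http-dyn.abuyun.com"
    proxyPort = 9020
    proxyUser = "H01234567890123D"
    proxyPass = "0123456789012345"

    # Basic credentials: base64("user:pass")
    token = base64.b64encode(("%s:%s" % (proxyUser, proxyPass)).encode("ascii")).decode("ascii")

    # Connect to the proxy and request the absolute URL, sending the
    # credentials in the Proxy-Authorization header
    conn = HTTPConnection(proxyHost, proxyPort)
    conn.request("GET", "http://test.abuyun.com/",
                 headers={"Proxy-Authorization": "Basic " + token})
    resp = conn.getresponse()
    print(resp.status)
    print(resp.read().decode("utf-8", "replace"))
    conn.close()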

Example: urllib.request (Python 3)
    # -*-*-
    # Thanks to 『│網亊隨楓︵ (QQ: 332110637)』 for contributing this sample code
    # -*-*-

    # -*- coding: utf-8 -*-

    from urllib import request

    # Target page to fetch
    targetUrl = "http://test.abuyun.com"

    # Proxy server
    proxyHost = "http-dyn.abuyun.com"
    proxyPort = "9020"

    # Proxy tunnel credentials
    proxyUser = "H01234567890123D"
    proxyPass = "0123456789012345"

    proxyMeta = "http://%(user)s:%(pass)s@%(host)s:%(port)s" % {
        "host" : proxyHost,
        "port" : proxyPort,
        "user" : proxyUser,
        "pass" : proxyPass,
    }

    proxy_handler = request.ProxyHandler({
        "http"  : proxyMeta,
        "https" : proxyMeta,
    })

    #auth = request.HTTPBasicAuthHandler()
    #opener = request.build_opener(proxy_handler, auth, request.HTTPHandler)

    opener = request.build_opener(proxy_handler)

    request.install_opener(opener)
    resp = request.urlopen(targetUrl).read()

    print(resp)
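
The commented-out HTTPBasicAuthHandler lines above hint at an alternative urllib approach: rather than embedding the credentials in the proxy URL, register them with a ProxyBasicAuthHandler. A sketch, assuming the tunnel answers unauthenticated requests with a 407 Basic challenge:

    # urllib.request with challenge-response proxy authentication (Python 3)
    from urllib import request

    proxyHost = "http-dyn.abuyun.com"
    proxyPort = "9020"
    proxyUser = "H01234567890123D"
    proxyPass = "0123456789012345"

    proxy_handler = request.ProxyHandler({
        "http"  : "http://%s:%s" % (proxyHost, proxyPort),
        "https" : "http://%s:%s" % (proxyHost, proxyPort),
    })

    # credentials are looked up by proxy authority when the 407 challenge arrives
    password_mgr = request.HTTPPasswordMgrWithDefaultRealm()
    password_mgr.add_password(None, "%s:%s" % (proxyHost, proxyPort), proxyUser, proxyPass)
    auth_handler = request.ProxyBasicAuthHandler(password_mgr)

    opener = request.build_opener(proxy_handler, auth_handler)
    print(opener.open("http://test.abuyun.com").read())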
Example: urllib2 (Python 2)
    # -*-*-
    # Thanks to 『zh (QQ: 315393472)』 for contributing this sample code
    # -*-*-

    # -*- coding: utf-8 -*-

    import urllib2

    # Target page to fetch
    targetUrl = "http://test.abuyun.com"

    # Proxy server
    proxyHost = "http-dyn.abuyun.com"
    proxyPort = "9020"

    # Proxy tunnel credentials
    proxyUser = "H01234567890123D"
    proxyPass = "0123456789012345"

    proxyMeta = "http://%(user)s:%(pass)s@%(host)s:%(port)s" % {
      "host" : proxyHost,
      "port" : proxyPort,
      "user" : proxyUser,
      "pass" : proxyPass,
    }

    proxy_handler = urllib2.ProxyHandler({
        "http"  : proxyMeta,
        "https" : proxyMeta,
    })

    opener = urllib2.build_opener(proxy_handler)

    urllib2.install_opener(opener)
    resp = urllib2.urlopen(targetUrl).read()

    print resp              
Example: requests
    # -*-*-
    # Thanks to 『zh (QQ: 315393472)』 for contributing this sample code
    # -*-*-

    # -*- coding: utf-8 -*-

    import requests

    # Target page to fetch
    targetUrl = "http://test.abuyun.com"
    #targetUrl = "http://proxy.abuyun.com/switch-ip"
    #targetUrl = "http://proxy.abuyun.com/current-ip"

    # Proxy server
    proxyHost = "http-dyn.abuyun.com"
    proxyPort = "9020"

    # Proxy tunnel credentials
    proxyUser = "H01234567890123D"
    proxyPass = "0123456789012345"

    proxyMeta = "http://%(user)s:%(pass)s@%(host)s:%(port)s" % {
      "host" : proxyHost,
      "port" : proxyPort,
      "user" : proxyUser,
      "pass" : proxyPass,
    }

    proxies = {
        "http"  : proxyMeta,
        "https" : proxyMeta,
    }

    resp = requests.get(targetUrl, proxies=proxies)

    print(resp.status_code)
    print(resp.text)
Example: Scrapy downloader middleware
    # -*-*-
    # Thanks to 『挖掘机强森 (QQ: 615918332)』 for pointing out a correction
    # -*-*-

    # -*- coding: utf-8 -*-

    import base64

    # Proxy server
    proxyServer = "http://http-dyn.abuyun.com:9020"

    # Proxy tunnel credentials
    proxyUser = "H01234567890123D"
    proxyPass = "0123456789012345"

    # for Python2
    proxyAuth = "Basic " + base64.b64encode(proxyUser + ":" + proxyPass)

    # for Python3
    #proxyAuth = "Basic " + base64.b64encode((proxyUser + ":" + proxyPass).encode("ascii")).decode("ascii")

    class ProxyMiddleware(object):
        def process_request(self, request, spider):
            request.meta["proxy"] = proxyServer
            # compatibility with Scrapy 2.6.2+, which otherwise strips the Proxy-Authorization header
            request.meta["_auth_proxy"] = proxyServer

            request.headers["Proxy-Authorization"] = proxyAuth
            request.headers["Connection"] = "close"              
Example: aiohttp (asyncio)
    # -*- coding: utf-8 -*-

    import aiohttp, asyncio

    # Target page to fetch
    targetUrl = "http://test.abuyun.com"

    # Proxy server
    proxyHost = "http-dyn.abuyun.com"
    proxyPort = "9020"

    # Proxy tunnel credentials
    proxyUser = "H01234567890123D"
    proxyPass = "0123456789012345"

    proxyServer = "http://%(user)s:%(pass)s@%(host)s:%(port)s" % {
        "host" : proxyHost,
        "port" : proxyPort,
        "user" : proxyUser,
        "pass" : proxyPass,
    }

    userAgent = "curl/7.x/line"

    async def entry():
        conn = aiohttp.TCPConnector(ssl=False)

        async with aiohttp.ClientSession(headers={"User-Agent": userAgent}, connector=conn) as session:
            async with session.get(targetUrl, proxy=proxyServer) as resp:
                body = await resp.read()

                print(resp.status)
                print(body)

    # run the coroutine to completion and close the event loop
    asyncio.run(entry())
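
aiohttp can also keep the credentials out of the proxy URL and pass them separately through the proxy_auth parameter; a short sketch with the same placeholder credentials:

    # Alternative: supply the tunnel credentials via proxy_auth
    import aiohttp, asyncio

    async def fetch():
        auth = aiohttp.BasicAuth("H01234567890123D", "0123456789012345")
        async with aiohttp.ClientSession() as session:
            async with session.get("http://test.abuyun.com",
                                   proxy="http://http-dyn.abuyun.com:9020",
                                   proxy_auth=auth) as resp:
                print(resp.status)
                print(await resp.read())

    asyncio.run(fetch())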