# Tutorial: three ways to route scraping traffic through an HTTP proxy.

# --- 1. Setting a proxy with requests ---
import requests

proxy = '127.0.0.1:9743'
# requests picks the proxy by URL scheme, so both keys are needed.
proxies = {
    'http': 'http://' + proxy,
    'https': 'https://' + proxy,
}
try:
    response = requests.get('http://httpbin.org/get', proxies=proxies)
    print(response.text)
except requests.exceptions.ConnectionError as e:
    print('Error', e.args)

# --- 2. Setting a proxy with selenium ---
from selenium import webdriver

proxy = '127.0.0.1:9743'
chrome_options = webdriver.ChromeOptions()
# Chrome receives the proxy as a command-line switch.
chrome_options.add_argument('--proxy-server=http://' + proxy)
browser = webdriver.Chrome(chrome_options=chrome_options)
browser.get('http://httpbin.org/get')


# --- 3. Setting a proxy in scrapy (downloader middleware) ---
class ProxyMiddleware(object):
    """Scrapy downloader middleware that attaches a random proxy
    (drawn from a locally maintained proxy pool) to each request."""

    def get_random_proxy(self):
        """Return one proxy address from the local pool service,
        or None if the pool is unreachable or returned an error.

        The pool service at 127.0.0.1:5555/random is maintained locally.
        """
        try:
            proxy = requests.get('http://127.0.0.1:5555/random')
            if proxy.status_code == 200:
                return proxy.text
        # Narrowed from a bare `except:` — only network/HTTP failures
        # should mean "no proxy available", not e.g. KeyboardInterrupt.
        except requests.RequestException:
            return None

    def process_request(self, request, spider):
        """Scrapy hook: tag the outgoing request with a proxy if one
        is available; otherwise let it go out directly."""
        proxy = self.get_random_proxy()
        if proxy:
            request.meta['proxy'] = proxy
设置IP代理池
内容版权声明:除非注明,否则皆为本站原创文章。