#Import
from urllib.request import ProxyDigestAuthHandler, Request, urlopen
from bs4 import BeautifulSoup
from bs4.element import SoupStrainer
import requests
# PreInit
# NOTE: a dict cannot hold duplicate keys — the original literal repeated
# "https" five times, silently keeping only the last proxy. Keep the pool
# as a list and try each proxy in turn instead.
PROXY_POOL = [
    "54.93.88.15:9300",
    "165.22.81.30:38244",
    "207.154.205.135:9999",
    "88.198.26.145:8080",
    "144.91.86.144:3128",
]
url = "https://www.ebay.de/b/Laptops-Notebooks/175672/bn_1618754?LH_ItemCondition=7000&mag=1&rt=nc&_dmd=1&_sop=1"

# Init
# urllib.request.Request has no `proxies` keyword (that caused the original
# TypeError); requests.get does, so do the proxied download via the session.
with requests.Session() as c:
    c.headers.update({"User-Agent": "Mozilla/5.0"})
    all_data = []
    webpage = None
    for proxy in PROXY_POOL:
        try:
            response = c.get(url, proxies={"https": proxy}, timeout=10)
            response.raise_for_status()
            webpage = response.content
            break  # first working proxy wins
        except requests.RequestException:
            continue  # dead/blocked proxy — try the next one
    if webpage is None:
        raise RuntimeError("all proxies failed to fetch the page")
    # Web init
    soup = BeautifulSoup(webpage, "html5lib")
    # Data collection: `shipping` was never defined in the original (NameError);
    # select the per-listing shipping elements before printing them.
    # NOTE(review): ".s-item__shipping" is eBay's current listing markup —
    # confirm against the live page, as eBay changes class names.
    for shipping in soup.select(".s-item__shipping"):
        print(shipping.text.strip())
The above is the relevant part of the error that occurs when I run the script. Where do I plug in the "proxies" value? Help appreciated.
# Note: assigning more than one value to a key in a dict literal keeps only
# the final value, so configure a single proxy per URL scheme here.
# The file only does `from urllib.request import ...`, so the bare `urllib`
# name used below was unbound (NameError) — import the module explicitly.
import urllib.request

PROXY = {"https": "54.93.88.15:9300"}
url = "https://www.ebay.de/b/Laptops-Notebooks/175672/bn_1618754?LH_ItemCondition=7000&mag=1&rt=nc&_dmd=1&_sop=1"
req = Request(url, headers={"User-Agent": "Mozilla/5.0"})
# Create a ProxyHandler so every urlopen call is routed through the proxy.
proxy_support = urllib.request.ProxyHandler(PROXY)
# Build an opener that uses the handler...
opener = urllib.request.build_opener(proxy_support)
# ...and install it globally for urlopen.
urllib.request.install_opener(opener)
webpage = urllib.request.urlopen(req)
Downgrade httpx to version 0.27.2, which still supports the "proxies" keyword. Try: pip install httpx==0.27.2