用硒、PhantomJS和BS4刮擦

2024-06-26 13:27:55 发布

您现在位置:Python中文网/ 问答频道 /正文

我目前正在使用Windows10和Python3.7,我一直在阅读关于如何在不打开1个Firefox浏览器窗口的情况下对urls列表中的每个正在被刮取的URL进行刮取。下面的代码抛出了一个错误,我确信这与PhantomJS是如何实现的有关,我只是不知道具体是什么

我读过PhantomJS与Selenium一起使用时是一种解决方案。我安装了PJS,在我的计算机上设置了路径,它似乎正在运行,但是我不完全确定如何在代码中实现它

driver = webdriver.PhantomJS(executable_path=r"C:\phantomjs")行是尝试运行PJS的行。在使用driver = webdriver.Firefox()之前,代码运行良好

urls = ["https://www.guitarcenter.com/Used/Bass.gc#pageName=used-page&N=18171+1076&Nao=0&recsPerPage=90&postalCode=02494&radius=100&profileCountryCode=US&profileCurrencyCode=USD","https://www.guitarcenter.com/Used/Bass.gc#pageName=used-page&N=18171+1076&Nao=90&recsPerPage=90&postalCode=02494&radius=100&profileCountryCode=US&profileCurrencyCode=USD","https://www.guitarcenter.com/Used/Bass.gc#pageName=used-page&N=18171+1076&Nao=180&recsPerPage=90&postalCode=02494&radius=100&profileCountryCode=US&profileCurrencyCode=USD","https://www.guitarcenter.com/Used/Bass.gc#pageName=used-page&N=18171+1076&Nao=270&recsPerPage=90&postalCode=02494&radius=100&profileCountryCode=US&profileCurrencyCode=USD","https://www.guitarcenter.com/Used/Bass.gc#pageName=used-page&N=18171+1076&Nao=360&recsPerPage=90&postalCode=02494&radius=100&profileCountryCode=US&profileCurrencyCode=USD","https://www.guitarcenter.com/Used/Bass.gc#pageName=used-page&N=18171+1076&Nao=450&recsPerPage=90&postalCode=02494&radius=100&profileCountryCode=US&profileCurrencyCode=USD","https://www.guitarcenter.com/Used/Bass.gc#pageName=used-page&N=18171+1076&Nao=540&recsPerPage=90&postalCode=02494&radius=100&profileCountryCode=US&profileCurrencyCode=USD","https://www.guitarcenter.com/Used/Bass.gc#pageName=used-page&N=18171+1076&Nao=630&recsPerPage=90&postalCode=02494&radius=100&profileCountryCode=US&profileCurrencyCode=USD","https://www.guitarcenter.com/Used/Bass.gc#pageName=used-page&N=18171+1076&Nao=720&recsPerPage=90&postalCode=02494&radius=100&profileCountryCode=US&profileCurrencyCode=USD","https://www.guitarcenter.com/Used/Bass.gc#pageName=used-page&N=18171+1076&Nao=810&recsPerPage=90&postalCode=02494&radius=100&profileCountryCode=US&profileCurrencyCode=USD","https://www.guitarcenter.com/Used/Bass.gc#pageName=used-page&N=18171+1076&Nao=900&recsPerPage=90&postalCode=02494&radius=100&profileCountryCode=US&profileCurrencyCode=USD","https://www.guitarcenter.com/Used/Bass.gc#pageName=used-page&N=18171+1076&Nao=990&recsPerPage=90&postalCode=02494&radius=100&profileCountryCode=US&profileCurrencyCode=USD"]
#url = "https://www.guitarcenter.com/Used/Bass.gc#pageName=used-page&N=18171+1076&Nao=180&recsPerPage=90&postalCode=02494&radius=100&profileCountryCode=US&profileCurrencyCode=USD"

user_agent = UserAgent()

#make csv file
csv_file = open("gcscrape.csv", "w", newline='') #added the newline thing on 5.17.20 to try to stop blank lines from writing
csv_writer = csv.writer(csv_file)
csv_writer.writerow(["bass_name","bass_price"])

for url in urls:
    web_r = requests.get(url)
    web_soup = BeautifulSoup(web_r.text,"html.parser")

        #print(web_soup.findAll("li", class_="product-container")) #finding all of the grid items on the url above - price, photo, image, details and all
        #print(len(web_soup.findAll("li", class_="product-container"))) #printing out the length of the

    #driver = webdriver.Firefox()
    driver = webdriver.PhantomJS(executable_path=r"C:\phantomjs")
    driver.get(url)
    html = driver.execute_script("return document.documentElement.outerHTML") #whats inside of this is a javascript call to get the outer html content of the page
    sel_soup = BeautifulSoup(html, "html.parser")

    for content in sel_soup.findAll("li", class_="product-container"):
            #print(content)

        bass_name = content.find("div", class_="productTitle").text.strip() #pulls the bass guitar name
        print(bass_name)

        prices_new = []
        for i in content.find("span", class_="productPrice").text.split("$"):
            prices_new.append(i.strip())
        bp = prices_new[1]
        print(bp)

        #write row to new csv file
        csv_writer.writerow([bass_name, bp])

Tags: httpscomwwwpageusedusradiusnao