Web scraping a betting site

Hello there,
I’m pretty new to web scraping and I’m really stuck on getting the output to look like what I want.

What is your exact problem? Do you have any code so far?

from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException, WebDriverException
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.support.ui import WebDriverWait
from fake_useragent import UserAgent
# Creating a user agent instance
ua = UserAgent()
user_agent = ua.random
def selenium_scraper(url):
    # Selenium Options
    options = Options()
    options.add_argument('--headless')
    options.add_argument('--disable-gpu')
    options.add_argument('--no-sandbox') 
    options.add_argument('window-size=1920x1080')  # if the window is too small, some of the values we need are not rendered and cannot be extracted
    options.add_argument(f'user-agent={user_agent}')
    seconds = 50
    path = r'C:\Dev\drivers\chromedriver'
    
    try:
        driver = webdriver.Chrome(path, options=options)  # you need to download chromedriver (make sure to get the one for your OS and Chrome version)
        driver.implicitly_wait(seconds) # The sites are very slow to load
        wait = WebDriverWait(driver, 50)
        content = []
        odds = []
        i = 0
        while i < len(url):
            try:
                driver.get(url[i])
            except WebDriverException:
                i += 1  # skip this URL instead of retrying it forever
                continue

            if i == 0:
                d = []
                link = driver.find_elements_by_css_selector('div.item.ng-scope')
                link[2].click()  # open the third item in the menu
                body = driver.find_elements_by_css_selector('div.datiSE')  # fetching the main div
                bod = driver.find_elements_by_css_selector('div.style_col__ncUvn.style_colMarkets__2zo1K.style_col3Buttons__1IZWF')  # currently unused
                books = driver.find_elements_by_css_selector('a.sottoevento.ng-binding')  # fetching the games
                value = driver.find_elements_by_css_selector('div.odd')  # fetching the odds
                count = 0
                for b in books:
                    content.append(b.text)
                for v in value:
                    vz = v.find_elements_by_css_selector('div.QuotaValore')
                    for vs in vz:
                        d.append(vs.text)
                    # l = len(d)
                    # for odd in range(0, l, 7):
                    #     odds.append(d[odd])
                    #     odds.append(d[odd+1])
                odds.append(d)

                # print(f'Scraped: {content} - Odds: {odds}')
                print("================ 9jaBet =========================")
                print("Games - ", content, "\n"*2)
                print("Odds - ", odds, "\n"*2)
            if i == 1:
                print("=============== Pinnacle Bet =====================")
                value = driver.find_elements_by_css_selector('span.ellipsis')  # fetching the only element we need
                
                for v in value:
                    print(f'Scraped: {v.text}')
            i += 1
        driver.quit()
    except NoSuchElementException:
        print('Element not found :(')
        driver.quit()
selenium_scraper(['https://web.bet9ja.com/Sport/Default.aspx', 'https://www.pinnacle.com/en/'])

And what is the exact problem?

I’m not getting what I need.
For each game I only want 2 or 3 odds, not everything.
I’m selecting by CSS selector, so it just brings back all of them.
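
Something like this is roughly the shape I’m after (for the Bet9ja side): scope the lookup to each game’s row and keep only the first few odds. The 'div.Event' row selector below is just a guess on my part, not something I’ve confirmed on the page:

rows = driver.find_elements_by_css_selector('div.Event')  # hypothetical per-game container
for row in rows:
    game = row.find_element_by_css_selector('a.sottoevento.ng-binding').text
    odds = [o.text for o in row.find_elements_by_css_selector('div.QuotaValore')[:3]]  # keep only the first 3 odds
    print(game, odds)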
