How to scroll to the end in Selenium Python

I'm trying to scroll to the end of this page url.
When I get to the page and click the button 'Show all 77 products', a popup opens that only partially shows its elements. This is my code so far:
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from time import sleep

def getpage(driver):
    driver.get('https://www.binance.com/it/pos')
    sleep(3)
    driver.find_element_by_xpath('//div[@id="savings-lending-pos-expend"]').click()
    sleep(2)
    elem = driver.find_element_by_xpath('//div[@class="css-n1ers"]')
    elem.send_keys(Keys.END)

driver = webdriver.Firefox()
getpage(driver)
I have tried almost everything to make this work. Apart from the solution in the code above, I also tried the following with no success:
driver.execute_script("window.scrollTo(0, Y)")
and
driver.execute_script("window.scrollTo(0, document.body.scrollHeight);")
and with this solution I didn't understand which label (element) to use:
label.sendKeys(Keys.PAGE_DOWN);
I tried almost all solutions but none worked. I hope you can help me. Thank you.
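(If I understand correctly, label there should just be a reference to the element that receives the keys, something like the sketch below with a placeholder selector, but I don't know which element to target inside this popup.)

from selenium.webdriver.common.keys import Keys

# 'label' is whatever element should receive the keystrokes; the selector below is only a placeholder
label = driver.find_element_by_css_selector('div.some-scrollable-container')
label.click()
label.send_keys(Keys.PAGE_DOWN)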

Try the approach below and confirm:
You can find each row and apply scrollIntoView to it.
# Imports required for Explicit wait:
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
import time

def getpage(driver):
    driver.get('https://www.binance.com/it/pos')
    wait = WebDriverWait(driver, 30)
    wait.until(EC.element_to_be_clickable((By.ID, "savings-lending-pos-expend"))).click()  # show more option
    i = 0
    try:
        while True:
            options = driver.find_elements_by_xpath("//div[@id='modal-wrapper']/div")  # Try to find rows.
            driver.execute_script("arguments[0].scrollIntoView(true);", options[i])
            time.sleep(1)
            i += 1
    except IndexError as e:
        print("Exception: {}".format(e.args[-1]))
    print(i)

# driver is created as in the question: driver = webdriver.Firefox()
getpage(driver)

You can use ActionChains
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.keys import Keys

actions = ActionChains(driver)
actions.send_keys(Keys.PAGE_DOWN).perform()
That will make the page scroll down similar to pressing the Page Down key.
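If what needs to scroll is a popup rather than the window itself, the key may have to go to the popup; a rough sketch (the modal-wrapper locator is taken from the answer above and may need adjusting to the real markup):

from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.keys import Keys

popup = driver.find_element_by_id('modal-wrapper')  # assumed locator for the popup container
ActionChains(driver).move_to_element(popup).click().send_keys(Keys.PAGE_DOWN).perform()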

This solution worked:
from selenium import webdriver
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from time import sleep

def getpage(driver):
    driver.get('https://www.binance.com/it/pos')
    wait = WebDriverWait(driver, 30)
    wait.until(EC.element_to_be_clickable((By.ID, "savings-lending-pos-expend"))).click()
    i = 0
    sleep(5)
    pop_up_window = WebDriverWait(driver, 2).until(EC.element_to_be_clickable((By.XPATH, "//div[@id='modal-scroller']")))
    while True:
        driver.execute_script('arguments[0].scrollTop = arguments[0].scrollTop + arguments[0].offsetHeight;', pop_up_window)
        sleep(1)

driver = webdriver.Firefox()
getpage(driver)
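As written, the while loop never exits; if you want it to stop once the popup can't scroll any further, a variation I believe should work (same locator, just tracking scrollTop) is:

    last_top = -1
    while True:
        driver.execute_script('arguments[0].scrollTop = arguments[0].scrollTop + arguments[0].offsetHeight;', pop_up_window)
        sleep(1)
        new_top = driver.execute_script('return arguments[0].scrollTop;', pop_up_window)
        if new_top == last_top:  # the modal did not move any further, so we reached the bottom
            break
        last_top = new_top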

Related

Selenium: how to see full DOM structure?

I've seen similar posts about this question where the resolution was using WebDriverWait, but I still kept getting an exception saying the selector is not present.
Even when I printed driver.execute_script("return document.body.innerHTML;") at the end of my code, the full DOM didn't show up; it looks exactly like the page source, but I need the rest of the HTML elements.
from multiprocessing.connection import wait
from ntpath import join
import os
from xml.dom.minidom import Element
from selenium import webdriver
from selenium.webdriver.common.by import By ## Used for grabbing elements by
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
import time
os.environ['PATH'] += ";C:\seleniumDrivers"
chrome = webdriver.Chrome()
chrome.get("https://jklm.fun/XSNM")
chrome.implicitly_wait(10)
name = chrome.find_element(By.CLASS_NAME, "nickname")
name.clear()
name.send_keys("Mr.Roboto")
btn = chrome.find_element(By.XPATH, '/html/body/div[2]/div[3]/form/div[2]/button')
btn.click()
join_btn = WebDriverWait(chrome, 1000000).until(EC.presence_of_element_located(
    chrome.find_element(
        By.XPATH, '/html/body/div[2]/div[3]/div[1]/div[1]/button')))
#join_btn = chrome.find_element(By.XPATH, '/html/body/div[2]/div[3]/div[1]/div[1]/button')
#join_btn = chrome.find_element(By.CSS_SELECTOR, 'button[data-text="joinGame"')
join_btn.click()
The element was in an iframe. I used chrome.switch_to.frame() to reach it.
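A minimal sketch of that fix (the iframe locator is an assumption; use whatever identifies the actual frame on the page):

# Switch into the iframe that contains the join button (locator is hypothetical)
iframe = chrome.find_element(By.CSS_SELECTOR, "iframe")
chrome.switch_to.frame(iframe)

join_btn = WebDriverWait(chrome, 10).until(
    EC.element_to_be_clickable((By.CSS_SELECTOR, 'button[data-text="joinGame"]')))
join_btn.click()

# Switch back to the main document afterwards
chrome.switch_to.default_content()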

Selenium won't click on all buttons, only a few

I'm trying to click the buttons on the left-hand panel of the web page I'm scraping. However, Selenium seems to click only a few of these buttons. I have added a time.sleep between each click, which did not make a difference.
I just get the following error:
NoSuchElementException: Message: no such element: Unable to locate element: {"method":"xpath","selector":"(//div[@class='toggle-bottom-filter'])[7]"}
I have double-checked that the path exists on the website, so I'm not sure why it's undetectable. Any ideas?
Here's my script:
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import time

driver = webdriver.Chrome()
driver.get("https://www.theparking.eu/#!/used-cars/")
wait = WebDriverWait(driver, 15)
wait.until(EC.visibility_of_element_located((By.CSS_SELECTOR, "#bloc-filter")))
wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR, "div[class='sd-cmp-25TOo'] span[class='sd-cmp-16t61 sd-cmp-2JYyd sd-cmp-3cRQ2']"))).click()
#WebDriverWait(driver, 10).until(EC.frame_to_be_available_and_switch_to_it((By.XPATH, "//iframe[@title='Electric car drivers will soon no longer be able to charge their cars here.']")))
stuff = []
for more in range(1, 9):
    time.sleep(2)
    driver.find_element(By.XPATH, f"(//div[@class='toggle-bottom-filter'])[{more}]").click()
data = driver.page_source
# ... parse with beautifulsoup
Not all 9 elements you are trying to click are initially visible; you will first have to scroll each element into view and only then click it.
I see there are 11 elements matching the //div[@class='toggle-bottom-filter'] locator there, so you should probably change the for loop to for more in range(1, 12):
I think the following code should work better:
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.action_chains import ActionChains
import time

driver = webdriver.Chrome()
driver.get("https://www.theparking.eu/#!/used-cars/")
wait = WebDriverWait(driver, 15)
actions = ActionChains(driver)
wait.until(EC.visibility_of_element_located((By.CSS_SELECTOR, "#bloc-filter")))
wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR, "div[class='sd-cmp-25TOo'] span[class='sd-cmp-16t61 sd-cmp-2JYyd sd-cmp-3cRQ2']"))).click()
#WebDriverWait(driver, 10).until(EC.frame_to_be_available_and_switch_to_it((By.XPATH, "//iframe[@title='Electric car drivers will soon no longer be able to charge their cars here.']")))
stuff = []
for more in range(1, 12):
    time.sleep(2)
    button = driver.find_element(By.XPATH, f"(//div[@class='toggle-bottom-filter'])[{more}]")
    actions.move_to_element(button).perform()
    time.sleep(0.5)
    button.click()
    time.sleep(0.5)
data = driver.page_source
# ... parse with beautifulsoup

ActionChains doesn't work, unless I manually refresh the selenium browser

I'm using selenium, and in the middle of the automation, this code only works if I manually refresh the browser.
If I do driver.get("URL"), or driver.refresh(), it doesn't help.
from selenium.webdriver.common.action_chains import ActionChains
elems = driver.find_elements_by_tag_name('div')
elem = elems[3]
action = ActionChains(driver)
action.click(elem)
action.send_keys("text")
action.perform()
elem.send_keys() doesn't work for me at all, no matter what I try. elem.click() does work, but that doesn't help if elem.send_keys() doesn't.
I think you can use an explicit wait in this case, like this:
from selenium.webdriver.common.action_chains import ActionChains
wait = WebDriverWait(driver, 50)
elems = driver.find_elements_by_tag_name('div')
elem = elems[3]
action = ActionChains(driver).move_to_element(elem).perform()
wait.until(EC.visibility_of((elem))).click()
time.sleep(2)
wait.until(EC.visibility_of((elem))).send_keys("text")
Imports:
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
import time
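If the page gets reloaded mid-automation, a previously found element can also go stale, so it may help to re-locate it right before building the chain. A rough sketch reusing the same div index from the question:

from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By

wait = WebDriverWait(driver, 50)
wait.until(EC.presence_of_all_elements_located((By.TAG_NAME, 'div')))
elem = driver.find_elements_by_tag_name('div')[3]  # re-locate after the page settles
ActionChains(driver).move_to_element(elem).click().send_keys("text").perform()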

I want to execute button.click() once a value of more than "+1.00%" is found in a webpage using Selenium, Python

Guys, please help. I want to execute an element.click() operation once an element whose text is more than "+1.00%" is found in the web page. Below is my code; it repeats until it finds text of more than "+1.00%":
def main():
    while True:
        try:
            element = browser.find_element_by_link_text('+1.00%')
            element2 = browser.find_element_by_link_text('Balances')
            if element > '+1.00%':
                element2.click()
                time.sleep(5)
        except:
            main()
Use WebDriverWait instead:
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium import webdriver

driver.get('url')
WebDriverWait(driver, 10000).until(
    EC.presence_of_element_located(
        (By.XPATH, '//*[contains(text(),"+1.50")]')
    )
).click()
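Note that presence_of_element_located with a fixed text like "+1.50" only waits for that exact substring. If the goal is really "any value greater than +1.00%", one way to sketch it (the link-text locators come from the question, and the percentage format is an assumption) is to poll the candidate elements and parse the number:

import re
import time

def click_when_above_threshold(browser, threshold=1.00, timeout=600):
    end = time.time() + timeout
    while time.time() < end:
        # Look at every link whose text contains a percent sign
        for el in browser.find_elements_by_partial_link_text('%'):
            match = re.search(r'([+-]?\d+(?:\.\d+)?)%', el.text)
            if match and float(match.group(1)) > threshold:
                browser.find_element_by_link_text('Balances').click()
                return True
        time.sleep(5)
    return False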

Cannot click on element

I'm trying to automate browsing through several pages of lists of doctors. The part I'm having difficulty with is getting Selenium to find and click the right-hand arrow that advances to the next page of 10 doctors.
I've been trying several different Stack Overflow solutions for the past few days and I'm still stumped.
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
# from selenium.webdriver.common import move_to_element
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.actions import interaction
import time
import sys

browser = webdriver.Chrome('C:/chromedriver.exe')
browser.get('https://connect.werally.com/county-plan-selection/uhc.mnr/zip')
elem_ZipInput = browser.find_element_by_xpath('//*[@id="location"]')
elem_ZipInput.click()
elem_ZipInput.send_keys('80210')
elem_ZipInput.send_keys(Keys.ENTER)
time.sleep(2)
browser.find_element_by_xpath("//button[@track='No Preference']").click()
time.sleep(3)
browser.find_element_by_xpath("//button[@data-test-id='People']").click()
time.sleep(2)
try:
    browser.find_element_by_xpath("//button[@data-test-id='Primary Care']").click()
except:
    browser.find_element_by_xpath("//button[@data-test-id='PrimaryCare']").click()
time.sleep(2)
try:
    browser.find_element_by_xpath("//button[@data-test-id='All Primary Care Physicians']").click()
except:
    browser.find_element_by_xpath("//button[@data-test-id='AllPrimaryCarePhysicians']").click()
time.sleep(2)
elem_PCPList_NextPage = browser.find_element_by_xpath("//i[@class='icon icon_arrow_right']")
ProviderPageTab_Overview = browser.find_element_by_xpath("//*[@id='provider.bioTab']")
ProviderPageTab_Overview.click()
time.sleep(2)
# WebDriverWait(browser, 20).until(EC.element_to_be_clickable((By.XPATH, "//compare-providers[@class='navigationHeader visible-phone']/div/div/button[@track='next-page']/icon/i"))).click()
# WebDriverWait(browser, 20).until(EC.element_to_be_clickable((By.CSS_SELECTOR, "div[id='mainContent'] div div header compare-providers[class='navigationHeader visible-phone'] div div button[track='next-page']"))).click()
# WebDriverWait(browser, 20).until(EC.element_to_be_clickable((By.CSS_SELECTOR, "div[id='mainContent'] div div header div[class='navigationHeader hidden-phone'] div div button[track='next-page'] icon"))).click()
wait = WebDriverWait(webdriver, 10)
wait.until(EC.element_to_be_clickable(By.CSS_SELECTOR, "div[id='mainContent'] div div header div[class='navigationHeader hidden-phone'] div div button[track='next-page'] icon"))
# print(browser.find_element_by_xpath("//i[@class='icon icon_arrow_right']"))
# print(browser.find_element_by_xpath("//button[@aria-label='Next Page']"))
next_Provider = browser.find_element_by_xpath("//compare-providers[@class='navigationHeader visible-phone']/div/div/button[@track='next-page']/icon/i")
# print(//compare-providers[@class='navigationHeader visible-phone']/div/div/button[@track='next-page']/icon/i)
# print(browser.find_element_by_xpath("//button[@track='next-page']"))
# print(browser.find_element_by_xpath("//icon[@type=\"'icon_arrow_right'\"]"))
next_Provider.click()
Any suggestions or feedback would really be appreciated!
To click() on the desired element you have to induce WebDriverWait for the desired element_to_be_clickable() and you can use either of the following Locator Strategies:
Using CSS_SELECTOR:
WebDriverWait(driver, 20).until(EC.element_to_be_clickable((By.CSS_SELECTOR, "button[track='next-page'][aria-label='Next Page'] i.icon.icon_arrow_right"))).click()
Using XPATH:
WebDriverWait(driver, 20).until(EC.element_to_be_clickable((By.XPATH, "//button[@track='next-page' and @aria-label='Next Page']//i[@class='icon icon_arrow_right']"))).click()
Note: you have to add the following imports:
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
The following worked for me -
next_page_btn = browser.find_element_by_xpath("//button[#track='next-page']")
next_page_btn.click()
time.sleep(2)
First check whether the element is visible on the page, and only then click it.
Here is example code (Python equivalent of the original Java-style snippet):
wait = WebDriverWait(browser, 20)
wait.until(EC.visibility_of_element_located((By.XPATH, "path of the element")))
browser.find_element_by_xpath("path of the element").click()