I am trying to automatically click a button with the id downloadButton on this demo site, https://www.globalsqa.com/demo-site/progress-bar, but it doesn't work.
ChromeDriver opens, then nothing happens on the page, and after a while it closes, I assume because of a timeout?
Traceback:
Traceback (most recent call last):
File "c:\Users\x\Dropbox\PC\Documents\Python Project\selenium tutorial\tutorial1.py", line 22, in <module>
my_element = driver.find_element(By.ID, "downloadButton")
File "C:\Python310\lib\site-packages\selenium\webdriver\support\wait.py", line 89, in until
raise TimeoutException(message, screen, stacktrace)
I added an implicit wait in case the button doesn't exist yet, but it doesn't help.
import os
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
ser = Service("C:/Program Files (x86)/chromedriver.exe")
options = webdriver.ChromeOptions()
options.add_experimental_option('excludeSwitches', ['enable-logging'])
driver = webdriver.Chrome(service=ser, options=options)
driver.get("https://www.globalsqa.com/demo-site/progress-bar/")
# Wait for the download button to be located
button_wait = EC.presence_of_element_located((By.ID, 'downloadButton'))
WebDriverWait(driver, 10).until(button_wait)
driver.implicitly_wait(30)
# my_element = driver.find_element_by_id('downloadButton')
my_element = driver.find_element(By.ID, "downloadButton")
my_element.click()
Chrome version: 98.0.4758.81
The element you are trying to access is inside an iframe.
So you first have to switch into that iframe in order to access elements inside it.
Like the following:
import os
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
ser = Service("C:/Program Files (x86)/chromedriver.exe")
options = webdriver.ChromeOptions()
options.add_experimental_option('excludeSwitches', ['enable-logging'])
driver = webdriver.Chrome(service=ser, options=options)
wait = WebDriverWait(driver, 20)
driver.get("https://www.globalsqa.com/demo-site/progress-bar/")
wait.until(EC.frame_to_be_available_and_switch_to_it((By.CSS_SELECTOR,"iframe.demo-frame.lazyloaded")))
wait.until(EC.visibility_of_element_located((By.ID, "downloadButton"))).click()
When you are finished working inside the iframe, you will have to switch back to the default content with
driver.switch_to.default_content()
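Putting both steps together, a minimal sketch (reusing the driver, wait, and locators from the snippet above):
# Enter the iframe that hosts the demo and click the download button...
wait.until(EC.frame_to_be_available_and_switch_to_it(
    (By.CSS_SELECTOR, "iframe.demo-frame.lazyloaded")))
wait.until(EC.visibility_of_element_located((By.ID, "downloadButton"))).click()

# ...then switch back so later lookups target the main document again.
driver.switch_to.default_content()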
Related
I am struggling to keep the session open the whole time. The code executes, but only when the link is already present.
driver.refresh
doesn't seem to keep the page refreshed.
Is there a way to refresh the page every couple of seconds?
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.chrome.options import Options

user_name = "username"
password = "password"

driver = webdriver.Chrome()
driver.get('https://mywebsite.com/boardslots.aspx?date=07%2F31%2F2022')

# Log in
element = driver.find_element(By.ID, "txtUsername")
element.send_keys(user_name)
element = driver.find_element(By.ID, "txtPassword")
element.send_keys(password)
element.send_keys(Keys.RETURN)

# Keep checking for the "Claim" link, refreshing when it is not there yet
while True:
    try:
        driver.find_element(By.LINK_TEXT, "Claim")
    except NoSuchElementException:
        driver.refresh
    else:
        driver.find_element(By.LINK_TEXT, "Claim").click()
        try:
            # element = WebDriverWait(driver, 1)
            element = WebDriverWait(driver, 10).until(EC.visibility_of_element_located(
                (By.XPATH, '/html/body/form/div[3]/div[3]/table/tbody/tr[2]/td/table/tbody/tr[2]/td[9]/a')))
        finally:
            driver.find_element(By.ID, "btnSubmitSingle").click()
# /html/body/form/div[3]/div[3]/table/tbody/tr[2]/td/table/tbody/tr[2]/td[9]/a
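Two details in the code above are worth flagging: refresh is a method, so it has to be called as driver.refresh() (without the parentheses the line does nothing), and the loop has no pause, so it checks as fast as it can. A minimal sketch of a loop that refreshes every couple of seconds, reusing the question's driver, login steps, and locators (which are assumptions about that site):
import time

while True:
    try:
        # Re-locate the link on every pass; it may appear after any refresh.
        claim = driver.find_element(By.LINK_TEXT, "Claim")
    except NoSuchElementException:
        driver.refresh()   # note the parentheses: refresh() is a method call
        time.sleep(2)      # wait a couple of seconds before checking again
        continue
    claim.click()
    WebDriverWait(driver, 10).until(
        EC.element_to_be_clickable((By.ID, "btnSubmitSingle"))).click()
    break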
I would like to stay logged in to a site that requires a new login about every 2 hours. My idea is to open a parallel session, log in there, and inject its cookies into the first session.
To test this, I first tried to build a small example that reuses the cookies of Chrome WebDriver session 1 in Chrome WebDriver session 2, but a new login is still requested.
Thanks in advance for your help.
import pickle
import os
from time import sleep
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

# Session 1: log in and save the cookies
option = webdriver.ChromeOptions()
option.add_argument("--no-sandbox")
driver = webdriver.Chrome(options=option)
driver.get("https://BANK_WEB_SITE.com/login/")
sleep(5)
LOGIN()  # placeholder for the actual login steps
pickle.dump(driver.get_cookies(), open("cookies.pkl", "wb"))

# Session 2: try to reuse the cookies
option = webdriver.ChromeOptions()
option.add_argument("--no-sandbox")
driver2 = webdriver.Chrome(options=option)
for cookie in cookies:
    driver.add_cookie(cookie)
driver.refresh()
sleep(5)
driver2.get("https://BANK_WEB_SITE.com/MY_PORTFOLIO")
I solved the issue with the following code. The solution was to:
1) fix the mistake by changing "driver.add_cookie(cookie)" to "driver2.add_cookie(cookie)"
2) load the web page before adding the cookies
import pickle
import os
from time import sleep
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

# Session 1: log in and save the cookies
option = webdriver.ChromeOptions()
option.add_argument("--no-sandbox")
driver = webdriver.Chrome(options=option)
driver.get("https://BANK_WEB_SITE.com/login/")
sleep(5)
LOGIN()  # placeholder for the actual login steps
pickle.dump(driver.get_cookies(), open("cookies.pkl", "wb"))

# Session 2: load a page on the same domain first, then add the cookies
option = webdriver.ChromeOptions()
option.add_argument("--no-sandbox")
driver2 = webdriver.Chrome(options=option)
driver2.get("https://BANK_WEB_SITE.com/MY_PORTFOLIO/")
cookies = pickle.load(open("cookies.pkl", "rb"))
for cookie in cookies:
    driver2.add_cookie(cookie)
driver2.get("https://BANK_WEB_SITE.com/MY_PORTFOLIO/")
I'm building a web scraper using Selenium. I was able to read the data from the table on the first and second pages, but I cannot read the data on the following pages. Can anybody help me?
Below is the code I am using.
NoSuchElementException: Message: no such element: Unable to locate element:
{"method":"xpath","selector":"//table[1]/tbody[1]/tr[@class='painel' and 1]/td[2]/a[1 and @href='javascript:pesquisar(2);']"}
(Session info: headless chrome=86.0.4240.75)
import time
import requests
import pandas as pd
from bs4 import BeautifulSoup
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver import ActionChains
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.keys import Keys
import os
import json

url = 'https://www.desaparecidos.pr.gov.br/desaparecidos/desaparecidos.do?action=iniciarProcesso&m=false'

option = Options()
driver = webdriver.Chrome('chromedriver', options=option)
driver.get(url)
time.sleep(5)

# Page 1
lista = driver.find_element_by_xpath('//*[@id="list_tabela"]/tbody')
lista_text = lista.text
print(lista_text)

# Page 2
driver.implicitly_wait(5)
driver.find_element_by_xpath("//table[1]/tbody[1]/tr[@class='painel' and 1]/td[2]/a[1 and @href='javascript:pesquisar(2);']").click()
time.sleep(5)
lista = driver.find_element_by_xpath('//*[@id="list_tabela"]/tbody')
lista_text = lista.text
print(lista_text)

# Page 3 - this is where it fails
driver.implicitly_wait(10)
driver.find_element_by_xpath("//table[1]/tbody[1]/tr[@class='painel' and 1]/td[2]/a[3 and @href='javascript:pesquisar(3);']").click()
time.sleep(10)
lista = driver.find_element_by_xpath('//*[@id="list_tabela"]/tbody')
lista_text = lista.text
print(lista_text)
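One common pattern for a pager like this is to target the link only by its href and re-locate it on every page, waiting until it is clickable before the click. A minimal sketch of that idea, assuming the same page and the javascript:pesquisar(n) links used above:
# Walk through a few pages by clicking the pesquisar(n) link for each one.
for page in range(2, 6):  # hypothetical page range
    link = WebDriverWait(driver, 10).until(EC.element_to_be_clickable(
        (By.XPATH, f"//a[@href='javascript:pesquisar({page});']")))
    link.click()
    tbody = WebDriverWait(driver, 10).until(EC.presence_of_element_located(
        (By.XPATH, '//*[@id="list_tabela"]/tbody')))
    print(tbody.text)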
As my code shows, I am trying to automate the process of logging in and other things using Selenium in Python 3.7. I am stuck because it is showing "AttributeError: element_to_be_clickable has no object click" on the line botton_to_click.click().
from bs4 import BeautifulSoup
from bs4.element import Tag
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
base = 'https://www.wsj.com'
url = 'https://www.wsj.com/search/term.html?KEYWORDS=cybersecurity&min-date=2018/04/01&max-date=2019/03/31&isAdvanced=true&daysback=90d&andor=AND&sort=date-desc&source=wsjarticle,wsjpro&page=1'
browser = webdriver.Safari(executable_path='/usr/bin/safaridriver')
browser.get(url)
browser.find_element_by_id('editions-select').click()
browser.find_element_by_id('na,us').click()
botton_to_click = WebDriverWait(browser, 10).until(EC.element_to_be_clickable, ((By.XPATH,"//button[@type='button' and contains(.,'Sign In')]")))
botton_to_click.click()
browser.find_element_by_id('username').send_keys('##$%*&^%##$')
browser.find_element_by_id('password').send_keys('##*$%^!#')
browser.find_element_by_id('basic-login').click()
browser.find_element_by_id('masthead-container').click()
browser.find_element_by_id('searchInput').send_keys('cybersecurity')
browser.find_element_by_name('ADVANCED SEARCH').click()
browser.find_element_by_id('dp1560924131783').send_keys('2018/04/01')
browser.find_element_by_id('dp1560924131784').send_keys('2019/03/31')
browser.find_element_by_id('wsjblogs').click()
browser.find_element_by_id('wsjvideo').click()
browser.find_element_by_id('interactivemedia').click()
browser.find_element_by_id('sitesearch').click()
browser.close()
Thanks.
Remove the comma after element_to_be_clickable as shown below; it should resolve your issue.
botton_to_click = WebDriverWait(browser, 10).until(EC.element_to_be_clickable, ((By.XPATH,"//button[@type='button' and contains(.,'Sign In')]")))
to
botton_to_click = WebDriverWait(browser, 10).until(EC.element_to_be_clickable((By.XPATH,"//button[@type='button' and contains(.,'Sign In')]")))
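For context: until() expects a single callable, and EC.element_to_be_clickable((By.XPATH, ...)) builds that callable from the locator. With the comma, the condition and the locator are handed to until() as two separate arguments, so what comes back is not a web element and has no click() method. The general pattern, with a placeholder locator and the question's browser object, looks like this:
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait

# EC.element_to_be_clickable(locator) returns the condition; until() polls it
# and returns the element once it is clickable.
element = WebDriverWait(browser, 10).until(
    EC.element_to_be_clickable((By.ID, "some-id")))  # "some-id" is a placeholder
element.click()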
I want to keep Chrome open and close the console. I've read that webdriver.service.stop() will do this, but I can't get it to work. I've tried importing several things from selenium that are named service, but I still get "selenium.webdriver has no attribute 'service'". The last two imports are unused. Currently, when I run the script outside PyCharm, the console starts, and if I close it after the run it also closes the Chrome window. I would like to not have to manually close the console window.
This is my updated code as of 2019-03-15:
from selenium import webdriver
from selenium.webdriver import ChromeOptions
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.common import service
from selenium.webdriver.ie.service import service
chrome_options = ChromeOptions()
chrome_options.add_argument("--incognito")
chrome_options.add_argument("--start-maximized")
chrome_options.set_capability("detach", True)
DynamoForum = webdriver.Chrome(chrome_options=chrome_options)
DynamoForum.get("https://forum.dynamobim.com/")
Parent_window = DynamoForum.window_handles[0]
login = DynamoForum.find_element_by_class_name("header-buttons").click()
wait = WebDriverWait(DynamoForum, 20)
window_child = DynamoForum.window_handles[1]
DynamoForum.switch_to.window(window_child)
wait.until(EC.visibility_of_element_located((By.ID, "userName"))).send_keys("abc@abc.com")
DynamoForum.find_element_by_id("verify_user_btn").click()
wait.until(EC.visibility_of_element_located((By.ID, "password"))).send_keys("abc")
DynamoForum.find_element_by_id("btnSubmit").click()
try:
DynamoForum.service.stop()
except AttributeError:
pass
Below is the old code:
from selenium import webdriver
from selenium.webdriver import ChromeOptions
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.common import service
from selenium.webdriver.ie.service import service
chrome_options = ChromeOptions()
chrome_options.add_argument("--incognito")
DynamoForum = webdriver.Chrome(chrome_options=chrome_options)
DynamoForum.get("https://forum.dynamobim.com/")
Parent_window = DynamoForum.window_handles[0]
login = DynamoForum.find_element_by_class_name("header-buttons").click()
wait = WebDriverWait(DynamoForum, 20)
window_child = DynamoForum.window_handles[1]
DynamoForum.switch_to.window(window_child)
wait.until(EC.visibility_of_element_located((By.ID, "userName"))).send_keys("abc@abc.com")
DynamoForum.find_element_by_id("verify_user_btn").click()
wait.until(EC.visibility_of_element_located((By.ID, "password"))).send_keys("abc")
DynamoForum.find_element_by_id("btnSubmit").click()
webdriver.service.stop()
This is the error I'm getting. It has been solved by changing webdriver.service.stop to DynamoForum.service.stop.
C:/Users/cjr/PycharmProjects/DynamoForum/DynamoForum.py:13:
DeprecationWarning: use options instead of chrome_options
DynamoForum = webdriver.Chrome(chrome_options=chrome_options)
Traceback (most recent call last):
File "C:/Users/cjr/PycharmProjects/DynamoForum/DynamoForum.py", line 29,
in <module>
webdriver.service.stop()
AttributeError: module 'selenium.webdriver' has no attribute 'service'
This is a workaround only.
I know this is not the correct approach, but for the time being you can replace the last line with the block below. This lets the script finish without the error and keeps your browser open after execution.
try:
    webdriver.service.stop()
except AttributeError:
    pass
Let me know if this is helpful. I will keep digging into this in the meantime.
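For reference, a commonly used way to keep the Chrome window itself open after the script finishes (separate from the console question) is ChromeDriver's detach experimental option; a minimal sketch:
from selenium import webdriver
from selenium.webdriver import ChromeOptions

chrome_options = ChromeOptions()
# Ask ChromeDriver not to close the browser when the driving script exits.
chrome_options.add_experimental_option("detach", True)

driver = webdriver.Chrome(options=chrome_options)
driver.get("https://forum.dynamobim.com/")
# The script can end here; the browser window stays open because of "detach".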