Unable to convert my existing scraper to XMLHTTP request - VBA

I have written a script that gets the title from any website. It works flawlessly, but it uses Internet Explorer. I've tried hard to do the same with an XMLHTTP request, since performance is a big concern, but I can't get it working. What I've tried so far is:
Sub Title_scraping()
    Dim IE As Object
    Dim doc As Object, cel As Range, x As Long

    For Each cel In Range("A1:A5")
        Set IE = CreateObject("InternetExplorer.Application")
        IE.navigate cel.Value
        While IE.Busy              'wait for the page to finish loading
            DoEvents
        Wend
        Set doc = IE.document
        x = x + 1
        Cells(x, 2) = doc.title
        IE.Quit                    'release this IE instance before the next URL
    Next cel
End Sub
Sites I've tried and got results from:
https://stackoverflow.com/documentation/
https://codereview.stackexchange.com/
https://yts.ag/browse-movies

Combining your code with the code posted here, this is your final code:
Sub GetData()
    Dim title As String
    Dim objHttp As Object, cel As Range, x As Long

    Set objHttp = CreateObject("MSXML2.ServerXMLHTTP")

    For Each cel In Range("A1:A5")
        objHttp.Open "GET", cel.Value, False
        On Error Resume Next       'skip URLs that fail to respond
        objHttp.send ""
        On Error GoTo 0
        title = objHttp.responseText

        'pull the text between <title> and </title>
        If InStr(1, UCase(title), "<TITLE>") Then
            title = Mid(title, InStr(1, UCase(title), "<TITLE>") + Len("<TITLE>"))
            title = Mid(title, 1, InStr(1, UCase(title), "</TITLE>") - 1)
        Else
            title = ""
        End If

        x = x + 1
        Cells(x, 2) = title
    Next cel
End Sub
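As a side note, if the plain InStr/Mid split ever misses a page (for example when the <title> tag carries attributes or unusual casing), a regular expression can do the same extraction. The sketch below is only an illustration, not part of the original answer; it assumes the same A1:A5 input layout and uses the late-bound VBScript.RegExp object.
Sub GetTitlesViaRegex()
    'Illustrative sketch only: same layout as above, regex instead of InStr/Mid
    Dim http As Object, re As Object, matches As Object
    Dim cel As Range, x As Long

    Set http = CreateObject("MSXML2.XMLHTTP")
    Set re = CreateObject("VBScript.RegExp")
    re.Pattern = "<title[^>]*>([\s\S]*?)</title>"   'capture everything between the title tags
    re.IgnoreCase = True

    For Each cel In Range("A1:A5")
        http.Open "GET", cel.Value, False
        http.send
        x = x + 1
        Set matches = re.Execute(http.responseText)
        If matches.Count > 0 Then
            Cells(x, 2) = Trim$(matches(0).SubMatches(0))
        Else
            Cells(x, 2) = ""
        End If
    Next cel
End Sub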

Related

Can't get Web Scraped Table to populate multiple cells VBA

I am trying to scrape a table from a website via VBA. I am able to gather all the data from the table; however, I am only able to get it to populate a single cell rather than break the information out as it is formatted on the website. Essentially, the data should be in four columns and then go down as many rows as there is data. I know I'm missing something simple here and I just can't put my finger on it. Help please! The code is below:
Sub WebScrape()
    Dim objIE As InternetExplorer
    Dim ele As Object
    Dim y As Integer

    Set objIE = New InternetExplorer    'create the browser instance
    objIE.navigate "http://www.uscfinvestments.com/holdings/usci"
    Do While objIE.Busy = True Or objIE.readyState <> 4: DoEvents: Loop

    y = 1
    For Each ele In objIE.document.getElementById("holdingsTableDiv").getElementsByTagName("table")
        Debug.Print ele.textContent
        Sheets("Sheet1").Range("A" & y).Value = ele.Children(0).textContent
    Next

    ActiveWorkbook.Save
End Sub
I show a number of methods below. Personally, I prefer the last which uses an API.
Using clipboard:
A nice easy way, if you want the output to appear as it does on the page, is to simply copy the table to the clipboard and paste it into the sheet:
Option Explicit
'Requires a reference to Microsoft Internet Controls (for early-bound InternetExplorer)
Public Sub GetInfo()
    Dim ie As InternetExplorer, hTable As Object, clipboard As Object, ws As Worksheet, t As Date
    Const MAX_WAIT_SEC As Long = 10
    Set ws = ThisWorkbook.Worksheets("Sheet1")
    Set clipboard = GetObject("New:{1C3B4210-F441-11CE-B9EA-00AA006B1A69}")   'late-bound MSForms DataObject
    Set ie = New InternetExplorer
    With ie
        .Visible = True
        .Navigate2 "http://www.uscfinvestments.com/holdings/usci"
        While .Busy Or .readyState < 4: DoEvents: Wend

        t = Timer
        Do
            On Error Resume Next
            Set hTable = .document.getElementById("holdingsTableID")
            On Error GoTo 0
            If Timer - t > MAX_WAIT_SEC Then Exit Do
        Loop While hTable Is Nothing

        If Not hTable Is Nothing Then
            clipboard.SetText hTable.outerHTML
            clipboard.PutInClipboard
            ws.Cells(1, 1).PasteSpecial
        End If
        .Quit
    End With
End Sub
Looping table:
If you want to loop the rows and columns of a table, you can use the row class name and row number to determine how to write the values out:
Option Explicit
Public Sub GetInfo()
    Dim ie As InternetExplorer, hTable As Object, ws As Worksheet, t As Date
    Const MAX_WAIT_SEC As Long = 10
    Set ws = ThisWorkbook.Worksheets("Sheet1")
    Set ie = New InternetExplorer
    With ie
        .Visible = True
        .Navigate2 "http://www.uscfinvestments.com/holdings/usci"
        While .Busy Or .readyState < 4: DoEvents: Wend

        t = Timer
        Do
            On Error Resume Next
            Set hTable = .document.getElementById("holdingsTableID")
            On Error GoTo 0
            If Timer - t > MAX_WAIT_SEC Then Exit Do
        Loop While hTable Is Nothing

        If Not hTable Is Nothing Then
            WriteTable hTable, 1, ws
        End If
        .Quit
    End With
End Sub
Public Sub WriteTable(ByVal hTable As HTMLTable, Optional ByVal startRow As Long = 1, Optional ByVal ws As Worksheet)
    If ws Is Nothing Then Set ws = ActiveSheet
    Dim tRow As Object, td As Object, r As Long, c As Long, th As Object
    r = startRow
    With ws
        For Each tRow In hTable.getElementsByTagName("tr")
            c = 1
            If r = startRow Or tRow.className = "subHeader" Then
                For Each th In tRow.getElementsByTagName("th")
                    .Cells(r, c) = th.innerText
                    c = c + 1
                Next
            Else
                For Each td In tRow.getElementsByTagName("td")
                    .Cells(r, c).Value = td.innerText
                    c = c + 1
                Next td
            End If
            r = r + 1
        Next tRow
    End With
End Sub
API:
There is an API which serves the data in JSON format:
https://cssecure.alpsinc.com/api/v1//holding/usci
It requires authentication. I am using jsonconverter.bas to parse the JSON returned. After downloading and adding the .bas to your project, you need to go to VBE > Tools > References and add a reference to Microsoft Scripting Runtime.
Option Explicit
Public Sub GetValues()
    Dim json As Object, authorization As String
    With CreateObject("MSXML2.XMLHTTP")
        .Open "GET", "http://www.uscfinvestments.com/uscfinvestments-template/assets/javascript/api_key.php", False
        .send
        authorization = Split(Split(.responseText, "'")(1), "'")(0)
        .Open "GET", "https://cssecure.alpsinc.com/api/v1//holding/usci", False
        .setRequestHeader "Authorization", authorization
        .send
        Set json = JsonConverter.ParseJson(.responseText)
    End With

    Dim arr(), headers(), item As Object, r As Long
    headers = Array("Security", "Quantity", "Price", "Market Value")
    r = 1
    ReDim arr(1 To json.Count, 1 To 4)
    For Each item In json
        arr(r, 1) = item("name")
        arr(r, 2) = item("shares")
        If IsNull(item("contractprice")) Then
            arr(r, 3) = item("settlementprice")
        Else
            arr(r, 3) = item("contractprice")
        End If
        arr(r, 4) = item("marketvalue")
        r = r + 1
    Next

    With ThisWorkbook.Worksheets("Sheet1")
        .Cells(1, 1).Resize(1, UBound(headers) + 1) = headers
        .Cells(2, 1).Resize(UBound(arr, 1), UBound(arr, 2)) = arr
    End With
End Sub
There is not much to add here; QHarr has already covered the approaches. I used a hardcoded delay instead of an explicit wait to make the script less verbose.
Sub GetContent()
    Dim Html As HTMLDocument, elem As Object, tRow As Object, C&, R&

    With New InternetExplorer
        .Visible = False
        .navigate "http://www.uscfinvestments.com/holdings/usci"
        While .Busy = True Or .readyState < 4: DoEvents: Wend
        Set Html = .Document
    End With

    Application.Wait Now + TimeValue("00:00:03")   'make it 05 if it fails somehow

    For Each elem In Html.getElementById("holdingsTableID").Rows
        For Each tRow In elem.Cells
            C = C + 1: ThisWorkbook.Worksheets("Sheet1").Cells(R + 1, C) = tRow.innerText
        Next tRow
        C = 0: R = R + 1
    Next elem
End Sub

Get final URL with Excel VBA

I've been struggling with this issue and trying to find a solution here on Stack Overflow, but nothing has helped.
I have thousands of image links (column A), each of which takes you to the final JPG URL. It isn't a standard redirect: I've tried different redirect-checking websites and they don't detect it.
Here is an example:
https://www.pepperl-fuchs.com/global/en/doci.htm?docilang=ENG&view=showproductpicbypartno&partno=000046
It will get you here:
https://files.pepperl-fuchs.com/webcat/navi/productInfo/pd/d428540a.jpg
So I would like to extract all the final links into column B.
I found some code that opens IE for each link, but it is probably missing the part that copies the URL and pastes it into the cell:
Sub Test()
    Dim IE As Object
    Dim URL As Range
    Dim objDocument As Object
    Dim x As Integer

    Set IE = CreateObject("InternetExplorer.Application")

    With IE
        .Visible = True
        For Each URL In Range("A2:A16")
            .Navigate URL.Value
            While .busy Or .ReadyState <> 4: DoEvents: Wend
            If LCase(TypeName(objDocument)) = "htmldocument" Then
                Cells(A, 1).Value = objDocument.URL
                Cells(A, 2).Value = objDocument.Title
                x = x + 1
            End If
        Next
    End With
End Sub
Can you guys help me figure out what is missing? Unfortunately I'm not really familiar with VBA.
Thank you very much
Try this
Sub Test()
    Dim IE As Object
    Dim URL As Range
    Dim objDocument As Object
    Dim x As Integer

    Set IE = CreateObject("InternetExplorer.Application")

    With IE
        .Visible = True
        For Each URL In Range("A2:A16")
            .Navigate URL.Value
            While .busy Or .ReadyState <> 4: DoEvents: Wend
            URL.Offset(, 1).Value = .LocationURL
        Next
    End With
End Sub
Try this code
Sub Test()
    Dim html As HTMLDocument
    Dim ie As Object
    Dim objDocument As Object
    Dim url As Range
    Dim x As Integer

    Set ie = CreateObject("InternetExplorer.Application")
    x = 1

    With ie
        .Visible = True
        For Each url In Range("A2:A3")
            .navigate url.Value
            While .Busy Or .readyState <> 4: DoEvents: Wend
            Set html = .document
            x = x + 1
            Cells(x, 2).Value = html.url
            Cells(x, 3).Value = html.Title
        Next url
    End With
End Sub
Need more to test with, but this will be a lot faster, and you can easily adapt it to loop an array instead of the sheet by using Dim arr(): arr = .Range("A1:A" & .Cells(.Rows.Count, "A").End(xlUp).Row).Value and looping the first dimension (a sketch of that adaptation follows the code below).
Option Explicit
Public Sub GetInfo()
    Dim rng As Range
    With Worksheets("Sheet1")
        For Each rng In .Range("A1:A" & .Cells(.Rows.Count, "A").End(xlUp).Row)
            If InStr(rng.Value, "http") > 0 Then Debug.Print GetURL(rng.Value)
        Next
    End With
End Sub

Public Function GetURL(ByVal url As String) As String
    Dim sResponse As String, s As Long, e As Long
    With CreateObject("MSXML2.XMLHTTP")
        .Open "GET", url, False
        .send
        sResponse = StrConv(.responseBody, vbUnicode)
    End With
    s = InStr(1, sResponse, "https")
    e = InStr(1, sResponse, ".jpg") + 4
    GetURL = Mid(sResponse, s, e - s)
End Function
This does assume that all your links follow the same pattern as the first.
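For reference, here is a rough sketch of the array-based loop mentioned above. It is only an illustration: it reuses the GetURL function, assumes the results go to column B of Sheet1, and assumes more than one link so the range read returns a 2-D array.
Public Sub GetInfoFromArray()
    Dim arr As Variant, results() As Variant, i As Long, lastRow As Long

    With Worksheets("Sheet1")
        lastRow = .Cells(.Rows.Count, "A").End(xlUp).Row
        arr = .Range("A1:A" & lastRow).Value          'read every link in one go
        ReDim results(1 To UBound(arr, 1), 1 To 1)

        For i = 1 To UBound(arr, 1)
            If InStr(arr(i, 1), "http") > 0 Then results(i, 1) = GetURL(CStr(arr(i, 1)))
        Next i

        .Cells(1, "B").Resize(UBound(results, 1), 1).Value = results   'write every result back in one go
    End With
End Sub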

I'm trying to import innertext from a td-class. Getting lots of extra text

I'm trying to get the innerText of 'wfm-bodyText' elements. So, I want to loop through items on a webpage and import, in this case, 'H. Validate Gamer Correct'.
I thought the script below would work, but it actually gives me all kinds of extra text!
Public Sub CopyFromURL()
    Dim IE As InternetExplorer, doc As HTMLDocument
    Dim thisClass As IHTMLElement2, thisLink As IHTMLElement
    Dim rng As Range, cell As Range
    Const READYSTATE_COMPLETE As Integer = 4
    Dim TR_col As Object, TR As Object
    Dim TD_col As Object, TD As Object
    Dim row As Long, col As Long

    row = 1
    'Set rng = Range("A1:A5")
    'For Each cell In rng
    Set IE = CreateObject("InternetExplorer.Application")
    IE.Navigate URL
    Do While (IE.Busy Or IE.ReadyState <> READYSTATE_COMPLETE)
        DoEvents
    Loop

    Set TR_col = IE.Document.getElementsByTagName("TR")
    For Each TR In TR_col
        Set TD_col = TR.getElementsByTagName("TD")
        row = ActiveSheet.Cells(ActiveSheet.Rows.Count, "A").End(xlUp).row
        col = 1
        For Each TD In TD_col
            Cells(row, col) = TD.innerText
            col = col + 1
        Next
        row = row + 1
    Next
    'Next cell
    IE.Quit
End Sub
The URL is behind a firewall. I can't share it.
Again, below is the structure of the page. I want to copy the 'E. Verify phone number if applicable' into my sheet.
I tried the script below in an attempt to get every element from the page.
Sub DumpData()
    Set IE = CreateObject("InternetExplorer.Application")
    IE.Visible = True
    URL = "http://www.SharePoint.aspx"

    'Wait for site to fully load
    IE.Navigate2 URL
    Do While IE.Busy = True
        DoEvents
    Loop

    RowCount = 1
    With Sheets("Sheet1")
        .Cells.ClearContents
        RowCount = 1
        For Each itm In IE.document.all
            .Range("A" & RowCount) = itm.tagname
            .Range("B" & RowCount) = itm.ID
            .Range("C" & RowCount) = itm.classname
            .Range("D" & RowCount) = Left(itm.innertext, 1024)
            RowCount = RowCount + 1
        Next itm
    End With
End Sub
I still can't tell where the relevant data is coming from.
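For what it's worth, here is a sketch that narrows the dump above to cells whose class contains 'wfm-bodyText', based only on the description here. Since the real page is behind the firewall, the URL is a placeholder and the selector may need adjusting to the actual page.
Sub CopyBodyTextCells()
    'Illustrative sketch only: keep just the table cells with the target class
    Dim IE As Object, td As Object, r As Long

    Set IE = CreateObject("InternetExplorer.Application")
    IE.Visible = True
    IE.Navigate "http://www.SharePoint.aspx"              'placeholder URL
    Do While IE.Busy Or IE.readyState <> 4: DoEvents: Loop

    r = 1
    For Each td In IE.document.getElementsByTagName("td")
        If InStr(td.className, "wfm-bodyText") > 0 Then   'filter on the class name
            ThisWorkbook.Worksheets("Sheet1").Cells(r, 1) = Trim$(td.innerText)
            r = r + 1
        End If
    Next td

    IE.Quit
End Sub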

VBA HTML Scraping - '.innertext' from complex table

All,
I've created the following Module to scrape a single value (1m % change in London house prices) from the below address:
https://www.hometrack.com/uk/insight/uk-cities-house-price-index/
The specific value is nested within the following code:
The below VBA code is my attempt at scraping. I, perhaps wrongly, feel that I am very close to capturing the value - but the code will not work.
Does anyone know where I am going wrong here? It doesn't show an error message but also doesn't output any values.
Sub HousePriceData()
    Dim wb As Workbook
    Dim ws As Worksheet
    Dim TxtRng As Range
    Dim ie As Object
    Dim V As Variant
    Dim myValue As Variant

    Set ie = CreateObject("INTERNETEXPLORER.APPLICATION")
    ie.NAVIGATE "https://www.hometrack.com/uk/insight/uk-cities-house-price-index/"
    ie.Visible = False
    While ie.ReadyState <> 4
        DoEvents
    Wend

    Set wb = ActiveWorkbook
    Set ws = wb.Sheets("Input")
    Set TxtRng = ws.Range("C15")
    Set myValue = ie.document.getElementById("cities-index-table").getElementsByTagName("tr")(7).getElementsByTagName("td")(5)
    TxtRng = myValue.innerText
End Sub
Try to use XHR and primitive parsing instead of awkward IE:
Sub Test()
    Dim strUrl As String
    Dim strTmp As String
    Dim arrTmp As Variant

    strUrl = "https://www.hometrack.com/uk/insight/uk-cities-house-price-index/"

    With CreateObject("MSXML2.XMLHttp")
        .Open "GET", strUrl, False
        .Send ""
        strTmp = .ResponseText
    End With

    arrTmp = Split(strTmp, ">London</a></td>", 2)
    strTmp = arrTmp(1)
    arrTmp = Split(strTmp, "<td>", 7)
    strTmp = arrTmp(6)
    arrTmp = Split(strTmp, "</td>", 2)
    strTmp = arrTmp(0)

    ThisWorkbook.Sheets("Input").Range("C15").Value = strTmp
End Sub
Try using this:
Dim Engmt As String
Engmt = "ERRORHERE"

On Error Resume Next
Engmt = Trim(ie.document.getElementById("cities-index-table").getElementsByTagName("tr")(12).getElementsByTagName("td")(4).innerText)
On Error GoTo 0

If Engmt = "ERRORHERE" Then
    TxtRng.Value = "ERROR"
Else
    TxtRng.Value = Engmt
End If

Data Extraction from Webpage to Excel using VBA

I was trying to pull a table from a webpage, and so far I have been successful. Unfortunately, each row of the table contains some links, and when I pull the table I get only the text without the links. Is there any way to pull a table from a webpage using VBA, including the hyperlinks?
Here is my code:
Sub TableExample()
    Dim IE As Object
    Dim doc As Object
    Dim strURL As String

    strURL = "HERE I USED MY URL"
    ' replace with URL of your choice

    Set IE = CreateObject("InternetExplorer.Application")
    With IE
        '.Visible = True
        .Navigate strURL
        Do Until .readyState = 4: DoEvents: Loop
        Do While .Busy: DoEvents: Loop
        Set doc = IE.Document
        GetAllTables doc
        .Quit
    End With
End Sub
Sub GetAllTables(doc As Object)
    ' get all the tables from a webpage document, doc, and put them in a new worksheet
    Dim ws As Worksheet
    Dim rng As Range
    Dim tbl As Object
    Dim rw As Object
    Dim cl As Object
    Dim tabno As Long
    Dim nextrow As Long
    Dim I As Long

    Set ws = Worksheets.Add

    For Each tbl In doc.getElementsByTagName("TABLE")
        tabno = tabno + 1
        nextrow = nextrow + 1
        Set rng = ws.Range("B" & nextrow)
        rng.Offset(, -1) = "Table " & tabno
        For Each rw In tbl.Rows
            For Each cl In rw.Cells
                rng.Value = cl.outerText
                Set rng = rng.Offset(, 1)
                I = I + 1
            Next cl
            nextrow = nextrow + 1
            Set rng = rng.Offset(1, -I)
            I = 0
        Next rw
    Next tbl

    ws.Cells.ClearFormats
End Sub
When you execute "rng.Value = cl.outerText" you get only the text. If you need to keep the links and the rest of the HTML, use the innerHTML property instead: just replace "rng.Value = cl.outerText" with "rng.Value = cl.innerHTML". This will return the cell's entire HTML, including the links.
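If the goal is clickable links in the sheet rather than raw HTML, a small variation on the loop in GetAllTables (a sketch only, not part of the original answer) is to look for an anchor inside each cell and write it with Hyperlinks.Add; ws, rng and cl below are the variables already used in that sub.
    'Inside the "For Each cl In rw.Cells" loop, replacing rng.Value = cl.outerText:
    Dim lnk As Object
    If cl.getElementsByTagName("a").Length > 0 Then
        Set lnk = cl.getElementsByTagName("a")(0)      'first link in the cell, if any
        ws.Hyperlinks.Add Anchor:=rng, Address:=lnk.href, TextToDisplay:=cl.innerText
    Else
        rng.Value = cl.innerText                       'no link: keep the plain text
    End If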