I scrape some websites for fun, using VBA as my tool. I use XMLHTTP and HTMLDocument because they are much faster than InternetExplorer.Application.
Public Sub XMLhtmlDocumentHTMLSourceScraper()
    Dim XMLHTTPReq As Object
    Dim htmlDoc As HTMLDocument
    Dim postURL As String

    postURL = "http://foodffs.tumblr.com/archive/2015/11"

    Set XMLHTTPReq = New MSXML2.XMLHTTP
    With XMLHTTPReq
        .Open "GET", postURL, False
        .Send
    End With

    Set htmlDoc = New HTMLDocument
    With htmlDoc
        .body.innerHTML = XMLHTTPReq.responseText
    End With

    i = 0
    Set varTemp = htmlDoc.getElementsByClassName("post_glass post_micro_glass")
    For Each vr In varTemp
        ''''the next line is important to solve this issue *1
        Cells(1, 1) = vr.outerHTML
        Set varTemp2 = vr.getElementsByTagName("SPAN class=post_date")
        Cells(i + 1, 3) = varTemp2.Item(0).innerText
        ''''the next line raises error 438''''
        Set varTemp2 = vr.getElementsByClassName("hover_inner")
        Cells(i + 1, 4) = varTemp2.innerText
        i = i + 1
    Next vr
End Sub
I investigated this problem with the line marked *1.
Cells(1, 1) shows me the following:
<DIV class="post_glass post_micro_glass" title=""><A class=hover title="" href="http://foodffs.tumblr.com/post/134291668251/sugar-free-low-carb-coffee-ricotta-mousse-really" target=_blank>
<DIV class=hover_inner><SPAN class=post_date>...............
Notice that all the class attributes have lost their quotation marks; only the first element's class keeps them.
I really don't know why this happens.
(Well, I could parse it with getElementsByTagName("span"), but I would prefer to select by class.)
The getElementsByClassName method is not exposed on an element itself, only on the parent HTMLDocument. If you want to use it to locate elements within a DIV element, you need to create a sub-HTMLDocument composed of the .outerHTML of that specific DIV element.
Public Sub XMLhtmlDocumentHTMLSourceScraper()
    Dim xmlHTTPReq As New MSXML2.XMLHTTP
    Dim htmlDOC As New HTMLDocument, divSUBDOC As New HTMLDocument
    Dim iDIV As Long, iSPN As Long, iEL As Long
    Dim postURL As String, nr As Long, i As Long

    postURL = "http://foodffs.tumblr.com/archive/2015/11"

    With xmlHTTPReq
        .Open "GET", postURL, False
        .Send
    End With

    'Set htmlDOC = New HTMLDocument
    With htmlDOC
        .body.innerHTML = xmlHTTPReq.responseText
    End With

    i = 0
    With htmlDOC
        For iDIV = 0 To .getElementsByClassName("post_glass post_micro_glass").Length - 1
            nr = Sheet1.Cells(Rows.Count, 3).End(xlUp).Offset(1, 0).Row
            With .getElementsByClassName("post_glass post_micro_glass")(iDIV)
                'method 1 - run through multiples in a collection
                For iSPN = 0 To .getElementsByTagName("span").Length - 1
                    With .getElementsByTagName("span")(iSPN)
                        Select Case LCase(.className)
                            Case "post_date"
                                Cells(nr, 3) = .innerText
                            Case "post_notes"
                                Cells(nr, 4) = .innerText
                            Case Else
                                'do nothing
                        End Select
                    End With
                Next iSPN
                'method 2 - create a sub-HTML doc to facilitate getting els by classname
                divSUBDOC.body.innerHTML = .outerHTML 'only the HTML from this DIV
                With divSUBDOC
                    If CBool(.getElementsByClassName("hover_inner").Length) Then 'there is at least 1
                        'use the first
                        Cells(nr, 5) = .getElementsByClassName("hover_inner")(0).innerText
                    End If
                End With
            End With
        Next iDIV
    End With
End Sub
While the other .getElementsByXXXX methods can readily retrieve collections from within another element, getElementsByClassName needs to operate on what it believes to be the HTMLDocument as a whole, even if you have only fooled it into thinking that.
Here's an alternative approach. It's very similar to the original code but uses querySelectorAll to select the relevant span elements. One important point for this method is that vr has to be declared as being a specific element type and not as an IHTMLElement or generic Object:
Option Explicit

Public Sub XMLhtmlDocumentHTMLSourceScraper()
    ' Changed from generic Object to specific type - not
    ' strictly necessary to do this
    Dim XMLHTTPReq As MSXML2.XMLHTTP60
    Dim htmlDoc As HTMLDocument
    ' These declarations weren't included in the original code
    Dim i As Integer
    Dim varTemp As Object
    ' IMPORTANT: vr must be declared as a specific element type and not
    ' as an IHTMLElement or generic Object
    Dim vr As HTMLDivElement
    Dim varTemp2 As Object
    Dim postURL As String

    postURL = "http://foodffs.tumblr.com/archive/2015/11"

    ' Changed from XMLHTTP to XMLHTTP60 as XMLHTTP is equivalent
    ' to the older XMLHTTP30
    Set XMLHTTPReq = New MSXML2.XMLHTTP60
    With XMLHTTPReq
        .Open "GET", postURL, False
        .Send
    End With

    Set htmlDoc = New HTMLDocument
    With htmlDoc
        .body.innerHTML = XMLHTTPReq.responseText
    End With

    i = 0
    Set varTemp = htmlDoc.getElementsByClassName("post_glass post_micro_glass")
    For Each vr In varTemp
        ''''the next line is important to solve this issue *1
        Cells(1, 1) = vr.outerHTML
        Set varTemp2 = vr.querySelectorAll("span.post_date")
        Cells(i + 1, 3) = varTemp2.Item(0).innerText
        Set varTemp2 = vr.getElementsByClassName("hover_inner")
        ' incorporating correction from Jeeped's comment (#56349646)
        Cells(i + 1, 4) = varTemp2.Item(0).innerText
        i = i + 1
    Next vr
End Sub
Notes:
XMLHTTP is equivalent to XMLHTTP30, as described here.
The apparent need to declare a specific element type is explored in this question; however, unlike getElementsByClassName, querySelectorAll doesn't exist in any version of IHTMLElement.
Related
I am trying to search for strings in a PDF and extract data points after those strings, but I get a "Class not registered" error on the line of code shown below. The reference I am using is the Adobe Acrobat 8.0 Type Library.
Please help. Thanks.
If (objAVDoc.Open(strFilename, "")) Then
The error is raised on the line above.
Sub callFunc()
    getTextFromPDF ("C:\XXXXXX\XXXXX\6. CDS vs FA\052835022.pdf")
End Sub

Function getTextFromPDF(ByVal strFilename As String) As String
    Dim objAVDoc As New AcroAVDoc
    Dim objPDDoc As New AcroPDDoc
    Dim objPage As AcroPDPage
    Dim objSelection As AcroPDTextSelect
    Dim objHighlight As AcroHiliteList
    Dim pageNum As Long
    Dim tCount As Long
    Dim strText As String

    strText = ""

    If (objAVDoc.Open(strFilename, "")) Then
        Set objPDDoc = objAVDoc.GetPDDoc
        For pageNum = 0 To objPDDoc.GetNumPages() - 1
            Set objPage = objPDDoc.AcquirePage(pageNum)
            Set objHighlight = New AcroHiliteList
            objHighlight.Add 0, 10000 ' Adjust this up if it's not getting all the text on the page
            Set objSelection = objPage.CreatePageHilite(objHighlight)
            If Not objSelection Is Nothing Then
                For tCount = 0 To objSelection.GetNumText - 1
                    strText = strText & objSelection.GetText(tCount)
                Next tCount
            End If
        Next pageNum
        objAVDoc.Close 1
    End If

    getTextFromPDF = strText
End Function
I am trying to scrape Amazon inventory. Here's the link I used:
https://www.amazon.com/Stratford-Pharmaceuticals-Omega-Fatty-Strength/dp/B006JCU54Y/ref=sr_1_2?s=pet-supplies&ie=UTF8&qid=1518816130&sr=1-2&keywords=stratford
There is a section titled "Compare with similar items" from which I need to extract the prices (I have already done that) and also the inventory quantity.
The second part is not directly available. Manually, I have to click "Add to Cart", then from the next page click "Cart", then open the "Quantity" drop-down, select "10+", type any large number (say 999), and click "Update".
An alert message then appears that contains the remaining inventory, like this:
(This seller has only 35 of these available. To see if more are available from another seller,) >> so the desired number here is 35.
Here's the Excel file, and snapshots that illustrate the manual steps.
I used IE, but if it is possible to use XMLHTTP that would be great, of course.
Here's the code I have devised so far:
Sub Test()
    Dim ws As Worksheet
    Dim ie As Object
    Dim allLnks As Object
    Dim lnk As Object
    Dim r As Long
    Dim liElem As Object
    Dim prElem As Object
    Dim crtElem As Object
    Dim elem As Object
    Dim cnt As Integer
    Dim inputElem As Object
    Dim inputEle As Object

    Set ws = ThisWorkbook.Worksheets("Sheet2")
    Set ie = CreateObject("InternetExplorer.Application")

    With ie
        .Visible = True
        .navigate ("https://www.amazon.com/Stratford-Pharmaceuticals-Omega-Fatty-Strength/dp/B006JCU54Y/ref=sr_1_2?s=pet-supplies&ie=UTF8&qid=1518816130&sr=1-2&keywords=stratford")
        Do: DoEvents: Loop Until .readystate = 4

        ws.Range("B2").Value = Format(Now(), "dd/mm/yyyy - hh:mm:ss")

        Set liElem = .document.getelementbyid("detail-bullets").getelementsbytagname("table")(0).getelementsbytagname("ul")(0)
        For Each elem In liElem.getelementsbytagname("li")
            If InStr(elem.innerText, "ASIN") > 0 Then ws.Range("B1").Value = Replace(elem.innerText, "ASIN: ", "")
            If InStr(elem.innerText, "Rank:") > 0 Then ws.Range("B3").Value = MyUDF(elem.innerText, "Rank: ", "(")
            If InStr(elem.innerText, "Review:") > 0 Then ws.Range("B4").Value = Replace(Split(Trim(Split(elem.innerText, "Review: ")(1)), vbLf)(1), Chr(13), "")
        Next elem

        Set prElem = .document.getelementbyid("comparison_price_row")
        For Each elem In prElem.getelementsbytagname("td")
            cnt = cnt + 1
            ws.Range("A" & cnt + 4).Value = "Seller " & cnt
            ws.Range("B" & cnt + 4).Value = elem.getElementsByClassName("a-offscreen")(0).innerText
        Next elem

        cnt = 0
        Set crtElem = .document.getelementbyid("HLCXComparisonTable").getElementsByClassName("a-button-inner")
        For Each elem In crtElem
            .navigate elem.getelementsbytagname("a")(0).href
            Do: DoEvents: Loop Until .readystate = 4
            .navigate .document.getElementsByClassName("a-button-inner")(0).getelementsbytagname("a")(0).href
            Do: DoEvents: Loop Until .readystate = 4
            cnt = cnt + 1
            ws.Range("C" & cnt + 4).Value = Replace(Split(Split(MyUDF(.document.getElementsByClassName("a-row a-spacing-base sc-action-quantity sc-action-quantity-right")(0).innerHTML, "maxlength=", "quantity="), "autocomplete")(0), "=")(1), """", "")
        Next elem
        Stop
        '.Quit
    End With
End Sub

Function MyUDF(s As String, b As String, a As String) As String
    Dim arr() As String
    Dim r As String

    arr = Split(s, b)
    If UBound(arr) > 0 Then
        r = arr(1)
        arr = Split(r, a)
        If UBound(arr) > 0 Then
            r = arr(0)
        End If
    End If

    MyUDF = Trim(r)
End Function
Here are snapshots that may help
CSS Selector to get stock info
Taking the following example from your code:
You can use a CSS selector to target the text regarding stock levels.
.sc-product-availability
For example, the same CSS query can be run against the cart view page HTML that your code generates.
The . is the selector for ClassName.
VBA
You can use the .document.querySelectorAll method to retrieve a nodeList of the matching items (2 in the example)
Dim nodeList As Object
Set nodeList = .document.querySelectorAll(".sc-product-availability")
You would then loop over its length to retrieve the items (not tested, but this is the general method):
Dim i As Long
For i = 0 To nodeList.Length - 1
    Debug.Print nodeList.Item(i).innerText
Next i
Hopefully that is useful to you.
Give it a try. It should fetch the number you are after. I used XMLHTTP and Selenium in combination to make the script run a little faster. I could not use an XMLHTTP request for the second step because the links are generated by JavaScript.
On running the script below you can find out how many of these items the seller has. Even if the seller has none of them, the script will not break, as I have already handled that case.
Here it is:
Sub GetInfo()
    Const base As String = "https://www.amazon.com"
    Const mainurl As String = "https://www.amazon.com/Stratford-Pharmaceuticals-Omega-Fatty-Strength/dp/B006JCU54Y/ref=sr_1_2?s=pet-supplies&ie=UTF8&qid=1518816130&sr=1-2&keywords=stratford"
    Dim Http As New XMLHTTP60, Htmldoc As New HTMLDocument, itext As Object
    Dim driver As New ChromeDriver, idic As New Scripting.Dictionary
    Dim post As Object, oinput As Object, posts As Object, elem As Object
    Dim idrop As Object, oclick As Object, I&, R&, key As Variant

    With Http
        .Open "GET", mainurl, False
        .send
        Htmldoc.body.innerHTML = .responseText
    End With

    With Htmldoc.querySelectorAll("[id^='comparison_add_to_cart_'].a-button-text")
        For I = 0 To .Length - 1
            idic(base & Replace(.item(I).getAttribute("href"), "about:", "")) = 1
        Next I
    End With

    For Each key In idic.keys
        driver.get key
        Set post = driver.FindElementByCss("input[value='addToCart']", Raise:=False, timeout:=10000)
        If Not post Is Nothing Then
            post.Click
        End If
        Set posts = driver.FindElementById("hlb-view-cart-announce", timeout:=10000)
        posts.Click
        Set elem = driver.FindElementByCss("span#a-autoid-0-announce", timeout:=10000)
        elem.Click
        Set idrop = driver.FindElementById("dropdown1_9", timeout:=10000)
        idrop.Click
        Set oinput = driver.FindElementByCss("input[name='quantityBox']", timeout:=10000)
        oinput.SendKeys "100"
        Set oclick = driver.FindElementByCss("#a-autoid-1", timeout:=10000)
        oclick.Click
        Set itext = driver.FindElementByCss(".sc-quantity-update-message span.a-size-base", Raise:=False, timeout:=5000)
        If Not itext Is Nothing Then
            R = R + 1: Cells(R, 1) = itext.Text
        Else
            R = R + 1: Cells(R, 1) = "Sorry dear nothing found"
        End If
    Next key
End Sub
References to add:
Selenium Type Library
Microsoft HTML Object Library
Microsoft XML, v6.0
Microsoft Scripting Runtime
The output you get may look like the line below. You can then use a regex to parse out the number 48:
This seller has only 48 of these available. To see if more are available from another seller, go to the product detail page.
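As a rough illustration (my own sketch, not part of the answer above), a VBScript.RegExp pattern along the lines of "has only (\d+) of these available" would pull that number out of the message:

Sub ParseAvailableQuantity()
    Dim re As Object, matches As Object
    Dim msg As String

    ' The example message produced by the scraper above
    msg = "This seller has only 48 of these available. To see if more are " & _
          "available from another seller, go to the product detail page."

    Set re = CreateObject("VBScript.RegExp")
    re.Pattern = "has only (\d+) of these available"

    Set matches = re.Execute(msg)
    If matches.Count > 0 Then
        Debug.Print matches(0).SubMatches(0)   ' prints 48
    Else
        Debug.Print "No quantity found"
    End If
End Sub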
I want to use VBA to open up a webpage for me (this webpage is made up of HTML with cells of data), find some keywords, and email out the keywords and a certain number of rows of data above and below them. To do this, though, I need to be able to find the location of the keywords (e.g. row 3, column 2, or line 4, characters 4-10, etc.). Are there any commands in the Internet Explorer library that will allow me to do this? So far I have code for one keyword only, which will go to the keyword and select/highlight it. Now I need to find out how to grab a certain number of rows above/below it and send them out.
Also, a side question: if you know a good way to modify my current code into a nested loop that scans through the whole webpage for multiple keywords, that would be very helpful!
Sub subFindScrollIE()
    Dim boolFound As Boolean
    Dim ie As InternetExplorer

    Set ie = New InternetExplorer
    ie.Navigate "my URL HERE"
    strTemp = "KEYWORD1"

    Do Until ie.ReadyState = READYSTATE_COMPLETE
        'DoEvents
    Loop

    ie.Visible = True

    Set txt = ie.Document.body.createTextRange()
    boolFound = txt.findText(strTemp)
    txt.moveStart "character", -1
    txt.findText strTemp
    txt.Select
    txt.ScrollIntoView

    Set ie = Nothing
End Sub
You could continue with the approach you have started if you use regular expressions to locate the text you are after along with the surrounding text.
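For instance (a sketch of my own, not tested against your page; the keyword and the 50-character context window are placeholders), a VBScript.RegExp search over the page body could look like this:

Sub FindKeywordWithContext()
    Dim re As Object, matches As Object, m As Object
    Dim bodyText As String

    ' In your code this would be ie.Document.body.innerText
    bodyText = "... some text KEYWORD1 more text ..."

    Set re = CreateObject("VBScript.RegExp")
    re.Global = True
    re.IgnoreCase = True
    ' capture up to 50 characters either side of the keyword
    re.Pattern = ".{0,50}KEYWORD1.{0,50}"

    Set matches = re.Execute(bodyText)
    For Each m In matches
        Debug.Print m.Value
    Next m
End Sub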
Personally, I would favor using HTML objects to find what you're after. Here is some example code that iterates through generic tables:
Sub subFindScrollIE()
    Dim strTemp() As Variant, output() As String, txt As String
    Dim tr As HTMLTableRow, r As Integer, i As Integer
    Dim tRows As IHTMLElementCollection
    Dim xlR As Byte, c As Byte
    Dim ie As InternetExplorerMedium

    Set ie = New InternetExplorerMedium
    ie.Visible = True
    ie.Navigate "E:\Dev\table.htm"
    strTemp = Array("abc", "mno", "vwx", "efg")

    Do Until (ie.ReadyState = 4 Or ie.ReadyState = 3): Loop

    Set tRows = ie.Document.body.getElementsByTagName("tr")
    xlR = 2

    ' loop through rows
    For r = 0 To tRows.Length - 1
        Set tr = tRows(r)
        ' loop through search text
        For i = 0 To UBound(strTemp)
            ' search row for string
            txt = LCase(tr.innerHTML)
            If (InStr(txt, LCase(strTemp(i))) > 0) Then
                ' search string found. split table data into array
                txt = tr.innerHTML
                txt = Replace(txt, "</td><td>", "~")
                txt = Replace(txt, "<td>", "")
                txt = Replace(txt, "</td>", "")
                output = Split(txt, "~")
                ' populate cells from array
                For c = 0 To UBound(output)
                    Sheet1.Cells(xlR, c + 2) = output(c)
                Next c
                xlR = xlR + 2
            End If
        Next i
    Next r

    ie.Quit
    Set ie = Nothing
End Sub
I have always found you a great help when I have questions. This time it's something related to Excel VBA.
I have a macro that brings back data from a website. You simply have to hard-code the connection string into it (xmlHttp.Open "GET", "http://www.example.com", False).
Sub GET_HTML_DATA()
    Dim xmlHttp As Object
    Dim TR_col As Object, TR As Object
    Dim TD_col As Object, TD As Object
    Dim row As Long, col As Long

    Set xmlHttp = CreateObject("MSXML2.XMLHTTP.6.0")
    xmlHttp.Open "GET", "http://www.example.com", False
    xmlHttp.setRequestHeader "Content-Type", "text/xml"
    xmlHttp.send

    Dim html As Object
    Set html = CreateObject("htmlfile")
    html.body.innerHTML = xmlHttp.responseText

    Dim tbl As Object
    Set tbl = html.getElementById("curr_table")

    row = 1
    col = 1

    Set TR_col = html.getElementsByTagName("TR")
    For Each TR In TR_col
        Set TD_col = TR.getElementsByTagName("TD")
        For Each TD In TD_col
            Cells(row, col) = TD.innerText
            col = col + 1
        Next
        col = 1
        row = row + 1
    Next
End Sub
I was wondering if, and how, this code can be changed to accept the connection string as a parameter, so that I can call it with Run "GET_HTML_DATA(parameter)".
I have tried declaring a parameter in the parentheses and using it in place of www.example.com, but when I run the macro it tells me "The macro may not be available in this workbook...".
Am I doing it right, or is there another way I do not know about?
In your Sub's parentheses you need to declare the parameter, e.g. (inputUrl As String), and then use inputUrl in your code (note that input itself is a reserved word in VBA, so pick a different name). You can then call it with Application.Run "GET_HTML_DATA", parameter, passing the argument separately rather than embedding it in the macro name string. Alternatively, if you need to return some output, you can simply put all your code into a function, like:
Function myFunction(inputUrl As String) As Double
    'code goes here
End Function
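For illustration, here is a minimal sketch of the Sub version (GET_HTML_DATA trimmed to the request only, and CallerExample is a made-up name), showing how the parameter is declared and how the macro is then called:

' Sketch only: the body is shortened; keep the parsing code from the original macro.
Sub GET_HTML_DATA(url As String)
    Dim xmlHttp As Object
    Set xmlHttp = CreateObject("MSXML2.XMLHTTP.6.0")
    xmlHttp.Open "GET", url, False
    xmlHttp.setRequestHeader "Content-Type", "text/xml"
    xmlHttp.send
    ' ... parse xmlHttp.responseText as before ...
End Sub

Sub CallerExample()
    ' With Application.Run the argument is passed separately,
    ' not embedded in the macro name string
    Application.Run "GET_HTML_DATA", "http://www.example.com"
    ' or simply call it directly:
    GET_HTML_DATA "http://www.example.com"
End Sub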
In a VBA module in Excel 2007, is it possible to call a web service? If so, any code snippets? How would I add the web reference?
Yes You Can!
I worked on a project that did that (see comment). Unfortunately no code samples from that one, but googling revealed these:
How you can integrate data from several Web services using Excel and VBA
STEP BY STEP: Consuming Web Services through VBA (Excel or Word)
VBA: Consume Soap Web Services
Here's an overview from MS:
Consuming Web Services in Excel 2007
For an updated answer see this SO question:
calling web service using VBA code in excel 2010
Both threads should be merged though.
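To give a rough idea of the plumbing involved (a sketch of my own, not taken from the linked articles; the endpoint, SOAPAction and envelope are placeholders you would replace with values from the service's WSDL), a SOAP call with MSXML2.XMLHTTP boils down to:

Sub CallSoapServiceSketch()
    Dim http As Object
    Dim envelope As String

    ' Placeholder SOAP envelope - fill in the real namespaces and body elements
    envelope = "<?xml version=""1.0"" encoding=""utf-8""?>" & _
               "<soap:Envelope xmlns:soap=""http://schemas.xmlsoap.org/soap/envelope/"">" & _
               "<soap:Body></soap:Body>" & _
               "</soap:Envelope>"

    Set http = CreateObject("MSXML2.XMLHTTP.6.0")
    http.Open "POST", "http://www.example.com/service.asmx", False
    http.setRequestHeader "Content-Type", "text/xml; charset=utf-8"
    http.setRequestHeader "SOAPAction", "http://www.example.com/SomeOperation"
    http.send envelope

    ' Raw SOAP response; parse it with MSXML2.DOMDocument as needed
    Debug.Print http.responseText
End Sub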
In Microsoft Excel 2007, try installing the "Web Service Reference Tool" add-in. Use the WSDL to add the web services, then use the following code in a module to fetch the necessary data from the web service.
Sub Demo()
    Dim XDoc As MSXML2.DOMDocument
    Dim xEmpDetails As MSXML2.IXMLDOMNode
    Dim xParent As MSXML2.IXMLDOMNode
    Dim xChild As MSXML2.IXMLDOMNode
    Dim query As String
    Dim Col, Row As Integer
    Dim objWS As New clsws_GlobalWeather

    Set XDoc = New MSXML2.DOMDocument
    XDoc.async = False
    XDoc.validateOnParse = False

    query = objWS.wsm_GetCitiesByCountry("india")
    If Not XDoc.LoadXML(query) Then 'strXML is the string with XML'
        Err.Raise XDoc.parseError.ErrorCode, , XDoc.parseError.reason
    End If
    XDoc.LoadXML (query)

    Set xEmpDetails = XDoc.DocumentElement
    Set xParent = xEmpDetails.FirstChild

    Worksheets("Sheet3").Cells(1, 1).Value = "Country"
    Worksheets("Sheet3").Cells(1, 1).Interior.Color = RGB(65, 105, 225)
    Worksheets("Sheet3").Cells(1, 2).Value = "City"
    Worksheets("Sheet3").Cells(1, 2).Interior.Color = RGB(65, 105, 225)

    Row = 2
    Col = 1
    For Each xParent In xEmpDetails.ChildNodes
        For Each xChild In xParent.ChildNodes
            Worksheets("Sheet3").Cells(Row, Col).Value = xChild.Text
            Col = Col + 1
        Next xChild
        Row = Row + 1
        Col = 1
    Next xParent
End Sub
Excel 2013: read data from a web service and bash the JSON about until you can get what you want out of it (given that the JSON will always be in the same format).
This code should just work without the need for any plugins.
You will need your own free API key from the currency converter website though.
I used it to load the USD to GBP value into a cell on my sheet.
Option Explicit

Sub Test_LateBinding()
    Dim objRequest As Object
    Dim strUrl As String
    Dim blnAsync As Boolean
    Dim strResponse As String

    Set objRequest = CreateObject("MSXML2.XMLHTTP")
    strUrl = "https://free.currconv.com/api/v7/convert?q=USD_GBP&compact=ultra&apiKey=[MY_API_KEY]"
    blnAsync = True

    With objRequest
        .Open "GET", strUrl, blnAsync
        .SetRequestHeader "Content-Type", "application/json"
        .Send
        While objRequest.readyState <> 4
            DoEvents
        Wend
        strResponse = .responsetext
    End With

    Dim responseArray() As String
    responseArray = Split(strResponse, ":", -1)

    Dim value As String
    value = responseArray(1)

    Dim valueArray() As String
    valueArray = Split(value, "}", -1)

    Dim finalValue As String
    finalValue = valueArray(0)

    Sheet2.Cells(22, "C") = finalValue
End Sub