Scraping Historical Data Table using VBA

I'm trying to scrape the Historical Data table from Yahoo Finance using VBA. However, I only manage to get a single value instead of the whole table. Is there an easy method to do this? Please help me.
Public Sub History()
    Dim html As HTMLDocument, hTable As HTMLTable '<== Tools > References > Microsoft HTML Object Library
    Set html = New HTMLDocument
    With CreateObject("MSXML2.XMLHTTP")
        .Open "GET", "https://finance.yahoo.com/quote/0166.KL/history?p=0166.KL", False
        .send
        html.body.innerHTML = .responseText
    End With
    Application.ScreenUpdating = False
    ActiveSheet.Cells(2, 2).Value = html.getElementsByClassName("Py(10px) Pstart(10px)")(0).innerText
    Application.ScreenUpdating = True
End Sub

Your code writes only the first element of the returned collection; that is what the (0) index in ...(0).innerText selects.
Please try the following code:
Public Sub History()
    Dim html As HTMLDocument, hTable As HTMLTable '<== Tools > References > Microsoft HTML Object Library
    Dim hist As Object, el As Variant, i As Long, j As Long, startCel As Range, boolDiv As Boolean

    Set html = New HTMLDocument
    With CreateObject("MSXML2.XMLHTTP")
        .Open "GET", "https://finance.yahoo.com/quote/0166.KL/history?p=0166.KL", False
        .send
        html.body.innerHTML = .responseText
    End With

    Application.ScreenUpdating = False
    Set hist = html.getElementsByClassName("Py(10px) Pstart(10px)")
    i = 2: j = 2
    Set startCel = ActiveSheet.Cells(1, 2)
    Application.Calculation = xlCalculationManual

    For Each el In hist
        If j <= 7 Then
            ActiveSheet.Cells(i, j).Value = el.innerText: j = j + 1
        Else
            If InStr(el.innerText, "Dividend") > 0 Then boolDiv = True
            j = 2: i = i + 1
            ActiveSheet.Cells(i, j).Value = el.innerText
            If boolDiv Then
                boolDiv = False: j = 2: i = i + 1
            Else
                j = j + 1
            End If
        End If
    Next

    Application.Calculation = xlCalculationAutomatic
    Application.ScreenUpdating = True
End Sub


Data Scraping from Website - receiving error on different player pages

I'm trying to scrape game logs from basketball-reference.com. It worked perfectly on the two players I chose (DeMar DeRozan and LaMarcus Aldridge), but then I started going through other players and it just wouldn't scrape the data for many of them (e.g. Kevin Durant).
I have no idea why it wouldn't work. For example, I tried Stephen Curry and it worked fine, but for players like Draymond Green and Kevin Durant the code would just not scrape the data at all. For some reason, everything stopped working after the Date column.
Sub Data()
    Dim ieObj As InternetExplorer
    Dim htmlEle As IHTMLElement
    Dim i As Integer

    i = 1

    Set ieObj = New InternetExplorer
    ieObj.Visible = True
    ieObj.navigate "https://www.basketball-reference.com/players/d/duranke01/gamelog/2019"

    Application.ScreenUpdating = False
    Application.DisplayStatusBar = True
    Application.Wait Now + TimeValue("00:00:05")

    For Each htmlEle In ieObj.document.getElementsByClassName("stats_table")(0).getElementsByTagName("tr")
        With ActiveSheet
            .Range("A" & i).Value = htmlEle.Children(0).textContent
            .Range("B" & i).Value = htmlEle.Children(1).textContent
            .Range("C" & i).Value = htmlEle.Children(2).textContent
            .Range("D" & i).Value = htmlEle.Children(3).textContent
        End With
        i = i + 1
        On Error Resume Next
    Next htmlEle
End Sub
The error always happens on this line:
.Range("D" & i).Value = htmlEle.Children(3).textContent
I tried skipping columns too, but it still wouldn't work.
I found no problem using the table id, as seen in the .responseText, and using XMLHTTP instead of a browser.
I only tested with 3 URLs, placed in Sheet1 cells A1:A3:
https://www.basketball-reference.com/players/d/duranke01/gamelog/2019
https://www.basketball-reference.com/players/c/curryst01/gamelog/2019
https://www.basketball-reference.com/players/g/greendr01/gamelog/2019
With this site you can sometimes encounter tables inside HTML comments, so I stripped out the comment tags before processing. This was not necessary for the links I tried.
I use the clipboard to copy and paste the table, but you could just use
Set hTable = html.getElementById("pgl_basic")
and then loop the tr and td elements as you wish using getElementsByTagName.
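A minimal sketch of that looping alternative, assuming html already holds the fetched document and ws is whichever worksheet you want to write to (both names are placeholders for this sketch):
Dim hTable As Object, tRow As Object, td As Object, r As Long, c As Long
Set hTable = html.getElementById("pgl_basic")
If Not hTable Is Nothing Then
    r = 1
    For Each tRow In hTable.getElementsByTagName("tr")
        c = 1
        For Each td In tRow.getElementsByTagName("td") 'note: th header cells are not written by this loop
            ws.Cells(r, c).Value = td.innerText
            c = c + 1
        Next td
        r = r + 1
    Next tRow
End If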
References (VBE>Tools>References):
Microsoft HTML Object Library
Option Explicit
Public Sub GetPlayerInfo()
    Dim urls(), i As Long, html As HTMLDocument, hTable As Object
    Dim ws As Worksheet, wsCurrent As Object, clipboard As Object
    Dim lastRow As Long, playerIdentifier As String, arr() As String

    Set clipboard = GetObject("New:{1C3B4210-F441-11CE-B9EA-00AA006B1A69}")
    Set ws = ThisWorkbook.Worksheets("Sheet1")
    lastRow = ws.Cells(ws.Rows.Count, "A").End(xlUp).Row
    urls = Application.Transpose(ws.Range("A1:A" & lastRow).Value)
    Set html = New HTMLDocument

    With CreateObject("MSXML2.XMLHTTP")
        For i = LBound(urls) To UBound(urls)
            .Open "GET", urls(i), False
            .send
            html.body.innerHTML = Replace$(Replace$(.responseText, "-->", vbNullString), "<!--", vbNullString) 'remove comments
            arr = Split(urls(i), "/")
            playerIdentifier = arr(5)
            If SheetExists(playerIdentifier) Then
                With ThisWorkbook.Worksheets(playerIdentifier).Cells
                    .ClearContents
                    .ClearFormats
                End With
                Set wsCurrent = ThisWorkbook.Worksheets(playerIdentifier)
            Else
                Set wsCurrent = ThisWorkbook.Worksheets.Add
                wsCurrent.Name = playerIdentifier
            End If
            Set hTable = html.querySelector("#pgl_basic")
            clipboard.SetText hTable.outerHTML
            clipboard.PutInClipboard
            wsCurrent.Range("A1").PasteSpecial
            Set wsCurrent = Nothing: Set hTable = Nothing: Erase arr: playerIdentifier = vbNullString
            Application.CutCopyMode = False
        Next
    End With
End Sub

Public Function SheetExists(ByVal sheetName As String) As Boolean '<== function by @Rory
    SheetExists = Evaluate("ISREF('" & sheetName & "'!A1)")
End Function
IE:
Note the proper page-load wait used.
Option Explicit
Public Sub GetPlayerInfo()
    Dim ieObj As InternetExplorer, htmlEle As IHTMLElement
    Dim urls(), i As Long, j As Long, hTable As Object
    Dim ws As Worksheet, wsCurrent As Object
    Dim lastRow As Long, playerIdentifier As String, arr() As String

    Application.ScreenUpdating = False
    On Error GoTo errHand
    Set ws = ThisWorkbook.Worksheets("Sheet1")
    lastRow = ws.Cells(ws.Rows.Count, "A").End(xlUp).Row
    urls = Application.Transpose(ws.Range("A1:A" & lastRow).Value)
    Set ieObj = New InternetExplorer

    With ieObj
        .Visible = True
        For j = LBound(urls) To UBound(urls)
            .navigate urls(j)
            While .Busy Or .readyState <> 4: DoEvents: Wend
            arr = Split(urls(j), "/")
            playerIdentifier = arr(5)
            If SheetExists(playerIdentifier) Then
                With ThisWorkbook.Worksheets(playerIdentifier).Cells
                    .ClearContents
                    .ClearFormats
                End With
                Set wsCurrent = ThisWorkbook.Worksheets(playerIdentifier)
            Else
                Set wsCurrent = ThisWorkbook.Worksheets.Add
                wsCurrent.Name = playerIdentifier
            End If
            i = 1
            Set hTable = .document.getElementById("pgl_basic")
            If Not hTable Is Nothing Then
                For Each htmlEle In hTable.getElementsByTagName("tr")
                    With wsCurrent
                        .Range("A" & i).Value = htmlEle.Children(0).textContent
                        .Range("B" & i).Value = htmlEle.Children(1).textContent
                        .Range("C" & i).Value = htmlEle.Children(2).textContent
                        .Range("D" & i).Value = htmlEle.Children(3).textContent
                    End With
                    i = i + 1
                Next htmlEle
            End If
            Set wsCurrent = Nothing: Set hTable = Nothing: Erase arr: playerIdentifier = vbNullString
        Next
    End With

errHand:
    Application.ScreenUpdating = True
    If Err.Number <> 0 Then Debug.Print Err.Number, Err.Description
    ieObj.Quit '<== was ie.Quit, which referenced an undeclared variable
End Sub

Can't get Web Scraped Table to populate multiple cells VBA

I am trying to scrape a table from a website via VBA. I am able to gather all the data from the table, however I am only able to get it to populate a single cell rather than break out the information as formatted on the website. Essentially the data should be in four columns and then go down as many rows as there is data. I know I'm missing something simple here and I just can't put my finger on it. Help please! The code is below:
Sub WebScrape()
    Dim objIE As InternetExplorer
    Dim ele As Object
    Dim y As Integer

    Set objIE = New InternetExplorer 'assumed: the instance must be created before .navigate; this line appears to be missing from the post

    objIE.navigate "http://www.uscfinvestments.com/holdings/usci"
    Do While objIE.Busy = True Or objIE.readyState <> 4: DoEvents: Loop

    y = 1
    For Each ele In objIE.document.getElementById("holdingsTableDiv").getElementsByTagName("table")
        Debug.Print ele.textContent
        Sheets("Sheet1").Range("A" & y).Value = ele.Children(0).textContent
    Next

    ActiveWorkbook.Save
End Sub
I show a number of methods below. Personally, I prefer the last, which uses an API.
Using clipboard:
A nice, easy way, if you want the output to appear as it does on the page, is simply to copy the table to the clipboard and paste:
Option Explicit
Public Sub GetInfo()
    Dim ie As InternetExplorer, hTable As Object, clipboard As Object, ws As Worksheet, t As Date
    Const MAX_WAIT_SEC As Long = 10

    Set ws = ThisWorkbook.Worksheets("Sheet1")
    Set clipboard = GetObject("New:{1C3B4210-F441-11CE-B9EA-00AA006B1A69}")
    Set ie = New InternetExplorer

    With ie
        .Visible = True
        .Navigate2 "http://www.uscfinvestments.com/holdings/usci"
        While .Busy Or .readyState < 4: DoEvents: Wend

        t = Timer
        Do
            On Error Resume Next
            Set hTable = .document.getElementById("holdingsTableID")
            On Error GoTo 0
            If Timer - t > MAX_WAIT_SEC Then Exit Do
        Loop While hTable Is Nothing

        If Not hTable Is Nothing Then
            clipboard.SetText hTable.outerHTML
            clipboard.PutInClipboard
            ws.Cells(1, 1).PasteSpecial
        End If
        .Quit
    End With
End Sub
Looping table:
If you want to loop the rows and columns of a table, you can use the row class name and row number to determine how to write the values out:
Option Explicit
Public Sub GetInfo()
    Dim ie As InternetExplorer, hTable As Object, ws As Worksheet, t As Date
    Const MAX_WAIT_SEC As Long = 10

    Set ws = ThisWorkbook.Worksheets("Sheet1")
    Set ie = New InternetExplorer

    With ie
        .Visible = True
        .Navigate2 "http://www.uscfinvestments.com/holdings/usci"
        While .Busy Or .readyState < 4: DoEvents: Wend

        t = Timer
        Do
            On Error Resume Next
            Set hTable = .document.getElementById("holdingsTableID")
            On Error GoTo 0
            If Timer - t > MAX_WAIT_SEC Then Exit Do
        Loop While hTable Is Nothing

        If Not hTable Is Nothing Then
            WriteTable hTable, 1, ws
        End If
        .Quit
    End With
End Sub

Public Sub WriteTable(ByVal hTable As HTMLTable, Optional ByVal startRow As Long = 1, Optional ByVal ws As Worksheet)
    If ws Is Nothing Then Set ws = ActiveSheet
    Dim tRow As Object, td As Object, r As Long, c As Long, th As Object
    r = startRow
    With ws
        For Each tRow In hTable.getElementsByTagName("tr")
            c = 1
            If r = startRow Or tRow.className = "subHeader" Then
                For Each th In tRow.getElementsByTagName("th")
                    .Cells(r, c) = th.innerText
                    c = c + 1
                Next
            Else
                For Each td In tRow.getElementsByTagName("td")
                    .Cells(r, c).Value = td.innerText
                    c = c + 1
                Next td
            End If
            r = r + 1
        Next tRow
    End With
End Sub
API:
There is an API which serves the data in JSON format:
https://cssecure.alpsinc.com/api/v1//holding/usci
It requires authentication. I am using jsonconverter.bas to parse the JSON returned. After downloading and adding the .bas, go to VBE > Tools > References and add a reference to Microsoft Scripting Runtime.
Option Explicit
Public Sub GetValues()
    Dim json As Object, authorization As String

    With CreateObject("MSXML2.XMLHTTP")
        .Open "GET", "http://www.uscfinvestments.com/uscfinvestments-template/assets/javascript/api_key.php", False
        .send
        authorization = Split(Split(.responseText, "'")(1), "'")(0)
        .Open "GET", "https://cssecure.alpsinc.com/api/v1//holding/usci", False
        .setRequestHeader "Authorization", authorization
        .send
        Set json = JsonConverter.ParseJson(.responseText)
    End With

    Dim arr(), headers(), item As Object, r As Long
    headers = Array("Security", "Quantity", "Price", "Market Value")
    r = 1
    ReDim arr(1 To json.Count, 1 To 4)

    For Each item In json
        arr(r, 1) = item("name")
        arr(r, 2) = item("shares")
        If IsNull(item("contractprice")) Then
            arr(r, 3) = item("settlementprice")
        Else
            arr(r, 3) = item("contractprice")
        End If
        arr(r, 4) = item("marketvalue")
        r = r + 1
    Next

    With ThisWorkbook.Worksheets("Sheet1")
        .Cells(1, 1).Resize(1, UBound(headers) + 1) = headers
        .Cells(2, 1).Resize(UBound(arr, 1), UBound(arr, 2)) = arr
    End With
End Sub
There is not much to add here; QHarr has already covered the main approaches. I used a hard-coded delay instead of an explicit wait to make the script less verbose.
Sub GetContent()
    Dim Html As HTMLDocument, elem As Object, tRow As Object, C&, R&

    With New InternetExplorer
        .Visible = False
        .navigate "http://www.uscfinvestments.com/holdings/usci"
        While .Busy = True Or .readyState < 4: DoEvents: Wend
        Set Html = .Document
    End With

    Application.Wait Now + TimeValue("00:00:03") 'make it 05 if it fails somehow

    For Each elem In Html.getElementById("holdingsTableID").Rows
        For Each tRow In elem.Cells
            C = C + 1: ThisWorkbook.Worksheets("Sheet1").Cells(R + 1, C) = tRow.innerText
        Next tRow
        C = 0: R = R + 1
    Next elem
End Sub

Optimizing the VBA code

There is a program that parses a website. The program works well, but it takes too long, and I want to simplify/speed it up. Please tell me, are there any specialized sites on this issue? I will be grateful for any help.
How the program works:
First, following a hyperlink, the program goes to the site, where it finds a certain table of elements.
Then it takes the "href" of each element, turns it into a hyperlink, and inserts it into Excel in the 1st table.
Then it extracts the text of each element and inserts it into Excel in the 2nd table.
Then it goes through the elements of the 1st and 2nd tables, so that in the 3rd table each element contains "hyperlink + text".
Sub Softгиперссылки()
    Application.DisplayAlerts = False
    Call mainмассивы
    Application.DisplayAlerts = True
End Sub
Sub mainмассивы()
    Dim r As Range
    Dim firstAddress As String
    Dim iLoop As Long
    Dim book1 As Workbook
    Dim sheetNames(1 To 19) As String
    Dim Ssilka As String

    sheetNames(1) = "Лист1"
    sheetNames(2) = "Лист2"
    sheetNames(3) = "Лист3"
    sheetNames(4) = "Лист4"
    sheetNames(5) = "Лист5"
    sheetNames(6) = "Лист6"
    sheetNames(7) = "Лист7"
    sheetNames(8) = "Лист8"
    sheetNames(9) = "Лист9"
    sheetNames(10) = "Лист10"
    sheetNames(11) = "Лист11"
    sheetNames(12) = "Лист12"
    sheetNames(13) = "Лист13"
    sheetNames(14) = "Лист14"
    sheetNames(15) = "Лист15"
    sheetNames(16) = "Лист16"
    sheetNames(17) = "Лист17"
    sheetNames(18) = "Лист18"
    sheetNames(19) = "Лист19"

    'skip the error
    Set book1 = Workbooks.Open("E:\Super M\Проект ставки\Поиск решения\Усов 7\Условия для андердогов\6.xlsm")
    iLoop = -1

    With book1.Worksheets("Лист1").Range("R34:R99")
        For Each r In .Rows
            If r.Value = 1 Then
                iLoop = iLoop + 1
                Ssilka = r.Offset(, -13).Hyperlinks.Item(1).Address
                .Parent.Parent.Worksheets(sheetNames(1)).Activate
                .Parent.Parent.Save
                extractTable Ssilka, book1, iLoop
            End If
        Next r
    End With

    book1.Save
    book1.Close
    Exit Sub
End Sub
Function extractTable(Ssilka As String, book1 As Workbook, iLoop As Long)
    Dim oDom As Object, oTable As Object, oRow As Object
    Dim iRows As Integer, iCols As Integer
    Dim x As Integer, y As Integer
    Dim data()
    Dim oHttp As Object
    Dim oRegEx As Object
    Dim sResponse As String
    Dim oRange As Range
    Dim Perem1 As String
    Dim Perem2 As String
    Dim A As Long, B As Long 'loop counters (not declared in the original)

    'for the hyperlinks
    ' get page
    Set oHttp = CreateObject("MSXML2.XMLHTTP")
    oHttp.Open "GET", Ssilka, False
    oHttp.Send

    ' cleanup response
    sResponse = StrConv(oHttp.responseBody, vbUnicode)
    Set oHttp = Nothing
    sResponse = Mid$(sResponse, InStr(1, sResponse, "<!DOCTYPE "))

    Set oRegEx = CreateObject("vbscript.regexp")
    With oRegEx
        .MultiLine = True
        .Global = True
        .IgnoreCase = False
        .Pattern = "<(script|SCRIPT)[\w\W]+?</\1>"
        sResponse = .Replace(sResponse, "")
    End With
    Set oRegEx = Nothing

    ' create Document from response
    Set oDom = CreateObject("htmlFile")
    oDom.Write sResponse
    DoEvents

    ' table with results, indexes start at zero
    Set oTable = oDom.getelementsbytagname("table")(3)
    DoEvents
    iRows = oTable.Rows.Length
    iCols = oTable.Rows(1).Cells.Length

    ' first row and first column contain no interesting data
    ReDim data(1 To iRows - 1, 1 To iCols - 1)

    ' fill in data array
    For x = 1 To iRows - 1
        Set oRow = oTable.Rows(x)
        For y = 1 To iCols - 1
            If oRow.Cells(y).Children.Length > 0 Then
                data(x, y) = oRow.Cells(y).getelementsbytagname("a")(0).getattribute("href")
            End If
        Next y
    Next x
    Set oRow = Nothing
    Set oTable = Nothing
    Set oDom = Nothing

    ' put data array on worksheet
    Set oRange = book1.ActiveSheet.Cells(110, 26 + (iLoop * 21)).Resize(iRows - 1, iCols - 1)
    oRange.NumberFormat = "#"
    oRange.Value = data
    oRange.Replace What:="about:", Replacement:="http://allscores.ru/soccer/"
    Set oRange = Nothing

    '!!!! for the text
    ' get page
    Set oHttp = CreateObject("MSXML2.XMLHTTP")
    oHttp.Open "GET", Ssilka, False
    oHttp.Send

    ' cleanup response
    sResponse = StrConv(oHttp.responseBody, vbUnicode)
    Set oHttp = Nothing
    sResponse = Mid$(sResponse, InStr(1, sResponse, "<!DOCTYPE "))

    Set oRegEx = CreateObject("vbscript.regexp")
    With oRegEx
        .MultiLine = True
        .Global = True
        .IgnoreCase = False
        .Pattern = "<(script|SCRIPT)[\w\W]+?</\1>"
        sResponse = .Replace(sResponse, "")
    End With
    Set oRegEx = Nothing

    ' create Document from response
    Set oDom = CreateObject("htmlFile")
    oDom.Write sResponse
    DoEvents

    ' table with results, indexes start at zero
    Set oTable = oDom.getelementsbytagname("table")(3)
    DoEvents
    iRows = oTable.Rows.Length
    iCols = oTable.Rows(1).Cells.Length

    ' first row and first column contain no interesting data
    ReDim data(1 To iRows - 1, 1 To iCols - 1)

    ' fill in data array
    For x = 1 To iRows - 1
        Set oRow = oTable.Rows(x)
        For y = 1 To iCols - 1
            If oRow.Cells(y).Children.Length > 0 Then
                data(x, y) = oRow.Cells(y).innerText
            End If
        Next y
    Next x
    Set oRow = Nothing
    Set oTable = Nothing
    Set oDom = Nothing

    ' put data array on worksheet
    Set oRange = book1.ActiveSheet.Cells(185, 26 + (iLoop * 21)).Resize(iRows - 1, iCols - 1)
    oRange.NumberFormat = "#"
    oRange.Value = data
    Set oRange = Nothing

    '!!!!! loop for text + hyperlink
    For A = 0 To 4
        For B = 0 To 65
            Perem1 = book1.ActiveSheet.Cells(110 + B, (26 + (iLoop * 21)) + A).Value
            Perem2 = book1.ActiveSheet.Cells(185 + B, (26 + (iLoop * 21)) + A).Value
            book1.ActiveSheet.Hyperlinks.Add Anchor:=Cells(34 + B, (26 + (iLoop * 21)) + A), Address:=Perem1, TextToDisplay:=Perem2
        Next
    Next
End Function
There are a number of things that could be done to improve efficiency, but that is probably better conducted on Code Review.
I will make mention, however, of your use of late-bound variables. You'll achieve much faster performance with early binding:
'Late-bound variable declaration and creation
Dim oRegEx As Object
Set oRegEx = CreateObject("vbscript.regexp")
With oRegEx
    '....
End With

'Late-bound reference only:
'No variable declaration required; the object only survives as long as the With block
With CreateObject("vbscript.regexp")
    '....
End With

'Early-bound - add a reference to Microsoft VBScript Regular Expressions 5.5
'This is the fastest and most efficient use of a new RegExp object, and you get IntelliSense in the VBE
With New RegExp
    '....
End With
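The same early-binding point applies to the MSXML2.XMLHTTP object created in extractTable. A minimal sketch, assuming you add a reference to Microsoft XML, v6.0:
'Early-bound HTTP request - reference: Microsoft XML, v6.0
Dim oHttp As MSXML2.XMLHTTP60
Set oHttp = New MSXML2.XMLHTTP60
oHttp.Open "GET", Ssilka, False
oHttp.Send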
You should also consider installing the free, open-source Rubberduck VBA add-in for the Visual Basic Editor (disclaimer - I'm a contributor), which will come with many more suggestions and optimizations for you, and it will automatically indent your code for improved readability.

Web table not fetching the correct data by VBA

With the following code I can fetch the price table from this webpage: http://www.idealo.de/preisvergleich/OffersOfProduct/143513.html
But from another page this table is not being fetched, even though the two pages are identical. I can't figure out where I'm going wrong.
Any help on this is deeply appreciated.
Sub TableExample()
    Dim IE As Object
    Dim doc As Object
    Dim strURL As String

    strURL = "http://www.idealo.de/preisvergleich/OffersOfProduct/143513.html"

    Set IE = CreateObject("InternetExplorer.Application")
    With IE
        .navigate strURL
        Do Until .readyState = 4: DoEvents: Loop
        Do While .Busy: DoEvents: Loop
        Set doc = IE.document
        GetAllTables doc
        .Quit
    End With
End Sub
Sub GetAllTables(doc As Object)
    Dim ws As Worksheet
    Dim rng As Range
    Dim tbl As Object
    Dim rw As Object
    Dim cl As Object
    Dim tabno As Long
    Dim nextrow As Long
    Dim i As Long
    Dim colno As Long, classColl As Object, imgTgt As Object 'not declared in the original

    Set ws = Sheets("Sheet1")

    For Each tbl In doc.getElementsByTagName("TABLE")
        tabno = tabno + 1
        nextrow = nextrow + 1
        Set rng = ws.Range("B" & nextrow)
        rng.Offset(, -1) = "Table " & tabno
        If tabno = 5 Then
            For Each rw In tbl.Rows
                colno = 5
                For Each cl In rw.Cells
                    If colno = 5 And nextrow > 5 Then
                        Set classColl = doc.getElementsByClassName("shop")
                        Set imgTgt = classColl(nextrow - 6).getElementsByTagName("img")
                        rng.Value = imgTgt(0).getAttribute("alt")
                    Else
                        rng.Value = cl.innerText
                    End If
                    Set rng = rng.Offset(, 1)
                    i = i + 1
                    colno = colno + 1
                Next cl
                nextrow = nextrow + 1
                Set rng = rng.Offset(1, -i)
                i = 0
            Next rw
        End If
    Next tbl
End Sub
The table numbers change between the two URLs. Table 5 is the table you're interested in for the first URL, but Table 6 is the one of interest in the second URL. However, both tables of interest have the same id ("offers-list"), so instead of looking for the fifth table, adjust your code to look for the table with that id.
Change
If tabno = 5 Then
to
If InStr(1, tbl.outerHTML, "Produktbezeichnung des Shops", vbTextCompare) > 0 Then
This will get you close. There are other changes on the second web page that your current code isn't quite handling, but like I said, this will get you close.
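If you prefer to key off that shared id directly, a minimal sketch (reusing the doc, rw and cl variables from GetAllTables; offersTable is a name introduced only for this sketch) might look like:
Dim offersTable As Object
Set offersTable = doc.getElementById("offers-list") 'both pages use this id for the offers table
If Not offersTable Is Nothing Then
    For Each rw In offersTable.Rows
        For Each cl In rw.Cells
            'write cl.innerText out as in the existing loop
        Next cl
    Next rw
End If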
I have changed If tabno = 5 Then to:
For Each tbl In doc.getElementsByTagName("table")
    ' tabno = tabno + 1
    If tbl.className = "orangebox_rowborder" Then
Thanks @Ron for guiding me on this... thanks a ton, dude.
The following works for each URL, so it is more robust, and it is a lot faster than the method you are currently using as it does away with the IE browser navigation.
For a lengthy code explanation please see here.
Option Explicit
'Tools > References > Microsoft HTML Object Library
Public Sub GetTable()
    Const URL = "https://www.idealo.de/preisvergleich/OffersOfProduct/1866742_-335-billingham.html" '<==Change this
    Dim sResponse As String, listItems As Object, html As HTMLDocument, headers()

    headers = Array("product_id", "product_name", "product_price", "product_category", "currency", "spr", "shop_name", "delivery_time", "shop_rating", "position", "free_return", "approved_shipping")

    Application.ScreenUpdating = False
    With CreateObject("MSXML2.XMLHTTP")
        .Open "GET", URL, False
        .send
        sResponse = StrConv(.responseBody, vbUnicode)
    End With
    sResponse = Mid$(sResponse, InStr(1, sResponse, "<!DOCTYPE "))

    Set html = New HTMLDocument
    With html
        .body.innerHTML = sResponse
        Set listItems = .getElementsByClassName("productOffers-listItemOfferPrice")
    End With

    Dim currentItem As Long
    With ActiveSheet
        .Cells(1, 1).Resize(1, UBound(headers) + 1) = headers
        For currentItem = 0 To listItems.Length - 1
            Dim tempString As String, columnValues() As String
            tempString = TidyString(GetTransactionInfo(listItems(currentItem).outerHTML), "&#\d+;")
            columnValues = GetColumnValues(tempString, headers)
            .Cells(currentItem + 2, 1).Resize(1, UBound(columnValues) + 1) = columnValues
        Next currentItem
    End With
    Application.ScreenUpdating = True
End Sub
Public Function GetTransactionInfo(ByVal inputString) As String
    'Split to get just the transaction items i.e. headers and associated values
    GetTransactionInfo = Split(Split(inputString, """transaction"",")(1), "}")(0)
End Function

Public Function TidyString(ByVal inputString As String, ByVal matchPattern As String) As String
    'Extract transaction info
    'Use regex to find these unwanted strings and replace pattern e.g. &#\d+;
    Dim regex As Object, tempString As String
    Set regex = CreateObject("VBScript.RegExp")
    With regex
        .Global = True
        .MultiLine = True
        .IgnoreCase = False
        .Pattern = matchPattern
    End With
    If regex.test(inputString) Then
        TidyString = regex.Replace(inputString, vbNullString)
    Else
        TidyString = inputString
    End If
End Function

Public Function GetColumnValues(ByVal inputString As String, ByVal headers As Variant) As Variant
    ' Example input string "product_id": "143513","product_name": "Canon 500D Nahlinse 72mm","product_price": "128.0","product_category": "26570","currency": "EUR","spr": "cfd","shop_name": "computeruniverse.net","delivery_time": "long","shop_rating": "100","position": "1","free_return": "14","approved_shipping": "false"
    ' Extract just the inner string value of each header e.g. 143513
    Dim arr() As String, currentItem As Long, tempString As String
    tempString = inputString
    For currentItem = LBound(headers) To UBound(headers)
        tempString = TidyString(tempString, Chr$(34) & headers(currentItem) & Chr$(34) & ":")
    Next currentItem
    arr = Split(Replace$(tempString, Chr$(34), vbNullString), ",")
    GetColumnValues = arr
End Function

Excel 2010 VBA - Pull Table from website

I'm very new to Excel VBA and I was given some code to play with. I successfully modified it once, but when I tried to modify it again it wouldn't pull the right info. It might be because I don't know the table id for sure? I also don't understand all of the code from this example, which is probably the other issue. Anyway, I'm trying to pull the Historical Prices table from this page. It pulls some data, but not the correct data. Any help would be appreciated. Thanks!
Here is my current code:
Sub GrabHistData()
    Dim Ptrtbl As Long, r As Long, c As Long
    Dim htm As Object
    Dim elemCollection As Object

    Set htm = CreateObject("htmlFile")
    With CreateObject("msxml2.xmlhttp")
        .Open "GET", "http://finance.yahoo.com/q/hp?s=TWTR&a=04&b=30&c=2012&d=01&e=7&f=2014&g=d", False
        .send
        htm.body.innerhtml = .responsetext
    End With

    Set elemCollection = htm.getElementsByTagName("TABLE")
    Ptrtbl = 1
    For Each elem In elemCollection
        Ptrtbl = Ptrtbl + 1
        If elem.ID <> "yfncsumtab" Then GoTo Nxtelem
        With elemCollection(Ptrtbl)
            For c = 0 To (.Rows(r).Cells.Length - 1)
                Cells(r + 1, c + 1) = .Rows(r).Cells(c).innertext
            Next c
        End With
        Exit For
Nxtelem:
    Next elem
End Sub
If you want to stick with your current approach, this works for me...
Sub GrabHistData()
    Dim Ptrtbl As Long, r As Long, c As Long
    Dim htm As Object
    Dim elemCollection As Object

    Set htm = CreateObject("htmlFile")
    With CreateObject("msxml2.xmlhttp")
        .Open "GET", "http://finance.yahoo.com/q/hp?s=TWTR&a=04&b=30&c=2012&d=01&e=7&f=2014&g=d", False
        .send
        htm.body.innerhtml = .responseText
    End With

    Set elemCollection = htm.getElementsByTagName("td")
    For Each itm In elemCollection
        If itm.classname = "yfnc_tabledata1" Then
            ActiveCell = itm.innertext
            If ActiveCell.Column = 7 Then
                ActiveCell.Offset(1, -6).Select 'last column of the row reached: move to the start of the next row
            Else
                ActiveCell.Offset(0, 1).Select 'move one column to the right
            End If
        End If
    Next
End Sub