Run R-Script to get data from GoogleSheets on SQL Server

This code works fine in Visual Studio:
OutputDataSet <- read.csv(file = "https://docs.google.com/spreadsheets/d/128qCX1YtvsHr4hERruFx6Ykn72qRkpRGH5brFULl7SY/pub?gid=0&single=true&output=csv", header = TRUE, sep = ",", encoding = "UTF-8", stringsAsFactors = FALSE)
OutputDataSet
I tried to send this script using T-SQL:
DECLARE @GoogleSheetURL nvarchar(500) =
'OutputDataSet <- read.csv(file = "https://docs.google.com/spreadsheets/d/128qCX1YtvsHr4hERruFx6Ykn72qRkpRGH5brFULl7SY/pub?gid=0&single=true&output=csv", header = TRUE, sep = ",", encoding = "UTF-8", stringsAsFactors = FALSE)'
EXEC sp_execute_external_script
    @language = N'R',
    @script = @GoogleSheetURL,
    @input_data_1 = N' ;'
WITH RESULT SETS (([Дата] date, [Отдел] nvarchar(20), [Продукт] nvarchar(20), [Продавец] nvarchar(20), [Выставлено_шт] int, [Оплачено_шт] int, [Выставлено] int, [Оплачено] int));
GO
How is it possible to get data into SQL Server from a Google Sheet?

Here is the answer: there were two rules in Windows Firewall blocking outbound network access for the local user accounts that SQL Server's external script runtime uses. Once those rules were adjusted, the script could reach the Google Sheets URL.
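For anyone debugging the same symptom, here is a minimal connectivity probe (my own sketch, not part of the original answer): run a trivial download inside sp_execute_external_script and see whether the failure is network-related.
-- my own connectivity probe, not from the original post
EXEC sp_execute_external_script
    @language = N'R',
    @script = N'
OutputDataSet <- data.frame(
    status = tryCatch({
        readLines("https://docs.google.com", n = 1)
        "network reachable"
    }, error = function(e) conditionMessage(e)),
    stringsAsFactors = FALSE)'
WITH RESULT SETS ((status nvarchar(4000)));
If this returns an error message instead of "network reachable", the R runtime cannot get out, which points at the firewall rather than at your script.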

Related

Setting overwrite == TRUE using memdb and dbplyr

The following Shiny app works the first time you run it, but then errors if you change the species input because the table name already exists in memory. I was wondering how to set overwrite == TRUE given the code below?
library(shiny)
library(tidyverse)
library(dbplyr)
ui <- fluidPage(
  selectInput("species", "Species", choices = unique(iris$Species),
              selected = "setosa"),
  tableOutput("SQL_table"),
  actionButton("code", "View SQL")
)
server <- function(input, output) {
  # render table
  output$SQL_table <- renderTable(
    head(iris %>% filter(Species == input[["species"]]))
  )
  # generate query
  SQLquery <- reactive({
    sql_render(
      show_query(
        tbl_memdb(iris) %>%
          filter(Species == local(input$species))
      )
    )
  })
  # see query
  observeEvent(input$code, {
    showModal(
      modalDialog(
        SQLquery()
      )
    )
  })
}
shinyApp(ui = ui, server = server)
Since memdb_frame() is just a wrapper around copy_to(), you can call copy_to() directly and set overwrite = TRUE:
copy_to(src_memdb(), iris, name = 'iris', overwrite=TRUE)
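For instance, a sketch of how this could slot into the server function above (my own illustration, assuming the same iris data and the dbplyr library already loaded by the app):
SQLquery <- reactive({
  # re-copy the table on every run, replacing the existing "iris"
  # in the shared in-memory database
  tbl <- copy_to(src_memdb(), iris, name = "iris", overwrite = TRUE)
  sql_render(
    tbl %>% filter(Species == local(input$species))
  )
})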

Character encoding in R MySQL on Linux machine

I'm trying to fetch data that includes some German words with umlaut characters. Following the structure below, everything works fine on a Windows machine:
Sys.setlocale('LC_ALL','C')
library(RMySQL)
conn <- dbConnect(MySQL(), user = "user", dbname = "database",
                  host = "host", password = "pass")
sql.query <- paste0("some query")
df <- dbSendQuery(conn, sql.query)
names <- fetch(df, -1)
dbDisconnect(conn)
As an example, I have:
names[1230]
[1] "Strübbel"
What should I change in order to get the same result on Linux (Ubuntu)?
The query runs without problems, but the result is:
names[1230]
[1] "Str\374bbel"
I have checked this solution, but when I put the 'set character set "utf8"' inside the query I get the following error:
df <- dbSendQuery(conn, sql.query, 'set character set "utf8"')
names <- fetch(df, -1)
Error in .local(conn, statement, ...) :
unused argument ("set character set \"utf8\"")
I should mention that the encoding of the result is unknown:
Encoding(names[1230])
[1] "unknown"
and doing:
Encoding(names[1230]) <- "UTF-8"
names[1230]
[1] "Str<fc>bbel"
does not solve the problem!
Instead of:
Sys.setlocale('LC_ALL','C')
you have to use:
Sys.setlocale('LC_ALL','en_US.UTF-8')
and in the SQL session:
library(RMySQL)
conn <- dbConnect(MySQL(), user = "user", dbname = "database",
                  host = "host", password = "pass")
sql.query <- paste0("some query")
dbSendQuery(conn,'set character set "utf8"')
df <- dbSendQuery(conn, sql.query)
names <- fetch(df, -1)
dbDisconnect(conn)
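A related one-liner worth trying (my addition, not from the original answer): MySQL's SET NAMES statement configures the client, connection, and results character sets all at once, so it can stand in for the set character set call:
# sets character_set_client, _connection and _results in one statement
dbSendQuery(conn, "SET NAMES utf8")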
Not sure if this solution will help you, but you could try this approach:
con <- dbConnect(MySQL(), user = "user", dbname = "database",
                 host = "host", password = "pass", encoding = "ISO-8859-1")
If this encoding doesn't work, then try "brute force" with different variants.
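As a sketch of that brute-force idea (my own illustration, assuming the bytes came back in some single-byte encoding), you can probe candidate encodings with iconv() on an already-fetched string:
for (enc in c("latin1", "windows-1252")) {
  # reinterpret the raw bytes as the candidate encoding and inspect the result
  print(iconv(names[1230], from = enc, to = "UTF-8"))
}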

Create Shiny DataTable based on selected input

The following code (within my Shiny app) is giving me this error:
"You have an error in your SQL syntax; check the manual that corresponds to your MySQL server version for the right syntax to use near '' at line 1"
shinyServer(function(input, output, session) {
  dataTable <- reactive({
    data <- input$dataset
    con <- dbConnect(
      drv = dbDriver("MySQL"),
      dbname = "Database",
      host = 'remote',
      port = 3306,
      user = "user",
      password = "password")
    on.exit(dbDisconnect(con))
    dbGetQuery(con, paste("select * from ", data, ";"))
  })
  output$myTable <- renderDataTable({
    datatable(dataTable(),
              rownames = FALSE,
              filter = "top",
              extensions = 'Buttons',
              options = list(dom = 'Bfrtip', buttons = I('colvis')))
  })
})
shinyUI(fluidPage(
  titlePanel("Data Search"),
  # SidePanel -------------------------------------------
  # -The Input/Dropdown Menu that Controls the Output
  sidebarLayout(
    sidebarPanel(
      selectInput(
        inputId = "dataset",
        label = "Select Dataset",
        choices = c("", "Schools", "GradRates"),
        selected = "",
        multiple = FALSE),
      width = 3
    ),
    # MainPanel -------------------------------------------
    # -The Output/Table Displayed Based on Input
    mainPanel(
      dataTableOutput(outputId = "myTable"),
      width = 9
    )
  )
))
Most likely the problem is with this line:
dbGetQuery(con, paste("select * from ", data, ";"))
The variable data does not contain a table name as expected. Because your selectInput has selected = "", input$dataset is an empty string when the app starts, so the query sent to MySQL is select * from ;, which is exactly the syntax error you see.
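A common guard for this (my suggestion, not part of the original answer) is shiny::req(), which suspends the reactive until the input has a non-empty value; everything else below matches the asker's code:
dataTable <- reactive({
  req(input$dataset)  # do nothing until a dataset is actually selected
  con <- dbConnect(
    drv = dbDriver("MySQL"),
    dbname = "Database",
    host = 'remote',
    port = 3306,
    user = "user",
    password = "password")
  on.exit(dbDisconnect(con))
  dbGetQuery(con, paste0("select * from ", input$dataset, ";"))
})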

sqlSave and sqlDrop in R

Here is my code:
library('RODBC')
db.handle <- odbcDriverConnect('driver={SQL Server Native Client 11.0};server=server_name;database = db_name;trusted_connection=yes')
sql_table <- 'db_name.table_name'
sqlDrop(db.handle, sql_table, errors = TRUE)
sqlSave(db.handle, df_small, tablename = sql_table, safer = FALSE, append = TRUE,
        rownames = FALSE)
close(db.handle)
When I execute the line:
sqlDrop(db.handle, sql_table, errors = TRUE)
I get the error message:
Error in odbcTableExists(channel, sqtable, abort = errors) :
‘db_name.table_name’: table not found on channel
When I execute the line:
sqlSave(db.handle, df_small, tablename = sql_table, safer = FALSE, append = TRUE,
        rownames = FALSE)
I get the following error message:
Error in sqlSave(db.handle, df_small, tablename = sql_table, safer = FALSE, :
  42S01 2714 [Microsoft][SQL Server Native Client 11.0][SQL Server]
  There is already an object named 'table_name' in the database.
[RODBC] ERROR: Could not SQLExecDirect 'CREATE TABLE db_name.table_name
  ("State_rename" varchar(255), "CoverageType" varchar(255))'
I execute the code consecutively and cannot understand how both error messages can be true.
Consider removing the database qualifier from the sql_table variable. Specifically, change db_name.table_name to table_name. You do not need the qualifier because your connection handle already specifies the database; with this connection you cannot access other databases on the server anyway. This also resolves the apparent contradiction between the two errors: RODBC's odbcTableExists looks for a table literally named 'db_name.table_name' and finds nothing, so sqlDrop fails, while the CREATE TABLE that sqlSave then issues still collides with the table_name that really exists.
library('RODBC')
db.handle <- odbcDriverConnect(paste0('driver={SQL Server Native Client 11.0};',
'server=server_name;database=db_name;trusted_connection=yes'))
sql_table <- 'table_name'
sqlDrop(db.handle, sql_table, errors = TRUE)
sqlSave(db.handle, df_small, tablename = sql_table, safer = FALSE,
append = TRUE, rownames = FALSE)
close(db.handle)
By the way, you can simply use append = FALSE, which will overwrite the table (first dropping it and then re-creating it) with no need to call sqlDrop:
sqlSave(db.handle, df_small, tablename = sql_table, safer = FALSE,
append = FALSE, rownames = FALSE)
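If there is any doubt about which name RODBC actually sees (my own check, not part of the original answer), sqlTables() lists what is visible on the channel:
tbls <- sqlTables(db.handle)
# look for the unqualified name the server reports
subset(tbls, TABLE_NAME == "table_name")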

R dynamic sql query using RODBC and export to .csv

If I run the following code in RStudio it works, but only because I have set Sys.sleep. I have a large batch of queries to run and I don't know how long each will take. If I exclude the Sys.sleep, the exports are blank because the export runs before the query is complete. Is there a way of getting R to wait until the query is complete?
# setup
# install.packages("stringr", dependencies = TRUE)
require(stringr)
library(RODBC)
# odbc connection
db <- odbcDriverConnect("dsn=DW Master;uid=username;pwd=password;")
# sql to be run
qstr <- "select top 10 * from prod"
# variable
weeknum <- c('201401', '201402', '201403')
for (i in weeknum) {
  data <- sqlQuery(db, qstr, believeNRows = FALSE)
  Sys.sleep(10)
  filename <- paste("data_", str_trim(i), ".csv")
  filename
  write.csv(data, file = filename)
}
From this SO post, try adding the rows_at_time argument:
data <- sqlQuery(db, qstr, believeNRows = FALSE, rows_at_time = 1)
Alternatively, you can break the work into two separate steps:
# QUERIES TO DATA FRAMES
weeknum <- c('201401', '201402', '201403')
for (i in weeknum) {
  data <- sqlQuery(db, qstr, believeNRows = FALSE, rows_at_time = 1)
  assign(paste("data", i, sep = ""), data)
}
# DATA FRAMES TO CSV FILES
dfList <- c('data201401', 'data201402', 'data201403')
for (n in dfList) {
  df <- get(n)
  filename <- paste(n, ".csv", sep = "")
  write.csv(df, file = filename)
}