I have the following function:
Public Function OleDBCSVToDataTable(directory As String, tableName As String, fileName As String, Optional start As Long = 0) As DataTable
    Dim CnStr = "Provider=Microsoft.ACE.OLEDB.12.0;Data Source=" & directory & ";Extended Properties='Excel 8.0;HDR=YES'"
    Dim dt As DataTable = GetTableSchema(tableName)
    Using Adp As New OleDbDataAdapter("select * from [" & fileName & "]", CnStr)
        Adp.Fill(start, 1000, dt)
    End Using
    Return dt
End Function
The function is designed to read a CSV into a data table using OLEDB for import into SQL; however, I am receiving this error:
"The Microsoft Access database engine cannot open or write to the file
'C:\TEST'. It is already opened exclusively by another user, or you
need permission to view and write its data."
I have attempted this solution; all permissions have been granted (permissions are Full Control across users).
I have seen this solution as well; however, the proposed options other than OLEDB don't seem to work with CSV. Besides, I imagine there are native libraries.
I am open to suggestions for better ways to accomplish this; however, based on the requirements (large CSVs, data validation), this appears to be the best approach, assuming I am able to get it working.
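One detail that may matter here (an untested sketch, not a confirmed fix): the 'Excel 8.0' extended property targets .xls workbooks, while CSV files go through the ACE text driver, which expects the Data Source to be the folder and the file name to appear in the FROM clause. In C#, with placeholder folder and file names:
using System;
using System.Data;
using System.Data.OleDb;

class CsvProbe
{
    static void Main()
    {
        string dir = @"C:\TEST";    // folder containing the CSV (placeholder)
        string file = "data.csv";   // hypothetical file name
        string cnStr = "Provider=Microsoft.ACE.OLEDB.12.0;Data Source=" + dir +
                       ";Extended Properties='text;HDR=YES;FMT=Delimited'";

        var dt = new DataTable();
        using (var adp = new OleDbDataAdapter("select * from [" + file + "]", cnStr))
        {
            // Same paged Fill pattern as the VB function above.
            adp.Fill(0, 1000, dt);
        }
        Console.WriteLine(dt.Rows.Count);
    }
}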
How about importing the CSV file into a DataGridView, and then exporting from that object into MS Access?
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Windows.Forms;
using System.IO;
using System.Globalization;
using System.Configuration;
using System.Data.OleDb;
namespace WindowsFormsApplication1
{
    public partial class Form1 : Form
    {
        public Form1()
        {
            InitializeComponent();
        }

        public void button1_Click(object sender, EventArgs e)
        {
            string delimiter = ",";
            string tablename = "medTable";
            DataSet dataset = new DataSet();
            OpenFileDialog openFileDialog1 = new OpenFileDialog();
            openFileDialog1.Filter = "CSV Files (*.csv)|*.csv|All Files (*.*)|*.*";
            openFileDialog1.FilterIndex = 1;
            if (openFileDialog1.ShowDialog() == DialogResult.OK)
            {
                if (MessageBox.Show("Are you sure you want to import the data from \n " + openFileDialog1.FileName + "?", "Are you sure?", MessageBoxButtons.YesNo) == DialogResult.Yes)
                {
                    filename = openFileDialog1.FileName;
                    dataset.Tables.Add(tablename);
                    dataset.Tables[tablename].Columns.Add("Order ID");
                    dataset.Tables[tablename].Columns.Add("Product");
                    dataset.Tables[tablename].Columns.Add("Unit Price");
                    dataset.Tables[tablename].Columns.Add("Quantity");
                    dataset.Tables[tablename].Columns.Add("Discount");
                    // Read the file once and split on CRLF so rows don't keep a stray "\n".
                    string allData = File.ReadAllText(filename);
                    string[] rows = allData.Split(new[] { "\r\n" }, StringSplitOptions.RemoveEmptyEntries);
                    foreach (string r in rows)
                    {
                        string[] items = r.Split(delimiter.ToCharArray());
                        dataset.Tables[tablename].Rows.Add(items);
                    }
                    this.dataGridView1.DataSource = dataset.Tables[0].DefaultView;
                    MessageBox.Show(filename + " was successfully imported. \n Please review all data before sending it to the database.", "Success!", MessageBoxButtons.OK);
                }
                else
                {
                    this.Close();
                }
            }
        }
        public string filename { get; set; }

        private void openFileDialog1_FileOk(object sender, CancelEventArgs e)
        {
        }

        private void Import_Load(object sender, EventArgs e)
        {
        }

        private void button4_Click(object sender, EventArgs e)
        {
            Application.Exit();
        }

        private void button2_Click(object sender, EventArgs e)
        {
            this.Close();
        }

        private void button3_Click(object sender, EventArgs e)
        {
            // Create the connection string.
            string connString = "Provider=Microsoft.ACE.OLEDB.12.0;Data Source=C:\\Users\\Ryan\\Desktop\\Coding\\Microsoft Access\\Northwind_2012.mdb";
            // Create the database query.
            string query = "SELECT * FROM [OrderDetailsTest]";
            // Create an OleDbDataAdapter to execute the query.
            OleDbDataAdapter dAdapter = new OleDbDataAdapter(query, connString);
            // Create a command builder so the adapter can generate INSERT/UPDATE commands.
            OleDbCommandBuilder cBuilder = new OleDbCommandBuilder(dAdapter);
            // Create a DataTable to hold the query results.
            DataTable dTable = new DataTable();
            // Fill the DataTable.
            dAdapter.Fill(dTable);
            // The DataGridView.
            DataGridView dataGridView1 = new DataGridView();
            // BindingSource to sync the DataTable and the DataGridView.
            BindingSource bSource = new BindingSource();
            // Set the BindingSource DataSource.
            bSource.DataSource = dTable;
            // Set the DataGridView DataSource.
            dataGridView1.DataSource = bSource;
            // Push any edits made in the DataTable back to the database.
            dAdapter.Update(dTable);
        }
    }
}
I am using Azure SQL (SQL Server 2016) and creating a query to give me output as a JSON object. I am adding FOR JSON PATH at the end of the query.
When I execute the procedure without adding FOR JSON PATH to the query, I get 244 rows (the number of records in my table); but when I execute the procedure with FOR JSON PATH added, I get a message saying 33 rows, and the JSON object I get is truncated.
I tested this with different types of queries, including a simple query selecting only 10 columns, but I always get fewer rows with FOR JSON PATH and a JSON object truncated at the end.
Here is my query:
SELECT
[Id]
,[countryCode]
,[CountryName]
,[FIPS]
,[ISO1]
,[ISO2]
,[ISONo]
,[capital]
,[region]
,[currency]
,[currencyCode]
,[population]
,[timeZone]
,[timeZoneCode]
,[ISDCode]
,[currencySymbol]
FROM
[dbo].[countryDB]
The above query returns 2 rows.
And I use the following query to get the output in JSON:
SELECT
[Id]
,[countryCode]
,[CountryName]
,[FIPS]
,[ISO1]
,[ISO2]
,[ISONo]
,[capital]
,[region]
,[currency]
,[currencyCode]
,[population]
,[timeZone]
,[timeZoneCode]
,[ISDCode]
,[currencySymbol]
FROM
[dbo].[countryDB]
FOR JSON PATH
The above query returns 33 rows, and the output is:
[{"Id":1,"countryCode":"AD","CountryName":"Andorra","FIPS":"AN","ISO1":"AD","ISO2":"AND","ISONo":20,"capital":"Andorra la Vella","region":"Europe","currency":"Euro","currencyCode":"EUR","population":67627,"timeZone":2.00,"timeZoneCode":"DST","ISDCode":"+376"},{"Id":2,"countryCode":"AE","CountryName":"United Arab Emirates","FIPS":"AE","ISO1":"AE","ISO2":"ARE","ISONo":784,"capital":"Abu Dhabi","region":"Middle East","currency":"UAE Dirham","currencyCode":"AED","population":2407460,"timeZone":4.00,"timeZoneCode":"STD","ISDCode":"+971"},{"Id":3,"countryCode":"AF","CountryName":"Afghanistan","FIPS":"AF","ISO1":"AF","ISO2":"AFG","ISONo":4,"capital":"Kabul","region":"Asia","currency":"Afghani","currencyCode":"AFA","population":26813057,"timeZone":4.50,"timeZoneCode":"STD","ISDCode":"+93"},{"Id":4,"countryCode":"AG","CountryName":"Antigua and Barbuda","FIPS":"AC","ISO1":"AG","ISO2":"ATG","ISONo":28,"capital":"Saint Johns","region":"Central America and the Caribbean","currency":"East Caribbean Dollar","currencyCode":"205","population":66970,"timeZone":-4.00,"timeZoneCode":"STD","ISDCode":"+1"},{"Id":5,"countryCode":"AI","CountryName":"Anguilla","FIPS":"AV","ISO1":"AI","ISO2":"AIA","ISONo":660,"capital":"The Valley","region":"Central America and the Caribbean","currency":"East Caribbean Dollar","currencyCode":"205","population":12132,"timeZone":-4.00,"timeZoneCode":"STD","ISDCode":"+1"},{"Id":6,"countryCode":"AL","CountryName":"Albania","FIPS":"AL","ISO1":"AL","ISO2":"ALB","ISONo":8,"capital":"Tirana","region":"Europe","currency":"Lek","currencyCode":"ALL","population":3510484,"timeZone":2.00,"timeZoneCode":"DST","ISDCode":"+355"},{"Id":7,"countryCode":"AM","CountryName":"Armenia","FIPS":"AM","ISO1":"AM","ISO2":"ARM","ISONo":51,"capital":"Yerevan","region":"Commonwealth of Independent States","currency":"Armenian Dram","currencyCode":"AMD","population":3336100,"timeZone":5.00,"timeZoneCode":"DST","ISDCode":"+374"},{"Id":8,"countryCode":"AN","CountryName":"Netherlands Antilles","FIPS":"NT","ISO1":"AN","ISO2":
I am trying to get the output directly in JSON.
When FOR JSON queries are returned to the client, the JSON text is returned as a single-column result set. The JSON is broken into fixed-length strings and sent over multiple rows.
It's really hard to see this properly in SSMS, as SSMS concatenates the results for you in "Results to Grid", and truncates each row in "Results to Text".
Why? Dunno. My guess is that only .NET clients know how to efficiently read large streams from SQL Server, and 99% of the time users will still just buffer the whole object. Breaking the JSON over multiple rows gives clients a simple API to read the data incrementally. And in .NET the fact that the de facto standard JSON library is not in the BCL means that SqlClient can't really have a first-class JSON API.
Anyway, from C#, you can use something like this to read the results:
using System;
using System.Collections.Generic;
using System.Data;
using System.Data.Common;
using System.Data.SqlClient;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace ConsoleApp3
{
    class SqlJSONReader : TextReader
    {
        SqlDataReader rdr;
        string currentLine = "";
        int currentPos = 0;

        public SqlJSONReader(SqlDataReader rdr)
        {
            this.rdr = rdr;
        }

        public override int Peek()
        {
            return GetChar(false);
        }

        public override int Read()
        {
            return GetChar(true);
        }

        public int GetChar(bool Advance)
        {
            // When the current row's chunk is exhausted, advance to the next row.
            while (currentLine.Length == currentPos)
            {
                if (!rdr.Read())
                {
                    return -1;
                }
                currentLine = rdr.GetString(0);
                currentPos = 0;
            }
            int rv = (int)currentLine[currentPos];
            if (Advance) currentPos += 1;
            return rv;
        }

        public override void Close()
        {
            rdr.Close();
        }
    }

    class Program
    {
        static void Main(string[] args)
        {
            using (var con = new SqlConnection("server=.;database=master;Integrated Security=true"))
            {
                con.Open();
                var sql = @"
select o.object_id as [obj.Id], replicate('n', 2000) as [obj.foo], c.name as [obj.col.name]
from sys.objects o
join sys.columns c
  on c.object_id = o.object_id
for json path;
";
                var cmd = new SqlCommand(sql, con);
                using (var rdr = cmd.ExecuteReader())
                using (var tr = new SqlJSONReader(rdr))
                using (var jr = new Newtonsoft.Json.JsonTextReader(tr))
                {
                    while (jr.Read())
                    {
                        Console.WriteLine($" {jr.TokenType} : {jr.Value}");
                    }
                }
            }
        }
    }
}
With thanks to @David Browne. I found I had to use 'print' instead of 'select':
declare @json varchar(max) = (SELECT * FROM dbo.AppSettings FOR JSON AUTO)
print @json
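A caveat worth noting (my addition, not from the answer above): PRINT itself truncates long strings, so for large documents it is safer to keep the variable assignment and read the single value from the client. Wrapping FOR JSON in a variable yields one row with one value, so the client gets the complete document rather than 2,033-character chunks. A minimal C# sketch, assuming a connection string is available:
using System;
using System.Data.SqlClient;

class JsonScalar
{
    static void Main()
    {
        // Placeholder connection string (assumption, adjust for your server).
        string connectionString = "server=.;database=master;Integrated Security=true";
        const string sql = @"
declare @json nvarchar(max) = (select * from dbo.AppSettings for json auto);
select @json;";

        using (var con = new SqlConnection(connectionString))
        using (var cmd = new SqlCommand(sql, con))
        {
            con.Open();
            // One row, one column: ExecuteScalar returns the whole JSON string.
            string json = (string)cmd.ExecuteScalar();
            Console.WriteLine(json.Length);
        }
    }
}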
Separation of Concerns dictates returning a string and parsing the JSON separately. The snippet below has no dependency on JSON.NET, so any JSON deserializer can be used with it (e.g. the one built into RestSharp), and it does not require the SqlJSONReader class.
try {
    using (var conn = new SqlConnection(connectionString))
    using (var cmd = new SqlCommand(sql, conn)) {
        await conn.OpenAsync();
        logger.LogInformation("SQL:" + sql);
        using (var rdr = await cmd.ExecuteReaderAsync().ConfigureAwait(false)) {
            // Concatenate the chunked rows back into one JSON string.
            var result = new StringBuilder();
            while (await rdr.ReadAsync()) {
                result.Append(rdr.GetString(0));
            }
            return result.ToString();
        }
    }
}
catch (Exception ex) {
    logger.LogError($"Error accessing Db:{ex}");
    return null;
}
The result of a FOR JSON PATH query is one long string that is divided across multiple columns or blocks,
something like this statement: "i want to get result of for json path"
"i want"+
" to ge"+
"t resu"+
"lt of "+
"for js"+
"on path"
Each block is capped at the maximum column size SQL Server returns,
so just read them all into a list:
public IHttpActionResult GetAdvertises()
{
    var request = db.Database.SqlQuery<string>(
        "SELECT ID, CITY_NAME, JSON_QUERY(ALBUM) AS ALBUM FOR JSON PATH").ToList();
    foreach (string req in request)
    {
        HttpContext.Current.Response.Write(req);
    }
    return Ok();
}
If your query returns more than 2,033 characters, then there will be multiple rows. Each row contains 2,033 characters of data; another row contains the remaining data. So you need to merge the rows to get the actual JSON, as in the code sample below.
var jsonReturned = unitOfWork
    .Database
    .FetchProc<string>("storedProcedureGetSaleData", new { ProductId = productId });
if (!jsonReturned.Any())
{
    return null;
}
// Merge the chunked rows back into a single JSON string.
var combinedJson = new StringBuilder();
foreach (var resultJsonRow in jsonReturned)
{
    combinedJson.Append(resultJsonRow);
}
return combinedJson.ToString();
I get the following error:
Incorrect syntax near '.'. Incorrect syntax near the keyword 'with'. If this statement is a common table expression, an xmlnamespaces clause or a change tracking context clause, the previous statement must be terminated with a semicolon.
My code:
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Microsoft.SqlServer.Management.Smo;
using Microsoft.SqlServer.Management.Common;
using System.Data.SqlClient;
using System.Data;
using System.Windows.Forms;
using System.IO;
using System.Configuration;
using System.Threading;
namespace Cloths_Inventory
{
    public partial class frmBackup : Form
    {
        //DataTable dtServers = SmoApplication.EnumAvailableSqlServers(true);
        //private static Server srvr;
        //private string DBpath = Application.StartupPath;
        public frmBackup()
        {
            InitializeComponent();
        }

        private void button1_Click(object sender, EventArgs e)
        {
            bool bBackUpStatus = true;
            Cursor.Current = Cursors.WaitCursor;
            if (Directory.Exists(@"D:\SQLBackup"))
            {
                if (File.Exists(@"D:\SQLBackup\wcBackUp1.bak"))
                {
                    if (MessageBox.Show(@"Do you want to replace it?", "Back", MessageBoxButtons.YesNo, MessageBoxIcon.Question) == DialogResult.Yes)
                    {
                        File.Delete(@"D:\SQLBackup\wcBackUp1.bak");
                    }
                    else
                        bBackUpStatus = false;
                }
            }
            else
                Directory.CreateDirectory(@"D:\SQLBackup");

            if (bBackUpStatus)
            {
                // Connect to DB
                SqlConnection connect;
                string con = @"Data Source=.\SQLEXPRESS;AttachDbFilename=|DataDirectory|Garment.mdf;Integrated Security=True;Asynchronous Processing=True;User Instance=True";
                connect = new SqlConnection(con);
                connect.Open();
                // Execute SQL
                SqlCommand command;
                command = new SqlCommand(@"backup database Garment.mdf to disk ='E:Garment.bak' with init,stats=10", connect);
                command.ExecuteNonQuery();
                connect.Close();
                MessageBox.Show("The backup of the database was performed successfully", "Back", MessageBoxButtons.OK, MessageBoxIcon.Information);
            }
        }
    }
}
You cannot use the AttachDbFileName= approach and then use a server-based command like backup database....
If you want to back up your SQL Server database, it has to be attached to the server, and you need to connect to the server and issue that command:
// Connect to the "master" database on the server
string con = @"server=.\SQLEXPRESS;database=master;Integrated Security=True;";

SqlConnection connect = new SqlConnection(con);
SqlCommand command = new SqlCommand(@"backup database Garment to disk = N'E:\Garment.bak' with init, stats=10", connect);

connect.Open();
command.ExecuteNonQuery();
connect.Close();
You are missing a backslash: E:Garment.bak should be E:\Garment.bak.
command = new SqlCommand(@"backup database Garment.mdf to disk ='E:\Garment.bak' with init,stats=10", connect);
I am building a search index that contains special names, containing !, ?, &, + and so on. I have to treat the following searches differently:
me & you
me + you
But whatever I do (I tried QueryParser escaping before indexing, escaped it manually, tried different tokenizers...), if I check the search index with Luke, those characters do not show up (question marks and #-symbols and the like do show up).
The logic behind this is that I am doing partial searches for a live suggestion (and the fields are not that large), so I split the text up into "m", "me", "+", "y", "yo", "you" and then index it (that way it is much faster than a wildcard query search, and the index size is not a big problem).
So what I would need is to also have these special characters inserted into the index.
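For illustration, here is a minimal sketch of that prefix expansion (a simplified stand-in for the real method, shown only to clarify the idea; the helper name is hypothetical):
using System;
using System.Collections.Generic;

static class PrefixDemo
{
    // Expands every whitespace-separated token into all of its prefixes,
    // so "+" and "&" survive as one-character tokens.
    public static string ExpandPrefixes(string input)
    {
        var parts = new List<string>();
        foreach (var token in input.Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries))
        {
            for (int i = 1; i <= token.Length; i++)
            {
                parts.Add(token.Substring(0, i));
            }
        }
        return string.Join(" ", parts);
    }

    static void Main()
    {
        Console.WriteLine(ExpandPrefixes("me + you")); // m me + y yo you
    }
}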
This is my code:
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using Lucene.Net.Analysis;
using Lucene.Net.Util;
namespace AnalyzerSpike
{
    public class CustomAnalyzer : Analyzer
    {
        public override TokenStream TokenStream(string fieldName, TextReader reader)
        {
            return new ASCIIFoldingFilter(new LowerCaseFilter(new CustomCharTokenizer(reader)));
        }
    }

    public class CustomCharTokenizer : CharTokenizer
    {
        public CustomCharTokenizer(TextReader input) : base(input)
        {
        }

        public CustomCharTokenizer(AttributeSource source, TextReader input) : base(source, input)
        {
        }

        public CustomCharTokenizer(AttributeFactory factory, TextReader input) : base(factory, input)
        {
        }

        protected override bool IsTokenChar(char c)
        {
            // Break tokens only on spaces so "+", "&", "!" etc. stay in the token.
            return c != ' ';
        }
    }
}
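To sanity-check the analyzer, something like this can be used to dump the tokens it produces (a sketch assuming the Lucene.Net 2.9.4 generic attribute API; adjust for older 2.9.x releases that use the typeof-based GetAttribute):
using System;
using System.IO;
using Lucene.Net.Analysis.Tokenattributes;
using AnalyzerSpike;

class TokenDump
{
    static void Main()
    {
        var analyzer = new CustomAnalyzer();
        // "+" and "&" should survive, since IsTokenChar only breaks on spaces.
        var stream = analyzer.TokenStream("name", new StringReader("me + you & them"));
        var term = stream.AddAttribute<ITermAttribute>();
        while (stream.IncrementToken())
        {
            Console.WriteLine(term.Term);
        }
    }
}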
The code to create the index:
private void InitIndex(string path, Analyzer analyzer)
{
var writer = new IndexWriter(path, analyzer, true);
//some multiline textbox that contains one item per line:
var all = new List<string>(txtAllAvailable.Text.Replace("\r","").Split('\n'));
foreach (var item in all)
{
writer.AddDocument(GetDocument(item));
}
writer.Optimize();
writer.Close();
}
private static Document GetDocument(string name)
{
var doc = new Document();
doc.Add(new Field(
"name",
DeNormalizeName(name),
Field.Store.YES,
Field.Index.ANALYZED));
doc.Add(new Field(
"raw_name",
name,
Field.Store.YES,
Field.Index.NOT_ANALYZED));
return doc;
}
(The code is with Lucene.Net in version 1.9.x (EDIT: sorry - it was 2.9.x) but is compatible with Lucene from Java.)
Thanks
Finally had the time to look into it again. It was a stupid mistake in my DeNormalizeName method: it filtered out single-character parts (as it did in the beginning), and thus it filtered out the plus sign when surrounded by spaces :-/
Thanks for your help though, Moleski!
private static string DeNormalizeName(string name)
{
    string answer = string.Empty;
    var wordsOnly = Regex.Replace(name, "[^\\w0-9 ]+", string.Empty);
    var filterText = (name != wordsOnly) ? name + " " + wordsOnly : name;
    foreach (var subName in filterText.Split(' '))
    {
        if (subName.Length >= 1)
        {
            for (var j = 1; j <= subName.Length; j++)
            {
                answer += subName.Substring(0, j) + " ";
            }
        }
    }
    return answer.TrimEnd();
}
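Tracing the fixed method by hand (my walkthrough, to show why the fix works): DeNormalizeName("me + you") first strips the non-word characters to get "me  you", appends that to the original, and then expands every token into its prefixes, yielding "m me + y yo you m me y yo you", so the lone "+" now makes it into the index.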