How to compare a QML string with a SQLite database value - sql

I've been trying to create a match-string function that reads a SQLite database (created from JavaScript) in QML and matches a string (a web address, in my case). This is my database file code:
.pragma library

var db;

// opens database at launch
function openDB()
{
    db = openDatabaseSync("BookmarksDB", "1.0", "Bookmarks Database", 1000000);
    createTable();
}

// creates table if it doesn't exist, otherwise ignores
function createTable()
{
    db.transaction(
        function(tx) {
            tx.executeSql("CREATE TABLE IF NOT EXISTS bookmarks (id INTEGER PRIMARY KEY AUTOINCREMENT, title TEXT, url TEXT, creationdate TEXT, modified DATETIME)");
        }
    )
}

// deletes table
function dropTable()
{
    db.transaction(
        function(tx) {
            tx.executeSql("DROP TABLE IF EXISTS bookmarks");
        }
    )
}

// creates a single bookmark record
function createBookmark(bookmarkItem)
{
    db.transaction(
        function(tx) {
            tx.executeSql("INSERT INTO bookmarks (title, url, creationdate, modified) VALUES(?,?,?,?)",
                [bookmarkItem.title, bookmarkItem.url, bookmarkItem.creationdate, bookmarkItem.modified]);
        }
    )
}

// updates a single bookmark record
function updateBookmark(bookmarkItem)
{
    db.transaction(
        function(tx) {
            tx.executeSql("UPDATE bookmarks SET title = ?, url = ?, creationdate = ?, modified = ? WHERE id = ?",
                [bookmarkItem.title, bookmarkItem.url, bookmarkItem.creationdate, bookmarkItem.modified, bookmarkItem.id]);
        }
    )
}

// deletes a single bookmark record
function deleteBookmark(id)
{
    db.transaction(
        function(tx) {
            tx.executeSql("DELETE FROM bookmarks WHERE id = ?", [id]);
        }
    )
}

// read list of bookmarks
function readBookmarkList(model)
{
    model.clear();
    var sqlstring = "SELECT id, title, url, creationdate FROM bookmarks";
    db.readTransaction(
        function(tx) {
            var rs = tx.executeSql(sqlstring);
            for (var i = 0; i < rs.rows.length; i++) {
                model.append(rs.rows.item(i));
            }
        }
    )
}

// read a single bookmark item
function readBookmarkItem(id)
{
    var data = {};
    db.readTransaction(
        function(tx) {
            var rs = tx.executeSql("SELECT * FROM bookmarks WHERE id = ?", [id]);
            if (rs.rows.length === 1) {
                data = rs.rows.item(0);
            }
        }
    )
    return data;
}

// create a default bookmark item
function defaultItem()
{
    return {title: "", url: "", creationdate: new Date(), modified: new Date()};
}
I want to create something like this:
function checkUrl(url)
{
    // dbvalues would be the URLs read from the database
    if (dbvalues == url) {
        return true;
    } else {
        return false;
    }
}
But I haven't a clue how to read all the data from the table and compare it with the URL given to the function.
Can somebody please help me out?
I'm a complete noob with SQL stuff
Using Qt Quick 1.1 on Symbian

Why don't you try something like this:
function checkURL(url)
{
    var exists = false;
    db.readTransaction(
        function(tx) {
            var sql = "SELECT url FROM bookmarks WHERE url = ? LIMIT 1";
            var rs = tx.executeSql(sql, [url]);
            exists = rs.rows.length > 0;
        }
    )
    return exists;
}
The LIMIT 1 clause tells the database engine to stop searching as soon as it finds the first row matching your criteria.
I haven't tried this myself, but it should work.
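As a quick usage sketch (untested; it assumes checkURL is added to the same .pragma library file, openDB() has been called first, and the URLs are just placeholders):
// somewhere at launch
openDB();

// store a bookmark, then test for it
var item = defaultItem();
item.title = "Example";
item.url = "http://example.com"; // hypothetical URL
createBookmark(item);

console.log(checkURL("http://example.com"));            // true
console.log(checkURL("http://not-bookmarked.example")); // false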

Related

Problems with API Key

I'm having some difficulties trying to access the ontologies of AgroPortal: it says my API key is not valid, but I created an account and was given an API key.
I'm trying to do the same thing I did with BioPortal, since the API is the same, but it only works with BioPortal. My code is like this:
function getAgroPortalOntologies() {
    var searchString = "http://data.agroportal.lirmm.fr/ontologies?apikey=72574b5d-b741-42a4-b449-4c1b64dda19a&display_links=false&display_context=false";
    // we cache results and try to retrieve them on every new execution.
    var cache = CacheService.getPrivateCache();
    var text;
    if (cache.get("ontologies_fragments") == null) {
        text = UrlFetchApp.fetch(searchString).getContentText();
        splitResultAndCache(cache, "ontologies", text);
    } else {
        text = getCacheResultAndMerge(cache, "ontologies");
    }
    var doc = JSON.parse(text);
    var ontologies = doc;
    var ontologyDictionary = {};
    for (var ontologyIndex in doc) {
        var ontology = doc[ontologyIndex];
        ontologyDictionary[ontology.acronym] = {"name": ontology.name, "uri": ontology["@id"]};
    }
    return sortOnKeys(ontologyDictionary);
}
var result2 = UrlFetchApp.fetch("http://data.agroportal.lirmm.fr/annotator", options).getContentText();
What I did with BioPortal is very similar:
function getBioPortalOntologies() {
    var searchString = "http://data.bioontology.org/ontologies?apikey=df3b13de-1ff4-4396-a183-80cc845046cb&display_links=false&display_context=false";
    // we cache results and try to retrieve them on every new execution.
    var cache = CacheService.getPrivateCache();
    var text;
    if (cache.get("ontologies_fragments") == null) {
        text = UrlFetchApp.fetch(searchString).getContentText();
        splitResultAndCache(cache, "ontologies", text);
    } else {
        text = getCacheResultAndMerge(cache, "ontologies");
    }
    var doc = JSON.parse(text);
    var ontologies = doc;
    var ontologyDictionary = {};
    for (var ontologyIndex in doc) {
        var ontology = doc[ontologyIndex];
        ontologyDictionary[ontology.acronym] = {"name": ontology.name, "uri": ontology["@id"]};
    }
    return sortOnKeys(ontologyDictionary);
}
var result = UrlFetchApp.fetch("http://data.bioontology.org/annotator", options).getContentText();
Can someone help me?
Thanks, my regards.

How to roll back a transaction in Entity Framework?

I have two database tables named Courses and Transactions. Courses stores the details of a particular course, and Transactions stores the details of the transactions performed by a particular user.
My question is: how can I make sure that an entry in the Courses table is saved only when the transactions (add, edit, delete) for that particular course are saved into the Transactions table?
The controller action is:
// POST: /Course/Add
[HttpPost]
public ActionResult Add(CourseVM _mdlCourseVM)
{
    string actionName = this.ControllerContext.RouteData.Values["action"].ToString();
    string controllerName = this.ControllerContext.RouteData.Values["controller"].ToString();
    Course _course = new Course();
    _course.Duration = _mdlCourseVM.Course.Duration;
    _course.DurationMode = _mdlCourseVM.DurationModeId;
    _course.InstalmentFee = _mdlCourseVM.Course.InstalmentFee;
    _course.Name = _mdlCourseVM.Course.Name;
    _course.SingleFee = _mdlCourseVM.Course.SingleFee;
    _db.Courses.Add(_course);
    int i = _db.SaveChanges();
    if (i > 0)
    {
        Common _cmn = new Common();
        // Add the transaction details
        int k = _cmn.AddTransactions(actionName, controllerName, "");
        // Want to commit changes to the course table here
        if (k > 0)
        {
            _db.commitTransaction();
        }
        // Want to roll back the committed transaction
        else
        {
            _db.rollbackTransaction();
        }
    }
}
I solved the above scenario by using System.Transactions. I hope anyone with the same scenario finds it useful.
using (TransactionScope _ts = new TransactionScope())
{
    string actionName = this.ControllerContext.RouteData.Values["action"].ToString();
    string controllerName = this.ControllerContext.RouteData.Values["controller"].ToString();
    Course _course = new Course();
    _course.Duration = _mdlCourseVM.Course.Duration;
    _course.DurationMode = _mdlCourseVM.DurationModeId;
    _course.InstalmentFee = _mdlCourseVM.Course.InstalmentFee;
    _course.Name = _mdlCourseVM.Course.Name;
    _course.SingleFee = _mdlCourseVM.Course.SingleFee;
    _db.Courses.Add(_course);
    int i = _db.SaveChanges();
    if (i > 0)
    {
        Common _cmn = new Common();
        int k = _cmn.AddTransactions(actionName, controllerName, "", Session["UserId"].ToString());
        if (k > 0)
        {
            _ts.Complete();
            return Json(new { message = "success" }, JsonRequestBehavior.AllowGet);
        }
        else
        {
            return Json(new { message = "error" }, JsonRequestBehavior.AllowGet);
        }
    }
    else
    {
        return Json(new { message = "error" }, JsonRequestBehavior.AllowGet);
    }
}
}

Google BigQuery returns only partial table data with C# application using .NET client library

I am trying to execute a query (a basic SELECT statement with 10 fields). My table contains more than 500k rows, but the C# application returns a response with only 4,260 rows, whereas the Web UI returns all the records.
Why does my code return only partial data, and what is the best way to select all the records and load them into a C# DataTable? A code snippet would be very helpful.
using Google.Apis.Auth.OAuth2;
using System.IO;
using System.Threading;
using Google.Apis.Bigquery.v2;
using Google.Apis.Bigquery.v2.Data;
using System.Data;
using Google.Apis.Services;
using System;
using System.Security.Cryptography.X509Certificates;

namespace GoogleBigQuery
{
    public class Class1
    {
        private static void Main()
        {
            try
            {
                Console.WriteLine("Start Time: {0}", DateTime.Now.ToString());
                String serviceAccountEmail = "SERVICE ACCOUNT EMAIL";
                var certificate = new X509Certificate2(@"KeyFile.p12", "notasecret", X509KeyStorageFlags.Exportable);
                ServiceAccountCredential credential = new ServiceAccountCredential(
                    new ServiceAccountCredential.Initializer(serviceAccountEmail)
                    {
                        Scopes = new[] { BigqueryService.Scope.Bigquery, BigqueryService.Scope.BigqueryInsertdata, BigqueryService.Scope.CloudPlatform, BigqueryService.Scope.DevstorageFullControl }
                    }.FromCertificate(certificate));
                BigqueryService Service = new BigqueryService(new BaseClientService.Initializer()
                {
                    HttpClientInitializer = credential,
                    ApplicationName = "PROJECT NAME"
                });
                string query = "SELECT * FROM [publicdata:samples.shakespeare]";
                JobsResource j = Service.Jobs;
                QueryRequest qr = new QueryRequest();
                string ProjectID = "PROJECT ID";
                qr.Query = query;
                qr.MaxResults = Int32.MaxValue;
                qr.TimeoutMs = Int32.MaxValue;
                DataTable DT = new DataTable();
                int i = 0;
                QueryResponse response = j.Query(qr, ProjectID).Execute();
                string pageToken = null;
                if (response.JobComplete == true)
                {
                    if (response != null)
                    {
                        int colCount = response.Schema.Fields.Count;
                        if (DT == null)
                            DT = new DataTable();
                        if (DT.Columns.Count == 0)
                        {
                            foreach (var Column in response.Schema.Fields)
                            {
                                DT.Columns.Add(Column.Name);
                            }
                        }
                        pageToken = response.PageToken;
                        if (response.Rows != null)
                        {
                            foreach (TableRow row in response.Rows)
                            {
                                DataRow dr = DT.NewRow();
                                for (i = 0; i < colCount; i++)
                                {
                                    dr[i] = row.F[i].V;
                                }
                                DT.Rows.Add(dr);
                            }
                        }
                        Console.WriteLine("No of Records are Readed: {0} @ {1}", DT.Rows.Count.ToString(), DateTime.Now.ToString());
                        while (true)
                        {
                            int StartIndexForQuery = DT.Rows.Count;
                            Google.Apis.Bigquery.v2.JobsResource.GetQueryResultsRequest SubQR = Service.Jobs.GetQueryResults(response.JobReference.ProjectId, response.JobReference.JobId);
                            SubQR.StartIndex = (ulong)StartIndexForQuery;
                            //SubQR.MaxResults = Int32.MaxValue;
                            GetQueryResultsResponse QueryResultResponse = SubQR.Execute();
                            if (QueryResultResponse != null)
                            {
                                if (QueryResultResponse.Rows != null)
                                {
                                    foreach (TableRow row in QueryResultResponse.Rows)
                                    {
                                        DataRow dr = DT.NewRow();
                                        for (i = 0; i < colCount; i++)
                                        {
                                            dr[i] = row.F[i].V;
                                        }
                                        DT.Rows.Add(dr);
                                    }
                                }
                                Console.WriteLine("No of Records are Readed: {0} @ {1}", DT.Rows.Count.ToString(), DateTime.Now.ToString());
                                if (null == QueryResultResponse.PageToken)
                                {
                                    break;
                                }
                            }
                            else
                            {
                                break;
                            }
                        }
                    }
                    else
                    {
                        Console.WriteLine("Response is null");
                    }
                }
                int TotalCount = 0;
                if (DT != null && DT.Rows.Count > 0)
                {
                    TotalCount = DT.Rows.Count;
                }
                else
                {
                    TotalCount = 0;
                }
                Console.WriteLine("End Time: {0}", DateTime.Now.ToString());
                Console.WriteLine("No. of records readed from google bigquery service: " + TotalCount.ToString());
            }
            catch (Exception e)
            {
                Console.WriteLine("Error Occurred: " + e.Message);
            }
            Console.ReadLine();
        }
    }
}
This sample query gets results from a public dataset. The table contains 164,656 rows, but the response returns only 85,000 rows the first time; I then have to query again to get the next set of results (but I don't know whether that is the only way to get all the results).
That sample contains only 4 fields, and even then it does not return all rows. In my case the table contains more than 15 fields, and I get a response of ~4,000 rows out of ~10k, so I need to query again and again to get the remaining results. Selecting 1,000 rows takes up to 2 minutes with my approach, so I am looking for the best way to select all the records in a single response.
Answer from user @Pentium10:
There is no way to run a query and get a large response in a single shot. You can either paginate the results or, if you can, create a job to export to files, then use the generated files in your app. Exporting is free.
Steps to run a large query and export the results to files stored on GCS:
1) Set allowLargeResults to true in your job configuration. You must also specify a destination table when the allowLargeResults flag is set.
Example:
"configuration":
{
"query":
{
"allowLargeResults": true,
"query": "select uid from [project:dataset.table]"
"destinationTable": [project:dataset.table]
}
}
2) Now your data is in the destination table you set. You need to create a new job and set the extract property to export the table to file(s). Exporting is free, but you need to have Google Cloud Storage activated to put the resulting files there.
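For illustration, the extract job configuration could look something like this (a sketch in the same fragment style as the query configuration above; the table reference and bucket path are placeholders):
"configuration":
{
    "extract":
    {
        "sourceTable":
        {
            "projectId": "project",
            "datasetId": "dataset",
            "tableId": "table"
        },
        "destinationUris": ["gs://your-bucket/export-*.json"],
        "destinationFormat": "NEWLINE_DELIMITED_JSON"
    }
}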
3) In the end you download your large files from GCS.
Now it's my turn to design the solution for better results.
Hoping this might help someone: you can retrieve the next set of paginated results using PageToken. Here is sample code showing how to use PageToken, although I liked the idea of exporting for free. Here I write rows to a flat file, but you could add them to your DataTable. Obviously, it is a bad idea to keep a large DataTable in memory, though.
public void ExecuteSQL(BigqueryService bqservice, String ProjectID)
{
    string sSql = "SELECT r.Dealname, r.poolnumber, r.loanid FROM [MBS_Dataset.tblRemitData] R left join each [MBS_Dataset.tblOrigData] o on R.Dealname = o.Dealname and R.Poolnumber = o.Poolnumber and R.LoanID = o.LoanID Order by o.Dealname, o.poolnumber, o.loanid limit 100000";
    QueryRequest _r = new QueryRequest();
    _r.Query = sSql;
    QueryResponse _qr = bqservice.Jobs.Query(_r, ProjectID).Execute();
    string pageToken = null;
    if (_qr.JobComplete != true)
    {
        // job not finished yet! expecting more data
        while (true)
        {
            var resultReq = bqservice.Jobs.GetQueryResults(_qr.JobReference.ProjectId, _qr.JobReference.JobId);
            resultReq.PageToken = pageToken;
            var result = resultReq.Execute();
            if (result.JobComplete == true)
            {
                WriteRows(result.Rows, result.Schema.Fields);
                pageToken = result.PageToken;
                if (pageToken == null)
                    break;
            }
        }
    }
    else
    {
        List<string> _fieldNames = _qr.Schema.Fields.ToList().Select(x => x.Name).ToList();
        WriteRows(_qr.Rows, _qr.Schema.Fields);
    }
}
The Web UI automatically flattens the data. This means that you see multiple rows for each nested field.
When you run the same query via the API, it won't be flattened, and you get fewer rows, as the nested fields are returned as objects. You should check whether this is the case for you.
The other point is that you do indeed need to paginate through the results. The documentation on paging through list results explains how.
If you want to do only one job, then you should write your query output to a table, then export the table as JSON and download the export from GCS.

Yii check scenario in beforeSave()

For one action I need to transform $album_id before saving it to the DB.
In the model's beforeSave() I do:
// convert album -> album_id
$album_id = array();
foreach ($this->string2array($this->album, '\|') as $one)
    $album_id[] = Album::model()->findByAttributes(array('album' => $one))->id;
$this->album_id = $this->array2string($album_id);
but for another action I don't need this transformation, because $album_id is already in the proper state. So I set the scenario 'batchcreate' in that action:
public function actionCreate()
{
    Yii::import('ext.multimodelform.MultiModelForm');
    $model = new Album('create');
    $song = new Song();
    $song->setScenario('batchcreate');
    ...
}
and try to check this scenario in the model:
if (!($this->scenario === 'batchcreate')) {
    // convert album -> album_id
    $album_id = array();
    foreach ($this->string2array($this->album, '\|') as $one)
        $album_id[] = Album::model()->findByAttributes(array('album' => $one))->id;
    $this->album_id = $this->array2string($album_id);
}
but the condition is always true. Why is my scenario not set, or not checked, in the if statement?
Or maybe it's better to check not the scenario but another variable; if so, how do I set its value for the two different cases?
My whole beforeSave():
protected function beforeSave()
{
    if (parent::beforeSave())
    {
        // convert whoes -> who
        $who = array();
        foreach ($this->string2array($this->whoes) as $one) {
            $userrow = User::model()->findByAttributes(array('username' => $one));
            if ($userrow) $who[] = CHtml::encode($userrow->id);
            else $who[] = $one;
        }
        $this->who = $this->array2string($who);
        //var_dump($this->scenario);
        if (!($this->scenario == 'batchcreate')) {
            //if($this->notbatchcreate == 'yes') {
            // convert album -> album_id
            $album_id = array();
            foreach ($this->string2array($this->album, '\|') as $one)
                $album_id[] = Album::model()->findByAttributes(array('album' => $one))->id;
            $this->album_id = $this->array2string($album_id);
        }
        return true;
    }
    else
        return false;
}
Instead of
$song = new Song();
$song->setScenario('batchcreate');
you can simply do
$song = new Song('batchcreate');
In beforeSave():
if ($this->scenario != 'batchcreate') {
    echo "good - scenario is not batchcreate";
    die();
}
echo 'nope...';
var_dump($this->scenario);
die();
Switch the order: call parent::beforeSave() after your code for checking the scenario. The inherited method beforeSave() may be altering your scenario.

Optimize a SQL query for a CouchDB view

How do I optimize this SQL query for a CouchDB view?
SELECT * FROM db WHERE user = '$userid' OR userFollowed = '$userid'
The CouchDB database contains documents with this structure:
_id
user
userFollowed
This is because a user can follow another and vice versa, and my goal is to get all followers of user A whom user A follows in turn, for example:
A follows B
B follows A
In this example I need to get B, establishing that both users are followers of each other... I know it's complex to explain and understand, but I'll show what I'm doing with node.js and cradle.
The view map:
function (doc) {
    emit(doc.user, doc.userFollowed);
}
The node.js code:
db.view("followers/getFollowers", function(err, resp) {
if (!err) {
var followers = [];
resp.forEach(function(key, value, id) {
var bothFollow = false;
if (key == userID) {
if (followers.indexOf(value) == -1) {
resp.forEach(function(key, value, id) {
if (value == userID)
bothFollow = true;
});
if (bothFollow)
followers.push(value);
}
} else if (value == userID) {
if (followers.indexOf(key) == -1) {
resp.forEach(function(key, value, id) {
if (key == userID)
bothFollow = true;
});
if (bothFollow)
followers.push(key);
}
}
});
console.log(followers);
}
});
So in the code I first check whether the key or value corresponds to the other user, then check with another loop whether there is a relationship, and if so put the follower in the list.
All this code works, but I don't think it's the correct approach, and maybe I'm getting something wrong :( Can you help me please?
It is easier to emit both users in the view function:
function (doc) {
    emit(doc.user, null);
    emit(doc.userFollowed, null);
}
Then you can just call the view and you will get a plain list:
http://localhost:5984/db/_design/app/_view/followers?include_docs=true
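A minimal sketch of the same call from node.js with cradle (assuming the design document is named app and the view followers, as in the URL above; userID is whatever id you are checking):
db.view("app/followers", { key: userID }, function(err, resp) {
    if (!err) {
        // each row is one follow document that involves userID,
        // either as doc.user or as doc.userFollowed
        resp.forEach(function(key, value, id) {
            console.log(id); // _id of the matching follow document
        });
    }
});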