The database X is currently being loaded, but after 5 seconds, this request has been aborted - ravendb

I am getting the error in the title when executing the code below:
var documentStore = new DocumentStore
{
ConnectionStringName = RavenDbInstance
};
documentStore.Initialize();
using (var session = documentStore.OpenSession())
{
registered = session.Query<TivoBoxMigrationServiceModel>()
.FirstOrDefault(x => x.AccountNumber == accountNumber && x.AreaCode == areaCode);
}
This does not happen all the time, maybe 2-3 times a day.

Related

Improve Batch API Requests Response Time

I am sending requests to an API in batches from a .NET Core 3.1 project. The response time is already quick, but is there anything further I can do? In my actual scenario, I will be sending 700 requests, 5 at a time.
// Fetches users in batches of 5: requests inside a batch run concurrently,
// while the batches themselves are processed one after another.
static async Task ThreadRequets()
{
    // Sample ids; the real scenario sends ~700 of these.
    var userIds = new List<string> { "1", "2", "3", "4", "2757", "2756" };

    var watch = Stopwatch.StartNew();
    foreach (var batch in BuildChunksWithLinqAndYield(userIds, 5))
    {
        // Kick off every request in the batch, then wait for all of them.
        await Task.WhenAll(batch.Select(userId => getUser(userId)));
    }
    watch.Stop();
}
/// <summary>
/// Lazily splits <paramref name="fullList"/> into consecutive chunks of at
/// most <paramref name="batchSize"/> items each.
/// </summary>
/// <param name="fullList">The list to split; must not be null.</param>
/// <param name="batchSize">Maximum chunk size; must be positive.</param>
/// <returns>A lazy sequence of chunks covering the whole list in order.</returns>
/// <exception cref="ArgumentNullException">Thrown (on first enumeration) when the list is null.</exception>
/// <exception cref="ArgumentOutOfRangeException">Thrown (on first enumeration) when batchSize is not positive.</exception>
static IEnumerable<IEnumerable<T>> BuildChunksWithLinqAndYield<T>(List<T> fullList, int batchSize)
{
    if (fullList is null) throw new ArgumentNullException(nameof(fullList));
    // FIX: the original looped forever for batchSize <= 0 ('total' never advanced).
    if (batchSize <= 0) throw new ArgumentOutOfRangeException(nameof(batchSize));

    for (int start = 0; start < fullList.Count; start += batchSize)
    {
        // GetRange copies each slice once, avoiding the O(n^2) cost of
        // re-scanning the list with Skip() for every chunk.
        yield return fullList.GetRange(start, Math.Min(batchSize, fullList.Count - start));
    }
}
/// <summary>
/// Fetches a single user from the gorest API and returns its "name" field,
/// or an empty string when the payload carries no "name" property.
/// NOTE(review): constructing a RestClient (and its underlying HttpClient)
/// per call risks socket exhaustion under load; prefer one shared client
/// with the user id supplied per request — confirm against RestSharp docs.
/// </summary>
/// <param name="userID">Id interpolated into the request URL.</param>
static async Task<string> getUser(string userID)
{
    using (var restClient = new RestClient($"https://gorest.co.in/public/v2/users/{userID}"))
    {
        var request = new RestRequest();
        request.AddHeader("Authorization", "Bearer mytoken");
        RestResponse response = await restClient.GetAsync(request);
        JObject result = JObject.Parse(response.Content);
        // FIX: result["name"] is null for error payloads (e.g. 404 bodies),
        // and .ToString() on it threw NullReferenceException.
        return result["name"]?.ToString() ?? string.Empty;
    }
}

RavenDB OnBeforeStore does not fire using BulkInsert

I cannot get OnBeforeStore to fire using a BulkInsert.
It fires fine during a regular Store operation.
I'm trying to use an Invoice number generator to add a formatted number and I'd like to do that in OnBeforeStore.
See code example:
// Demonstrates that the store-level OnBeforeStore event is raised by
// session.SaveChanges() but NOT by BulkInsert: the bulk-insert path writes
// through the DocumentStore directly and bypasses session events.
static async Task GenerateInvoiceTest()
{
using var store = new DocumentStore
{
Urls = new string[] { "https://localhost:8080" },
Database = "APC",
};
//this never fires using BulkInsert
store.OnBeforeStore += (s, e) =>
{
// Assign a random invoice number, but only to Invoice entities that
// don't already have one (InvoiceNumber == 0 means "unset" here).
if (!(e.Entity is Invoice invoice)) return;
if (invoice.InvoiceNumber != 0) return;
invoice.InvoiceNumber = new Random().Next(1, 1000);
};
store.Initialize();
//sample invoices
var invoices = new List<Invoice>
{
new Invoice { StartDate = DateTime.Now, EndDate = DateTime.Now.AddDays(3) },
new Invoice { StartDate = DateTime.Now, EndDate = DateTime.Now.AddDays(3) },
};
//bulk insert test
using var session = store.OpenAsyncSession();
using var bulkInsert = store.BulkInsert();
// Note: these Store calls go through the bulk-insert operation, not the
// session, so the session's SaveChangesAsync below has nothing to flush.
invoices.ForEach(i => bulkInsert.Store(i));
//this does NOT fire OnBeforeStore
await session.SaveChangesAsync();
foreach (var invoice in invoices)
{
//always prints 0
Console.WriteLine(invoice.InvoiceNumber);
}
//regular test
var otherInvoice = new Invoice { StartDate = DateTime.Now, EndDate = DateTime.Now.AddDays(3) };
await session.StoreAsync(otherInvoice);
//this DOES fire OnBeforeStore
await session.SaveChangesAsync();
}
OnBeforeStore is invoked as part of the Session SaveChanges method
See this documentation about OnBeforeStore
http://localhost:54391/docs/article-page/5.0/Csharp/client-api/session/how-to/subscribe-to-events
The event takes argument BeforeStoreEventArgs that consists of the Session entity's ID and the entity itself.
You defined OnBeforeStore on the DocumentStore, but it is not used by BulkInsert.
It applies only when saving changes from a Session.
BulkInsert operates on the DocumentStore itself, not on the Session.

Azure Logic Apps internal server error 500

I am trying to create an Azure Function that is triggered from a Logic App.
The function's purpose is to crawl certain web sites, extract the desired information, and compare it against a SQL Server database in Azure to check whether we already have that information; if not, it should be added.
My issue is that whenever I run it I get an HTTP 500 (Internal Server Error). I assume the database access is the cause. Help?
// HTTP entry point: reads the raw request body and forwards it to the
// crawler, always selecting blog 0.
public static async Task<IActionResult> Run(
[HttpTrigger(AuthorizationLevel.Function, "get", "post", Route = null)] HttpRequest req, ILogger log
)
{
    log.LogInformation("C# HTTP trigger function processed a request.");
    // The body is passed through verbatim; CrawlBlog only echoes it back.
    string requestBody = await new StreamReader(req.Body).ReadToEndAsync();
    return await CrawlBlog(0, requestBody);
}
/// <summary>
/// Crawls the blog selected by <paramref name="Picker"/>, scrapes the latest
/// post's title/summary/link, and hands them to <c>SqlCheck</c> for
/// duplicate detection and insertion.
/// </summary>
/// <param name="Picker">0 or 1 selects a configured blog URL; any other value is rejected.</param>
/// <param name="req">Raw request body; only used as initial response text.</param>
/// <returns>An OkObjectResult wrapping a status message.</returns>
private static async Task<IActionResult> CrawlBlog(int Picker, string req)
{
    int BlogPicker = Picker;
    string TheResult = req;
    //Get the url we want to test
    var Url = "";
    if (BlogPicker == 0)
    {
        Url = "*********";
    }
    else if (BlogPicker == 1)
    {
        Url = "*********";
    }
    else
    {
        TheResult = "False we got a wrong pick";
        return (ActionResult)new OkObjectResult(new { TheResult });
    }
    // NOTE(review): HttpClient should be a single shared static instance;
    // newing one up per invocation can exhaust sockets under load (see
    // "Manage connections in Azure Functions").
    var httpClient = new HttpClient();
    var html = await httpClient.GetStringAsync(Url);
    var htmlDocument = new HtmlDocument();
    htmlDocument.LoadHtml(html);
    //a list to add all available blogs we found
    var Blog = new List<BlogStats>();
    switch (BlogPicker)
    {
        case 0:
        {
            var divs =
                htmlDocument.DocumentNode.Descendants("div")
                    .Where(node => node.GetAttributeValue("class", "").Equals("home_blog_sec_text")).ToList();
            foreach (var divo in divs)
            {
                // FIX: FirstOrDefault() returns null when the markup changes;
                // guard instead of crashing with NullReferenceException.
                var firstAnchor = divo.Descendants("a").FirstOrDefault();
                var Blogo = new BlogStats
                {
                    Summary = divo.Descendants("p").FirstOrDefault()?.InnerText ?? "",
                    Link = firstAnchor?.ChildAttributes("href").FirstOrDefault()?.Value ?? "",
                    Title = firstAnchor?.InnerText ?? ""
                };
                Blog.Add(Blogo);
            }
            break;
        }
        case 1:
        {
            var divs =
                htmlDocument.DocumentNode.Descendants("div")
                    .Where(node => node.GetAttributeValue("class", "").Equals("post_header_title two_third last")).ToList();
            foreach (var divo in divs)
            {
                var ThePs = divo.Descendants("p").ToList();
                var Blogo = new BlogStats
                {
                    // FIX: index/null guards for the same reason as case 0.
                    Summary = ThePs.Count > 1 ? ThePs[1].GetDirectInnerText() : "",
                    Link = divo.Descendants("a").LastOrDefault()?.ChildAttributes("href").FirstOrDefault()?.Value ?? "",
                    Title = divo.Descendants("a").FirstOrDefault()?.InnerText ?? ""
                };
                Blog.Add(Blogo);
            }
            break;
        }
    }
    // FIX: Blog[0] threw ArgumentOutOfRangeException — surfacing as the
    // reported HTTP 500 — whenever scraping produced no entries.
    if (Blog.Count == 0)
    {
        TheResult = "No blog entries were found at the crawled page";
        return (ActionResult)new OkObjectResult(new { TheResult });
    }
    TheResult = await SqlCheck(Blog[0].Title, Blog[0].Summary, Blog[0].Link);
    return (ActionResult)new OkObjectResult(new { TheResult });
}
/// <summary>
/// Returns a duplicate notice when a row with the same link already exists in
/// TableBoto; otherwise inserts the (title, summary, link) triple and reports
/// success. Exceptions propagate to the caller so failures are visible
/// instead of being silently swallowed.
/// </summary>
/// <param name="Tit">Blog post title.</param>
/// <param name="Sumy">Blog post summary.</param>
/// <param name="Lin">Blog post link; used as the uniqueness key.</param>
public static async Task<string> SqlCheck(string Tit, string Sumy, string Lin)
{
    var builder = new SqlConnectionStringBuilder
    {
        DataSource = "flygon.database.windows.net",
        UserID = "*****",
        Password = "********",
        InitialCatalog = "torkoal"
    };

    // FIX: 'using' guarantees the connection is closed even when a command
    // throws; the original leaked the open connection on any exception.
    using (SqlConnection connection = new SqlConnection(builder.ConnectionString))
    {
        await connection.OpenAsync();

        // FIX: T-SQL parameter placeholders use '@'; the original '#id3'
        // token never bound the parameter. COUNT(*) also replaces the
        // SELECT * + DataSet round trip, which only needed the row count.
        SqlCommand checkCommand = new SqlCommand("SELECT COUNT(*) FROM TableBoto WHERE Link = @id3", connection);
        checkCommand.Parameters.AddWithValue("@id3", Lin);
        int existing = (int)await checkCommand.ExecuteScalarAsync();
        if (existing > 0)
        {
            return $" We got a Duplicates in title : {Tit}";
        }

        // FIX: the INSERT was built by string concatenation — a SQL-injection
        // hole that also broke on embedded quotes. It is now parameterized,
        // and ExecuteNonQueryAsync replaces the misused data reader.
        SqlCommand insertCommand = new SqlCommand(
            "insert into TableBoto(Title,Summary,Link) values(@title,@summary,@link);", connection);
        insertCommand.Parameters.AddWithValue("@title", Tit);
        insertCommand.Parameters.AddWithValue("@summary", Sumy);
        insertCommand.Parameters.AddWithValue("@link", Lin);
        await insertCommand.ExecuteNonQueryAsync();
    }
    return $" Success Ign +{Tit} + Ign {Sumy}+ Ign {Lin} Ign Success SQL ";
}
}
The HTTP 500 status code is a generic response meaning the server could not process the request due to some internal issue. The first step would be to add exception handling to your function so you can see whether a failure occurs and where it occurs.
On a side note, you should not use HttpClient the way it is used in this code: do not create a new instance every time your function executes — the client should be a single static instance. See "Manage connections in Azure Functions".

RavenDb expectation of performance to query documents that number in the millions

I was able to load a couple of million documents with the embedded version of RavenDB — pretty slick!
Now I'm trying to query those items and am finding that the performance is not what I had expected, near instantaneous if possible, but instead upwards of 18 seconds on a fairly beefy machine.
Below, you'll find my naive code.
Note: I have now resolved this, and the final code is at the bottom of the post. The take away is that you need indexes, they have to be of the right type, and RavenDB needs to be made aware of them. VERY pleased with the perf and quality of the returned records via the query engine.
Thank you,
Stephen
using (var store = new EmbeddableDocumentStore { DataDirectory = #"C:\temp\ravendata" }.Initialize())
{
using (IDocumentSession session = store.OpenSession())
{
var q = session.Query<Product>().Where(x => x.INFO2.StartsWith("SYS")).ToList();
}
}
[Serializable]
public class Product
{
public decimal ProductId { get; set; }
....
public string INFO2 { get; set; }
}
EDIT
I added this class
/// <summary>
/// Static RavenDB index over Product documents that maps INFO2 into the
/// index and marks the field as analyzed so text queries are tokenized.
/// </summary>
public class InfoIndex_Search : AbstractIndexCreationTask<Product>
{
    public InfoIndex_Search()
    {
        // Project each product's INFO2 value into the index entry.
        Map = products => products.Select(product => new { Info2Index = product.INFO2 });
        // Analyze INFO2 rather than indexing it verbatim.
        Index(x => x.INFO2, FieldIndexing.Analyzed);
    }
}
and changed the calling method to look like this.
using (var store = new EmbeddableDocumentStore { DataDirectory = #"C:\temp\ravendata" }.Initialize())
{
// Tell Raven to create our indexes.
IndexCreation.CreateIndexes(Assembly.GetExecutingAssembly(), store);
List<Product> q = null;
using (IDocumentSession session = store.OpenSession())
{
q = session.Query<Product>().Where(x => x.INFO2.StartsWith("SYS")).ToList();
watch.Stop();
}
}
But I'm still seeing 18 seconds to do the search. What am I missing? On another note, there are quite a few new files in the C:\temp\ravendata\Indexes\InfoIndex%2fSearch folder — although not nearly as many as when I inserted the data — and they seem to have all but disappeared after running this code a few times while trying to query. Should IndexCreation.CreateIndexes(Assembly.GetExecutingAssembly(), store); be called prior to insert, and only then?
EDIT1
Using this code I was able to get the query to happen almost in an instance, but it seems you can only run this once, so that begs the question. Where does this get run and what are the proper initialization procedures?
store.DatabaseCommands.PutIndex("ProdcustByInfo2", new IndexDefinitionBuilder<Product>
{
Map = products => from product in products
select new { product.INFO2 },
Indexes =
{
{ x => x.INFO2, FieldIndexing.Analyzed}
}
});
EDIT2: working example
// Loads products into an embedded RavenDB store (first run only), ensures a
// static analyzed index on INFO2 exists, and times a StartsWith query.
// NOTE(review): '#"C:\temp\ravendata"' looks like a formatting artifact of
// the verbatim string @"C:\temp\ravendata" — confirm against the real source.
static void Main()
{
Stopwatch watch = Stopwatch.StartNew();
int q = 0;
using (var store = new EmbeddableDocumentStore { DataDirectory = #"C:\temp\ravendata" }.Initialize())
{
// Create the index only on the first run; PutIndex against an existing
// index name would fail, so probe with GetIndex first.
if (store.DatabaseCommands.GetIndex("ProdcustByInfo2") == null)
{
store.DatabaseCommands.PutIndex("ProdcustByInfo2", new IndexDefinitionBuilder<Product>
{
Map = products => from product in products
select new { product.INFO2 },
Indexes = { { x => x.INFO2, FieldIndexing.Analyzed } }
});
}
watch.Stop();
Console.WriteLine("Time elapsed to create index {0}{1}", watch.ElapsedMilliseconds, System.Environment.NewLine);
watch = Stopwatch.StartNew();
// Count existing documents to decide whether the data load is needed.
using (IDocumentSession session = store.OpenSession())
{
q = session.Query<Product>().Count();
}
watch.Stop();
Console.WriteLine("Time elapsed to query for products values {0}{1}", watch.ElapsedMilliseconds, System.Environment.NewLine);
Console.WriteLine("Total number of products loaded: {0}{1}", q, System.Environment.NewLine);
// Empty store: parse the source data and bulk-load it.
if (q == 0)
{
watch = Stopwatch.StartNew();
var productsList = Parsers.GetProducts().ToList();
watch.Stop();
Console.WriteLine("Time elapsed to parse: {0}{1}", watch.ElapsedMilliseconds, System.Environment.NewLine);
Console.WriteLine("Total number of items parsed: {0}{1}", productsList.Count, System.Environment.NewLine);
watch = Stopwatch.StartNew();
productsList.RemoveAll(_ => _ == null);
watch.Stop();
Console.WriteLine("Time elapsed to remove null values {0}{1}", watch.ElapsedMilliseconds, System.Environment.NewLine);
Console.WriteLine("Total number of items loaded: {0}{1}", productsList.Count, System.Environment.NewLine);
watch = Stopwatch.StartNew();
int batch = 0;
// Store in batches of 128: flush and recycle the session periodically so
// it does not accumulate every tracked entity in memory.
var session = store.OpenSession();
foreach (var product in productsList)
{
batch++;
session.Store(product);
if (batch % 128 == 0)
{
session.SaveChanges();
session.Dispose();
session = store.OpenSession();
}
}
// Flush the final partial batch.
session.SaveChanges();
session.Dispose();
watch.Stop();
Console.WriteLine("Time elapsed to populate db from collection {0}{1}", watch.ElapsedMilliseconds, System.Environment.NewLine);
}
watch = Stopwatch.StartNew();
// The timed query this whole program exists to measure.
using (IDocumentSession session = store.OpenSession())
{
q = session.Query<Product>().Where(x => x.INFO2.StartsWith("SYS")).Count();
}
watch.Stop();
Console.WriteLine("Time elapsed to query for term {0}{1}", watch.ElapsedMilliseconds, System.Environment.NewLine);
Console.WriteLine("Total number of items found: {0}{1}", q, System.Environment.NewLine);
}
Console.ReadLine();
}
First, do you have an index covering INFO2?
Second, see Daniel Lang's "Searching on string properties in RavenDB" blog post here:
http://daniellang.net/searching-on-string-properties-in-ravendb/
If it helps, here's how I created an index:
/// <summary>
/// Static RavenDB index of LogMessage documents keyed by their creation time.
/// </summary>
public class LogMessageCreatedTime : AbstractIndexCreationTask<LogMessage>
{
    public LogMessageCreatedTime()
    {
        // Project each message's creation timestamp into the index entry.
        Map = messages => messages.Select(message => new { MessageCreatedTime = message.MessageCreatedTime });
    }
}
And how I added it at runtime:
/// <summary>
/// Builds and initializes the application's DocumentStore and registers all
/// index classes from this assembly. The store is disposed if any part of
/// setup fails, so a half-initialized instance is never returned.
/// </summary>
private static DocumentStore GetDatabase()
{
    var documentStore = new DocumentStore { ConnectionStringName = "RavenDb" };
    try
    {
        documentStore.Initialize();
        // Tell Raven to create our indexes.
        IndexCreation.CreateIndexes(typeof(DataAccessFactory).Assembly, documentStore);
        return documentStore;
    }
    catch
    {
        // Don't leak the store when initialization or index creation throws.
        documentStore.Dispose();
        throw;
    }
}
In my case, I didn't have to query the index explicitly; it was just used when I queried normally.
As Bob hints at, you should ensure you create indexes in Raven that cover the fields you intend to query.
Raven is quite fast, and can let you go quite a way without needing to do much.
However once you start getting into large-ish document numbers, or need something non-default, you will find that you need static indexes.
There are plenty of examples on setting up and using indexes in Raven.

How to programmatically set the task outcome (task response) of a Nintex Flexi Task?

Is there any way of set a Nintex Flexi task completion through Sharepoint's web services? We have tried updating the "WorkflowOutcome", "ApproverComments" and "Status" fields without success (actually the comments and status are successfully updated, however I can find no way of updating the WorkflowOutcome system field).
I can't use the Nintex Web service (ProcessTaskResponse) because it needs the task's assigned user's credentials (login, password, domain).
The Asp.net page doesn't have that information, it has only the Sharepoint Administrator credentials.
One way is to delegate the task to the admin first, and then call ProcessTaskResponse, but it isn't efficient and is prone to errors.
In my tests so far, any update (UpdateListItems) to the WorkflowOutcome field automatically set the Status field to "Completed" and the PercentComplete field to "1" (100%), ending the task (and continuing the flow), but with the wrong answer: always "Reject", no matter what I try to set it to.
Did you try this code? (The try-catch block with redirection does the trick.)
// set the actual outcome id here, e.g. from the OutComePanel control
taskItem[Nintex.Workflow.Common.NWSharePointObjects.FieldDecision] = 0;
taskItem[Nintex.Workflow.Common.NWSharePointObjects.FieldComments] = " Some Comments";
taskItem.Update();
try
{
Nintex.Workflow.Utility.RedirectOrCloseDialog(HttpContext.Current, Web.Url);
}
catch
{
}
?
Here is my code to change the outcome of a Nintex flexi task. My problem was permissions: passing the site's system-account (admin) token when opening the site solved it.
var siteUrl = "...";
using (var tempSite = new SPSite(siteUrl))
{
var sysToken = tempSite.SystemAccount.UserToken;
using (var site = new SPSite(siteUrl, sysToken))
{
var web = site.OpenWeb();
...
var cancelled = "Cancelled";
task.Web.AllowUnsafeUpdates = true;
Hashtable ht = new Hashtable();
ht[SPBuiltInFieldId.TaskStatus] = SPResource.GetString(new CultureInfo((int)task.Web.Language, false), Strings.WorkflowStatusCompleted, new object[0]);
ht["Completed"] = true;
ht["PercentComplete"] = 1;
ht["Status"] = "Completed";
ht["WorkflowOutcome"] = cancelled;
ht["Decision"] = CommonHelper.GetFlexiTaskOutcomeId(task, cancelled);
ht["ApproverComments"] = "cancelled";
CommonHelper.AlterTask((task as SPListItem), ht, true, 5, 100);
task.Web.AllowUnsafeUpdates = false;
}
}
}
}
}
}
/// <summary>
/// Looks up the Id of the named outcome in a Nintex flexi task's
/// MultiOutcomeTaskInfo XML. Returns string.Empty when the task carries no
/// outcome info or the requested outcome name is not configured.
/// </summary>
/// <param name="task">The flexi task whose outcomes are inspected.</param>
/// <param name="outcome">Configured outcome display name (e.g. "Cancelled").</param>
public static string GetFlexiTaskOutcomeId(Microsoft.SharePoint.Workflow.SPWorkflowTask task, string outcome)
{
    if (task["MultiOutcomeTaskInfo"] == null)
    {
        return string.Empty;
    }
    // The outcome XML is stored HTML-encoded in the task field.
    string xmlOutcome = HttpUtility.HtmlDecode(task["MultiOutcomeTaskInfo"].ToString());
    if (string.IsNullOrEmpty(xmlOutcome))
    {
        return string.Empty;
    }
    XmlDocument doc = new XmlDocument();
    doc.LoadXml(xmlOutcome);
    // FIX: XPath attribute tests use '@'; the original '#Name' is not valid
    // XPath and threw at runtime.
    var node = doc.SelectSingleNode(string.Format("/MultiOutcomeResponseInfo/AvailableOutcomes/ConfiguredOutcome[@Name='{0}']", outcome));
    // FIX: SelectSingleNode returns null for an unknown outcome name; the
    // original dereferenced it unconditionally (NullReferenceException).
    if (node == null || node.Attributes["Id"] == null)
    {
        return string.Empty;
    }
    return node.Attributes["Id"].Value;
}
/// <summary>
/// Wrapper over SPWorkflowTask.AlterTask: when the task item's
/// WorkflowVersion is not 1, polls the owning workflow until it is unlocked
/// (up to <paramref name="attempts"/> tries) and forces the version back to 1
/// so the alteration is accepted, then applies <paramref name="htData"/>.
/// </summary>
/// <param name="task">The workflow task list item to alter.</param>
/// <param name="htData">Field values to apply (status, outcome, comments...).</param>
/// <param name="fSynchronous">Passed through to SPWorkflowTask.AlterTask.</param>
/// <param name="attempts">Maximum number of polls of the workflow lock.</param>
/// <param name="milisecondsTimeout">Delay between polls, in milliseconds.</param>
/// <returns>The result of SPWorkflowTask.AlterTask.</returns>
public static bool AlterTask(SPListItem task, Hashtable htData, bool fSynchronous, int attempts, int milisecondsTimeout)
{
if ((int)task[SPBuiltInFieldId.WorkflowVersion] != 1)
{
// Locate the list item the workflow instance is running on so its lock
// state can be inspected.
SPList parentList = task.ParentList.ParentWeb.Lists[new Guid(task[SPBuiltInFieldId.WorkflowListId].ToString())];
SPListItem parentItem = parentList.Items.GetItemById((int)task[SPBuiltInFieldId.WorkflowItemId]);
for (int i = 0; i < attempts; i++)
{
// Re-fetch the workflow on every iteration to observe lock changes.
SPWorkflow workflow = parentItem.Workflows[new Guid(task[SPBuiltInFieldId.WorkflowInstanceID].ToString())];
if (!workflow.IsLocked)
{
// Workflow is free: reset the version so AlterTask will accept the update.
task[SPBuiltInFieldId.WorkflowVersion] = 1;
task.SystemUpdate();
break;
}
// Not the last attempt yet — wait before polling the lock again.
if (i != attempts - 1)
{
Thread.Sleep(milisecondsTimeout);
}
}
}
var result = SPWorkflowTask.AlterTask(task, htData, fSynchronous);
return result;
}