How to write a tag-helper for alphabetical paging - asp.net-core

I came across the following article https://www.mikesdotnetting.com/article/256/entity-framework-recipe-alphabetical-paging-in-asp-net-mvc describing how to generate alphabetical paging links from the actual data rather than the full alphabet in an ASP.NET application.
The solution shown there is based on HTML helpers.
How can I implement this feature using tag helpers instead?
I'm using ASP.NET Core 1.1.
The code I'm referring to is:
public static class HtmlHelpers
{
    public static HtmlString AlphabeticalPager(this HtmlHelper html, string selectedLetter, IEnumerable<string> firstLetters, Func<string, string> pageLink)
    {
        var sb = new StringBuilder();
        var numbers = Enumerable.Range(0, 10).Select(i => i.ToString());
        var alphabet = Enumerable.Range(65, 26).Select(i => ((char)i).ToString()).ToList();
        alphabet.Insert(0, "All");
        alphabet.Insert(1, "0-9");
        var ul = new TagBuilder("ul");
        ul.AddCssClass("pagination");
        ul.AddCssClass("alpha");
        foreach (var letter in alphabet)
        {
            var li = new TagBuilder("li");
            // Only letters that actually occur in the data get links; "All" is always available
            if (firstLetters.Contains(letter) || (firstLetters.Intersect(numbers).Any() && letter == "0-9") || letter == "All")
            {
                if (selectedLetter == letter || (string.IsNullOrEmpty(selectedLetter) && letter == "All"))
                {
                    li.AddCssClass("active");
                    var span = new TagBuilder("span");
                    span.SetInnerText(letter);
                    li.InnerHtml = span.ToString();
                }
                else
                {
                    var a = new TagBuilder("a");
                    a.MergeAttribute("href", pageLink(letter));
                    a.InnerHtml = letter;
                    li.InnerHtml = a.ToString();
                }
            }
            else
            {
                li.AddCssClass("inactive");
                var span = new TagBuilder("span");
                span.SetInnerText(letter);
                li.InnerHtml = span.ToString();
            }
            sb.Append(li.ToString());
        }
        ul.InnerHtml = sb.ToString();
        return new HtmlString(ul.ToString());
    }
}
Any idea how to proceed?
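One possible direction, as a minimal untested sketch rather than a definitive implementation: port the helper body into a TagHelper's Process method and let Razor bind the inputs as attributes. The element name alphabetical-pager, the attribute names, and the page-url-template format string below are my own placeholders, and the Func<string, string> delegate is swapped for a URL template because delegates don't bind naturally as tag helper attributes:
using System.Collections.Generic;
using System.Linq;
using Microsoft.AspNetCore.Mvc.Rendering;
using Microsoft.AspNetCore.Razor.TagHelpers;

// Usage in a view (after registering with @addTagHelper in _ViewImports.cshtml):
// <alphabetical-pager selected-letter="@ViewBag.SelectedLetter"
//                     first-letters="@ViewBag.FirstLetters"
//                     page-url-template="/contacts?letter={0}" />
[HtmlTargetElement("alphabetical-pager")]
public class AlphabeticalPagerTagHelper : TagHelper
{
    public string SelectedLetter { get; set; }

    public IEnumerable<string> FirstLetters { get; set; } = Enumerable.Empty<string>();

    // e.g. "/contacts?letter={0}"; {0} is replaced with the chosen letter
    public string PageUrlTemplate { get; set; }

    public override void Process(TagHelperContext context, TagHelperOutput output)
    {
        var numbers = Enumerable.Range(0, 10).Select(i => i.ToString());
        var alphabet = Enumerable.Range(65, 26).Select(i => ((char)i).ToString()).ToList();
        alphabet.Insert(0, "All");
        alphabet.Insert(1, "0-9");
        output.TagName = "ul";
        output.Attributes.SetAttribute("class", "pagination alpha");
        foreach (var letter in alphabet)
        {
            var li = new TagBuilder("li");
            var available = FirstLetters.Contains(letter)
                || (FirstLetters.Intersect(numbers).Any() && letter == "0-9")
                || letter == "All";
            if (available)
            {
                if (SelectedLetter == letter || (string.IsNullOrEmpty(SelectedLetter) && letter == "All"))
                {
                    li.AddCssClass("active");
                    var span = new TagBuilder("span");
                    span.InnerHtml.Append(letter);
                    li.InnerHtml.AppendHtml(span);
                }
                else
                {
                    var a = new TagBuilder("a");
                    a.MergeAttribute("href", string.Format(PageUrlTemplate, letter));
                    a.InnerHtml.Append(letter);
                    li.InnerHtml.AppendHtml(a);
                }
            }
            else
            {
                li.AddCssClass("inactive");
                var span = new TagBuilder("span");
                span.InnerHtml.Append(letter);
                li.InnerHtml.AppendHtml(span);
            }
            output.Content.AppendHtml(li);
        }
    }
}
The main API difference from the HTML-helper version: in ASP.NET Core, TagBuilder.InnerHtml is an IHtmlContentBuilder rather than a string, so child elements are appended with AppendHtml instead of string concatenation.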

Related

Syntax Highlighting for Go in VB.NET

OK, so I have been making a simple code editor for Go in VB.NET (for personal use).
I tried this code:
Dim tokens As String = "(break|default|func|interface|select|case|defer|go|map|struct|chan|else|goto|package|switch|const|fallthrough|if|range|type|continue|for|import|return|var)"
Dim rex As New Regex(tokens)
Dim mc As MatchCollection = rex.Matches(TextBox2.Text)
Dim StartCursorPosition As Integer = TextBox2.SelectionStart
For Each m As Match In mc
    Dim startIndex As Integer = m.Index
    Dim StopIndex As Integer = m.Length
    TextBox2.[Select](startIndex, StopIndex)
    TextBox2.SelectionColor = Color.FromArgb(0, 122, 204)
    TextBox2.SelectionStart = StartCursorPosition
    TextBox2.SelectionColor = Color.RebeccaPurple
Next
But I couldn't add support for something like print statements; say I want fmt.Println("Hello World") highlighted, that is not possible. Can anyone help me?
I want a simple solution that does proper syntax highlighting without glitching the text colors the way the current code does.
Here's code showing how to update highlighting with strings and numbers.
You would need to tweak it further to support syntax like comments, etc.
private Regex BuildExpression()
{
    string[] exprs = {
        "(break|default|func|interface|select|case|defer|go|map|struct|chan|else|goto|package|switch|const|fallthrough|if|range|type|continue|for|import|return|var)",
        @"([0-9]+\.[0-9]*(e|E)(\+|\-)?[0-9]+)|([0-9]+\.[0-9]*)|([0-9]+)",
        "(\"\")|\"((((\\\\\")|(\"\")|[^\"])*\")|(((\\\\\")|(\"\")|[^\"])*))"
    };
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < exprs.Length; i++)
    {
        string expr = exprs[i];
        if ((expr != null) && (expr != string.Empty))
            sb.Append(string.Format("(?<{0}>{1})", "_" + i.ToString(), expr) + "|");
    }
    if (sb.Length > 0)
        sb.Remove(sb.Length - 1, 1);
    RegexOptions options = RegexOptions.ExplicitCapture | RegexOptions.IgnorePatternWhitespace | RegexOptions.Singleline | RegexOptions.Compiled | RegexOptions.IgnoreCase;
    return new Regex(sb.ToString(), options);
}
private void HighlightSyntax()
{
    var colors = new Dictionary<int, Color>();
    var expression = BuildExpression();
    Color[] clrs = { Color.Teal, Color.Red, Color.Blue };
    int[] intarray = expression.GetGroupNumbers();
    foreach (int i in intarray)
    {
        var name = expression.GroupNameFromNumber(i);
        if ((name != null) && (name.Length > 0) && (name[0] == '_'))
        {
            var idx = int.Parse(name.Substring(1));
            if (idx < clrs.Length)
                colors.Add(i, clrs[idx]);
        }
    }
    foreach (Match match in expression.Matches(richTextBox1.Text))
    {
        int index = match.Index;
        int length = match.Length;
        richTextBox1.Select(index, length);
        for (int i = 0; i < match.Groups.Count; i++)
        {
            if (match.Groups[i].Success)
            {
                if (colors.ContainsKey(i))
                {
                    richTextBox1.SelectionColor = colors[i];
                    break;
                }
            }
        }
    }
}
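If it helps, here is a small sketch (assumptions: a WinForms form with richTextBox1, and the two methods above on the same form) of re-running the highlighter as the user types while keeping the caret stable, which is what avoids the color glitching described in the question:
// Assumed to be wired to richTextBox1.TextChanged.
private void richTextBox1_TextChanged(object sender, EventArgs e)
{
    int caret = richTextBox1.SelectionStart;               // remember where the user was typing
    HighlightSyntax();                                     // recolor the whole buffer
    richTextBox1.Select(caret, 0);                         // restore the caret
    richTextBox1.SelectionColor = richTextBox1.ForeColor;  // new input gets the default color
}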
What we found during development of our Code Editor libraries is that regular-expression-based parsers are hard to adapt to fully support advanced syntax like contextual keywords (LINQ) or interpolated strings.
You might find a bit more information here:
https://www.alternetsoft.com/blog/code-parsing-explained
The most accurate syntax highlighting for VB.NET can be implemented using the Microsoft.CodeAnalysis (Roslyn) API; it's the same API used internally by the Visual Studio text editor.
Below is sample code showing how to get classified spans for VB.NET code (every span contains a start/end position within the text and a classification type, e.g. keyword, string, etc.). These spans can then be used to highlight text inside a textbox.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Classification;
using Microsoft.CodeAnalysis.Host.Mef;
using Microsoft.CodeAnalysis.Text;

public class VBClassifier
{
    private Workspace workspace;

    private static string FileContent = @"
Public Sub Run()
    Dim test as TestClass = new TestClass()
End Sub";

    public void Classify()
    {
        var project = InitProject();
        var doc = AddDocument(project, "file1.vb", FileContent);
        var spans = Classify(doc);
    }

    protected IEnumerable<ClassifiedSpan> Classify(Document document)
    {
        var text = document.GetTextAsync().Result;
        var span = new TextSpan(0, text.Length);
        return Classifier.GetClassifiedSpansAsync(document, span).Result;
    }

    protected Document AddDocument(Project project, string fileName, string code)
    {
        var documentId = DocumentId.CreateNewId(project.Id, fileName);
        ApplySolutionChanges(s => s.AddDocument(documentId, fileName, code, filePath: fileName));
        return workspace.CurrentSolution.GetDocument(documentId);
    }

    protected virtual void ApplySolutionChanges(Func<Solution, Solution> action)
    {
        var solution = workspace.CurrentSolution;
        solution = action(solution);
        workspace.TryApplyChanges(solution);
    }

    protected MefHostServices GetRoslynCompositionHost()
    {
        IEnumerable<Assembly> assemblies = MefHostServices.DefaultAssemblies;
        var compositionHost = MefHostServices.Create(assemblies);
        return compositionHost;
    }

    protected Project CreateDefaultProject()
    {
        var solution = workspace.CurrentSolution;
        var projectId = ProjectId.CreateNewId();
        var projectName = "VBTest";
        ProjectInfo projectInfo = ProjectInfo.Create(
            projectId,
            VersionStamp.Default,
            projectName,
            projectName,
            LanguageNames.VisualBasic,
            filePath: null);
        ApplySolutionChanges(s => s.AddProject(projectInfo));
        return workspace.CurrentSolution.Projects.FirstOrDefault();
    }

    protected Project InitProject()
    {
        var host = GetRoslynCompositionHost();
        workspace = new AdhocWorkspace(host);
        return CreateDefaultProject();
    }
}
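To inspect the result, the spans variable in Classify() above can be dumped; a small hedged example (placed at the end of that method, reusing its doc local) of printing each span's classification type and text:
// Hypothetical addition at the end of Classify(): print every classified span.
var sourceText = doc.GetTextAsync().Result;
foreach (var classifiedSpan in spans)
{
    Console.WriteLine("{0}: '{1}'",
        classifiedSpan.ClassificationType,             // e.g. "keyword", "class name"
        sourceText.ToString(classifiedSpan.TextSpan)); // the classified source fragment
}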
Update:
Here's a Visual Studio project demonstrating both approaches:
https://drive.google.com/file/d/1LLuzy7yDFAE-v40I7EswECYQSthxheEf/view?usp=sharing

Run last Photoshop script (again)

This seems like a trivial issue but I'm not sure Photoshop supports this type of functionality:
Is it possible to implement a "run last script" functionality?
That is, without having to add a function to each and every script that writes its filename to a text file.
Well... it's a bit clunky, but I suppose you could read the ScriptingListener log in reverse order and find the first mention of a script file:
// Switch off any dialog boxes
displayDialogs = DialogModes.NO; // OFF

var scripts_folder = "D:\\PS_scripts";
var js = "C:\\Users\\GhoulFool\\Desktop\\ScriptingListenerJS.log";
var jsLog = read_file(js);
var lastScript = process_file(jsLog);

// use function to call scripts
callScript(lastScript);

// Set Display Dialogs back to normal
displayDialogs = DialogModes.ALL; // NORMAL

function callScript (ascript)
{
    eval('//#include "' + ascript + '";\r');
}

function process_file(afile)
{
    var needle = ".jsx";
    var msg = "";
    // Let's do this backwards
    for (var i = afile.length - 1; i >= 0; i--)
    {
        var str = afile[i];
        if (str.indexOf(needle) > 0)
        {
            var regEx = str.replace(/(.+new\sFile\(\s")(.+\.jsx)(.+)/gim, "$2");
            if (regEx != null)
            {
                return regEx;
            }
        }
    }
}

function read_file(inFile)
{
    var theFile = new File(inFile);
    // read in file
    var lines = new Array();
    var l = 0;
    var txtFile = new File(theFile);
    txtFile.open('r');
    var str = "";
    while (!txtFile.eof)
    {
        var line = txtFile.readln();
        if (line != null && line.length > 0)
        {
            lines[l++] = line;
        }
    }
    txtFile.close();
    return lines;
}

Getting all the Term Stores in Sharepoint 2010 (web services or client-side object model)?

Is it possible with Sharepoint 2010 (not 2013!) to get a list of all the Term Stores on the site using either the web services or the client-side object model?
I know 2013 has added a library for it, but that will not help me on 2010.
If not the whole list, how do I get the Term Store ID, if I know a Term (that might or might not be in the TaxonomyHiddenList)?
Someone mentioned checking out the TaxonomyFieldType fields, so I hacked together these two methods. I do not know if they will work under all circumstances.
The first function just returns the Term Store ID stored in the schema of the first TaxonomyFieldType* field we come across.
public static string GetDefaultTermStore(string site) {
    var context = new ClientContext(site);
    var fields = context.Web.Fields;
    context.Load(fields, fs => fs.Include(f => f.SchemaXml, f => f.TypeAsString));
    context.ExecuteQuery();
    foreach (var field in fields) {
        if (field.TypeAsString.StartsWith("TaxonomyFieldType")) {
            var doc = XDocument.Parse(field.SchemaXml);
            var node = doc.XPathSelectElement("//Name[text()='SspId']/../Value");
            if (node != null && !string.IsNullOrEmpty(node.Value)) {
                return node.Value;
            }
        }
    }
    throw new Exception("Term Store ID not found!");
}
The second function goes through all the fields and gets all the possible Term Store IDs and returns them in a list.
public static List<string> GetTermStores(string site) {
    var context = new ClientContext(site);
    var fields = context.Web.Fields;
    context.Load(fields, fs => fs.Include(f => f.SchemaXml, f => f.TypeAsString));
    context.ExecuteQuery();
    var hashlist = new HashSet<string>(StringComparer.InvariantCultureIgnoreCase);
    foreach (var field in fields) {
        if (field.TypeAsString.StartsWith("TaxonomyFieldType")) {
            var doc = XDocument.Parse(field.SchemaXml);
            var node = doc.XPathSelectElement("//Name[text()='SspId']/../Value");
            if (node != null && !string.IsNullOrEmpty(node.Value)) {
                if (!hashlist.Contains(node.Value)) {
                    hashlist.Add(node.Value);
                }
            }
        }
    }
    if (hashlist.Count == 0) throw new Exception("No Term Store IDs found!");
    return hashlist.ToList();
}
Is this a correct answer to my question, or does anyone have a more reliable way to get the IDs?
It does not seem like anyone else has a good answer for this question.
I have added the utility class I made from this below. It's a big block of mostly uncommented code, for those who might need it:
using Microsoft.SharePoint.Client;
using System;
using System.Collections.Generic;
using System.Configuration;
using System.IO;
using System.Linq;
using System.Net;
using System.Text;
using System.Web.Services.Protocols;
using System.Windows.Forms;
using System.Xml.Linq;
using System.Xml.XPath;
namespace VitaminTilKanbanPusher.Sharepoint {
    public class SharepointTaxonomyAgent {
        //URLS:
        //http://www.novolocus.com/2012/02/06/working-with-the-taxonomyclientservice-part-1-what-fields-are-there/

        public static void Test() {
            var site = ConfigurationManager.AppSettings["VitaminSite"];
            //var list = ConfigurationManager.AppSettings["VitaminList"];
            //var id = GetDefaultTermStore(site);
            //var ids = GetTermStores(site);
            var rs = GetAllTermSetNames(site);
            var ts = GetTermSetTerms(site, "Some Name");
            //var ts = GetTermSetTerms(site, "Some other name");
            //var term = GetTermInfo(site, "Priority");
            //var term2 = GetTermInfo(site, "My term");
            //var termset = GetTermSetInfo(site, "My term");
            //var termsets = GetTermSets(site, "My term");
        }
        public static string GetDefaultTermStore(string site) {
            var context = new ClientContext(site);
            context.ExecutingWebRequest += ctx_MixedAuthRequest;
            var fields = context.Web.Fields;
            context.Load(fields, fs => fs.Include(f => f.InternalName, f => f.SchemaXml, f => f.TypeAsString));
            context.ExecuteQuery();
            foreach (var field in fields) {
                //field.InternalName == "TaxKeyword" -> possibly default?
                if (field.TypeAsString.StartsWith("TaxonomyFieldType")) {
                    var doc = XDocument.Parse(field.SchemaXml);
                    var node = doc.XPathSelectElement("//Name[text()='SspId']/../Value");
                    if (node != null && !string.IsNullOrEmpty(node.Value)) {
                        return node.Value;
                    }
                }
            }
            throw new Exception("Term Store ID not found!");
        }
        public static List<string> GetTermStores(string site) {
            var context = new ClientContext(site);
            context.ExecutingWebRequest += ctx_MixedAuthRequest;
            var fields = context.Web.Fields;
            context.Load(fields, fs => fs.Include(f => f.SchemaXml, f => f.TypeAsString));
            context.ExecuteQuery();
            var hashlist = new HashSet<string>(StringComparer.InvariantCultureIgnoreCase);
            foreach (var field in fields) {
                if (field.TypeAsString.StartsWith("TaxonomyFieldType")) {
                    var doc = XDocument.Parse(field.SchemaXml);
                    var node = doc.XPathSelectElement("//Name[text()='SspId']/../Value");
                    if (node != null && !string.IsNullOrEmpty(node.Value)) {
                        if (!hashlist.Contains(node.Value)) {
                            hashlist.Add(node.Value);
                        }
                    }
                }
            }
            if (hashlist.Count == 0) throw new Exception("No Term Store IDs found!");
            return hashlist.ToList();
        }
        private static List<TermSet> _termSets;

        public static List<TermSet> GetAllTermSetNames(string site, string onlySpecificTermSetName = null) {
            if (_termSets != null) {
                if (onlySpecificTermSetName == null) return _termSets;
                foreach (var ts in _termSets) {
                    if (ts.Name.Equals(onlySpecificTermSetName, StringComparison.InvariantCultureIgnoreCase)) {
                        return new List<TermSet>() { ts };
                    }
                }
                return new List<TermSet>();
            }
            var context = new ClientContext(site);
            context.ExecutingWebRequest += ctx_MixedAuthRequest;
            var fields = context.Web.Fields;
            context.Load(fields, fs => fs.Include(f => f.SchemaXml, f => f.TypeAsString));
            context.ExecuteQuery();
            var hashlist = new HashSet<string>(StringComparer.InvariantCultureIgnoreCase);
            var termSets = new List<TermSet>();
            TermSet theChosenTermSet = null;
            foreach (var field in fields) {
                if (field.TypeAsString.StartsWith("TaxonomyFieldType")) {
                    var ts = new TermSet();
                    var doc = XDocument.Parse(field.SchemaXml);
                    var fn = doc.Element("Field");
                    if (fn == null) continue;
                    if (fn.Attribute("DisplayName") == null) continue;
                    if (fn.Attribute("ID") == null) continue;
                    ts.Name = fn.Attribute("DisplayName").Value;
                    //Only looking for one set? Remember the matching one.
                    if (onlySpecificTermSetName != null) {
                        if (ts.Name.Equals(onlySpecificTermSetName, StringComparison.InvariantCultureIgnoreCase)) {
                            theChosenTermSet = ts;
                        }
                    }
                    if (fn.Attribute("Description") != null) {
                        ts.Description = fn.Attribute("Description").Value;
                    }
                    var node = doc.XPathSelectElement("//Name[text()='SspId']/../Value");
                    if (node != null && !string.IsNullOrEmpty(node.Value)) {
                        ts.TermStoreId = node.Value;
                    }
                    var node2 = doc.XPathSelectElement("//Name[text()='TermSetId']/../Value");
                    if (node2 != null && !string.IsNullOrEmpty(node2.Value)) {
                        ts.Id = node2.Value;
                    }
                    else {
                        continue; //No ID found
                    }
                    //Unique hits only
                    if (!hashlist.Contains(ts.Id)) {
                        hashlist.Add(ts.Id);
                        termSets.Add(ts);
                    }
                }
            }
            _termSets = termSets;
            if (onlySpecificTermSetName != null) return (theChosenTermSet == null ? new List<TermSet>() : new List<TermSet>() { theChosenTermSet });
            return termSets;
        }
        public static TermSet GetTermSetTerms(string site, string termName) {
            var ts = GetAllTermSetNames(site, termName);
            if (ts.Count == 0) throw new Exception("Could not find termset: " + termName);
            var theTermSet = ts[0];
            var proxy = new SharepointTaxWS.Taxonomywebservice();
            proxy.UseDefaultCredentials = true;
            proxy.PreAuthenticate = true;
            proxy.Url = Path.Combine(site, "_vti_bin/taxonomyclientservice.asmx");
            GetAuthCookie(proxy, site);
            var lciden = 1033; //var lcidno = 1044; // System.Globalization.CultureInfo.CurrentCulture.LCID
            var clientTime = DateTime.Now.AddYears(-2).ToUniversalTime().Ticks.ToString();
            var termStoreId = new Guid(theTermSet.TermStoreId); // Guid.Parse(theTermSet.TermStoreId);
            var termSetId = new Guid(theTermSet.Id);
            string clientTimestamps = string.Format("<timeStamp>{0}</timeStamp>", clientTime);
            string clientVersion = "<version>1</version>";
            string termStoreIds = string.Format("<termStoreId>{0}</termStoreId>", termStoreId.ToString("D"));
            string termSetIds = string.Format("<termSetId>{0}</termSetId>", termSetId.ToString("D"));
            string serverTermSetTimestampXml;
            string result = proxy.GetTermSets(termStoreIds, termSetIds, 1033, clientTimestamps, clientVersion, out serverTermSetTimestampXml);
            var term = ParseTermSetInfo(result);
            term.Description = theTermSet.Description;
            term.Id = theTermSet.Id;
            term.Name = theTermSet.Name;
            return term;
        }
        //public static Term GetTermSetInfo(string site, string termName) {
        //    var proxy = new SharepointTaxWS.Taxonomywebservice();
        //    proxy.UseDefaultCredentials = true;
        //    proxy.PreAuthenticate = true;
        //    proxy.Url = Path.Combine(site, "_vti_bin/taxonomyclientservice.asmx");
        //    GetAuthCookie(proxy, site);
        //    var lciden = 1033; //var lcidno = 1044; // System.Globalization.CultureInfo.CurrentCulture.LCID
        //    var sets = proxy.GetChildTermsInTermSet(Guid.Parse(""), lciden, Guid.Parse("termsetguid"));
        //    var term = ParseTermInfo(sets);
        //    return term;
        //}

        public static Term GetTermInfo(string site, string termName) {
            var proxy = new SharepointTaxWS.Taxonomywebservice();
            proxy.UseDefaultCredentials = true;
            proxy.PreAuthenticate = true;
            proxy.Url = Path.Combine(site, "_vti_bin/taxonomyclientservice.asmx");
            GetAuthCookie(proxy, site);
            var lciden = 1033; //var lcidno = 1044; // System.Globalization.CultureInfo.CurrentCulture.LCID
            var sets = proxy.GetTermsByLabel(termName, lciden, SharepointTaxWS.StringMatchOption.StartsWith, 100, null, false);
            var term = ParseTermInfo(sets);
            return term;
        }
        private static TermSet ParseTermSetInfo(string xml) {
            //Not done
            var info = XDocument.Parse(xml);
            var ts = new TermSet();
            ts.Terms = new List<Term>();
            var n1 = info.XPathSelectElements("//T");
            if (n1 != null) {
                foreach (var item in n1) {
                    var t = new Term();
                    t.Id = item.Attribute("a9").Value;
                    t.Name = item.XPathSelectElement("LS/TL").Attribute("a32").Value;
                    t.TermSet = ts;
                    ts.Terms.Add(t);
                }
            }
            return ts;
        }

        private static Term ParseTermInfo(string xml) {
            var info = XDocument.Parse(xml);
            var t = new Term();
            var ts = new TermSet();
            var n1 = info.XPathSelectElement("TermStore/T");
            var n2 = info.XPathSelectElement("TermStore/T/LS/TL");
            var n3 = info.XPathSelectElement("TermStore/T/TMS/TM");
            if (n1 != null && n1.Attribute("a9") != null) {
                t.Id = n1.Attribute("a9").Value;
            }
            if (n2 != null && n2.Attribute("a32") != null) {
                t.Name = n2.Attribute("a32").Value;
            }
            if (n3 != null && n3.Attribute("a24") != null) {
                ts.Id = n3.Attribute("a24").Value;
            }
            if (n3 != null && n3.Attribute("a12") != null) {
                ts.Name = n3.Attribute("a12").Value;
            }
            t.TermSet = ts;
            return t;
        }
        private static CookieCollection _theAuth;
        private static bool _bNoClaims;

        static void GetAuthCookie(SoapHttpClientProtocol proxy, string site) {
            return;
            //if (_bNoClaims) {
            //    return; //No claims.
            //}
            //// get the cookie collection - authentication workaround
            //CookieCollection cook = null;
            //try {
            //    if (_theAuth == null) {
            //        cook = ClaimClientContext.GetAuthenticatedCookies(site, 925, 525);
            //    }
            //    else {
            //        cook = _theAuth;
            //    }
            //    _theAuth = cook;
            //    _bNoClaims = false;
            //}
            //catch (ApplicationException ex) {
            //    if (ex.Message.Contains("claim")) _bNoClaims = true;
            //    Console.Write("Auth failed: " + ex.Message + " - ");
            //    //IGNORE
            //}
            //if (_theAuth != null) {
            //    proxy.CookieContainer = new CookieContainer();
            //    proxy.CookieContainer.Add(_theAuth);
            //}
        }

        static void ctx_MixedAuthRequest(object sender, WebRequestEventArgs e) {
            //add the header that tells SharePoint to use Windows Auth
            e.WebRequestExecutor.RequestHeaders.Add("X-FORMS_BASED_AUTH_ACCEPTED", "f");
        }
    }
    public class TermSet {
        public string Id { get; set; }
        public string Name { get; set; }
        public List<Term> Terms { get; set; }
        public string TermStoreId { get; set; }
        public string Description { get; set; }

        public override string ToString() {
            int tc = 0;
            if (Terms != null) tc = Terms.Count;
            return Name + "|" + Id + " (" + tc + " terms)";
        }
    }

    public class Term {
        public string Id { get; set; }
        public string Name { get; set; }
        public TermSet TermSet { get; set; }

        public override string ToString() {
            return Name + "|" + Id;
        }
    }
}
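For completeness, a hedged example of how I would call the utility class; the site URL and term set name are placeholders for your own farm:
// Hypothetical caller; adjust the URL and term set name before use.
var site = "http://sharepoint2010.example.com";
var storeIds = SharepointTaxonomyAgent.GetTermStores(site);
Console.WriteLine("Term store IDs: " + string.Join(", ", storeIds));
var termSet = SharepointTaxonomyAgent.GetTermSetTerms(site, "Departments");
foreach (var term in termSet.Terms)
{
    Console.WriteLine(term); // Term.ToString() prints "Name|Id"
}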

Get resolved SPUser IDs from Sharepoint 2010 PeoplePicker

I'm trying to get the selected user IDs from a PeoplePicker control as below:
function GetUserIdsFromPP() {
    var xml = _picker.find('div#divEntityData');
    var visiblefor = new Array();
    xml.each(function (i, row) {
        var data = $(this).children().first('div').attr('data');
        var xmlDoc;
        if (window.DOMParser) {
            parser = new DOMParser();
            xmlDoc = parser.parseFromString(data, "text/xml");
        }
        else // Internet Explorer
        {
            xmlDoc = new ActiveXObject("Microsoft.XMLDOM");
            xmlDoc.async = false;
            xmlDoc.loadXML(data);
        }
        var uid = xmlDoc.getElementsByTagName('Value')[0].firstChild.nodeValue;
        visiblefor.push(uid);
    });
    return visiblefor;
}
The problem is that sometimes the XML doesn't contain <Key>SPUserID</Key><Value>1</Value>, and I get the FQUN (user login with domain name) instead.
What is a better way to resolve the selected SPUserIDs from the PeoplePicker control?
This is how to resolve emails from the PeoplePicker control on the client side:
function GetEmailsFromPicker() {
    var xml = _picker.find('div#divEntityData');
    var result = new Array();
    xml.each(function (i, row) {
        var data = $(this).children().first('div').attr('data');
        var xmlDoc;
        if (window.DOMParser) {
            parser = new DOMParser();
            xmlDoc = parser.parseFromString(data, "text/xml");
        }
        else // Internet Explorer
        {
            xmlDoc = new ActiveXObject("Microsoft.XMLDOM");
            xmlDoc.async = false;
            xmlDoc.loadXML(data);
        }
        var emailIndex = -1;
        for (var i = 0; i < xmlDoc.childNodes[0].childNodes.length; i++) {
            var element = xmlDoc.childNodes[0].childNodes[i];
            var key = element.childNodes[0].childNodes[0].nodeValue;
            if (key == 'Email') {
                var uid = xmlDoc.childNodes[0].childNodes[i].childNodes[1].childNodes[0].nodeValue;
                result.push({ EMail: uid });
                break;
            }
        }
    });
    return result;
}
Use the above answer, but replace this with the appropriate jQuery or JavaScript element selector:
var xml = _picker.find('div#divEntityData');
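If the entity data only carries the login name (the FQUN case from the question), one option is to resolve it server-side and return the ID to the client; a minimal sketch using the SharePoint 2010 server object model (the wrapper class and method names are mine):
using Microsoft.SharePoint;

public static class UserResolver
{
    // Resolves a login name (e.g. DOMAIN\user) to the numeric SPUser ID.
    // EnsureUser adds the user to the site's user information list if they
    // are not in it yet, so a valid login always yields an ID.
    public static int ResolveUserId(SPWeb web, string loginName)
    {
        SPUser user = web.EnsureUser(loginName);
        return user.ID;
    }
}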

Why does this controller double the inserts when I try to archive the results of the Bing Search API?

I'm trying to archive my search results for a term by:
Using the Bing API in an async controller
Inserting the results into a database using Entity Framework
For whatever reason the API returns 50 results, but then 100 rows are inserted into the database.
My Controller Code:
public class DHWebServicesController : AsyncController
{
    //
    // GET: /WebService/
    private DHContext context = new DHContext();

    [HttpPost]
    public void RunReportSetAsync(int id)
    {
        int iTotalCount = 1;
        AsyncManager.OutstandingOperations.Increment(iTotalCount);
        if (!context.DHSearchResults.Any(xx => xx.CityMarketComboRunID == id))
        {
            string strBingSearchUri = ConfigurationManager.AppSettings["BingSearchURI"];
            string strBingAccountKey = ConfigurationManager.AppSettings["BingAccountKey"];
            string strBingUserAccountKey = ConfigurationManager.AppSettings["BingUserAccountKey"];
            CityMarketComboRun cityMarketComboRun = context.CityMarketComboRuns.Include(xx => xx.CityMarketCombo).Include(xx => xx.CityMarketCombo.City).First(xx => xx.CityMarketComboRunID == id);
            var bingContainer = new Bing.BingSearchContainer(new Uri(strBingSearchUri));
            bingContainer.Credentials = new NetworkCredential(strBingUserAccountKey, strBingAccountKey);
            // now we can build the query
            Keyword keyword = context.Keywords.First();
            var bingWebQuery = bingContainer.Web(keyword.Name, "en-US", "Moderate", cityMarketComboRun.CityMarketCombo.City.Latitude, cityMarketComboRun.CityMarketCombo.City.Longitude, null, null, null);
            var bingWebResults = bingWebQuery.Execute();
            context.Configuration.AutoDetectChangesEnabled = false;
            int i = 1;
            DHSearchResult dhSearchResult = new DHSearchResult();
            List<DHSearchResult> lst = new List<DHSearchResult>();
            var webResults = bingWebResults.ToList();
            foreach (var result in webResults)
            {
                dhSearchResult = new DHSearchResult();
                dhSearchResult.BingID = result.ID;
                dhSearchResult.CityMarketComboRunID = id;
                dhSearchResult.Description = result.Description;
                dhSearchResult.DisplayUrl = result.DisplayUrl;
                dhSearchResult.KeywordID = keyword.KeywordID;
                dhSearchResult.Created = DateTime.Now;
                dhSearchResult.Modified = DateTime.Now;
                dhSearchResult.Title = result.Title;
                dhSearchResult.Url = result.Url;
                dhSearchResult.Ordinal = i;
                lst.Add(dhSearchResult);
                i++;
            }
            foreach (DHSearchResult c in lst)
            {
                context.DHSearchResults.Add(c);
                context.SaveChanges();
            }
            AsyncManager.Parameters["message"] = "The total number of results was " + lst.Count + ". And there are " + context.DHSearchResults.Count().ToString();
        }
        else
        {
            AsyncManager.Parameters["message"] = "You have already run this report";
        }
        AsyncManager.OutstandingOperations.Decrement(iTotalCount);
    }

    public string RunReportSetCompleted(string message)
    {
        string str = message;
        return str;
    }
}
Here is how I am calling it from my ASP.NET MVC 4 page:
@Ajax.ActionLink("Run Report", "GatherKeywordsFromBing", "DHWebServices",
    new { id = item.CityMarketComboRunID },
    new AjaxOptions { OnSuccess = "ShowNotifier();", UpdateTargetId = "TopNotifierMessage", HttpMethod = "POST", InsertionMode = InsertionMode.Replace, LoadingElementId = strCityMarketComboProgressID, LoadingElementDuration = 1000 },
    new { @class = "ViewLink" })
<span class="ProgressIndicator" id="@strCityMarketComboProgressID"><img src="@Url.Content("~/Content/img/SmallBall.gif")" alt="loading" /></span>
For whatever reason, all of the inserts are doubled.
Try saving only once:
foreach (DHSearchResult c in lst)
{
    context.DHSearchResults.Add(c);
}
context.SaveChanges();
Also, there's nothing asynchronous in your code, so there's no point in using an asynchronous controller. Not only will it not improve anything, it might make things worse.
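One more hedged suggestion based on the code above: the controller sets AutoDetectChangesEnabled = false and never restores it. Wrapping the import in try/finally keeps the single SaveChanges call and leaves the context usable afterwards:
context.Configuration.AutoDetectChangesEnabled = false;
try
{
    foreach (DHSearchResult c in lst)
    {
        context.DHSearchResults.Add(c);
    }
    context.SaveChanges(); // one round-trip instead of one per entity
}
finally
{
    // Restore change tracking so later operations on this context behave normally.
    context.Configuration.AutoDetectChangesEnabled = true;
}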