An invalid request URI was provided in datatable webapi calling - asp.net-mvc-4

An invalid request URI was provided. The request URI must either be an absolute URI or BaseAddress must be set.
Here is the Api Controller:
EmailTemplate.UI\Areas\Ticket\Api\TicketController.cs
And here is the calling code:
var client = new HttpClient();
string _url = _apiTicket + "Areas/Ticket/Api/TicketController/Get/10?PageIndex=" + pageIndex + "&PageSize=" + pageSize;
var response = client.GetAsync(_url).Result;
var result1 = response.Content.ReadAsStringAsync().Result;
The error above is thrown on that GetAsync call.
I want to filter the data by pageIndex and pageSize for a server-side DataTable in MVC 4.
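The exception itself points at the fix: HttpClient is being given a relative URL while no BaseAddress has been set. A minimal sketch of the two ways to satisfy it, assuming the API is reachable at the localhost address used in the answer below (adjust host, port and route to your project):

// requires: using System; using System.Net.Http;
var client = new HttpClient();

// Option 1: set a base address and keep the request URI relative (no leading slash).
client.BaseAddress = new Uri("http://localhost:55442/");
var response = client.GetAsync("api/Ticket/10?searchstr=&start=0&length=10").Result;

// Option 2: pass a fully absolute URI instead.
var response2 = client.GetAsync("http://localhost:55442/api/Ticket/10?searchstr=&start=0&length=10").Result;

var json = response.Content.ReadAsStringAsync().Result;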
Here is the method in api:
public HttpResponseMessage Get1(int UserId)
{
    string _searchString = GetQueryValueByName.Get(Request.GetQueryNameValuePairs(), "searchstr");
    int _start = int.Parse(GetQueryValueByName.Get(Request.GetQueryNameValuePairs(), "start"));
    int _length = int.Parse(GetQueryValueByName.Get(Request.GetQueryNameValuePairs(), "length"));
    List<sp_Ticketlist_Result> _dbData;
    int _dataTotaRowCount;
    _dbData = _repository.GetTicket(UserId).ToList();
    _dataTotaRowCount = _dbData.Count();
    if (!string.IsNullOrEmpty(_searchString))
    {
        _dbData = _dbData.Where(m => m.Name.ToUpper().Contains(_searchString.ToUpper())).ToList();
        _dataTotaRowCount = _dbData.Count();
        _dbData = _dbData.Skip(_start).Take(_length).ToList();
    }
    else
    {
        _dbData = _dbData.Skip(_start).Take(_length).ToList();
    }
    return Request.CreateResponse(HttpStatusCode.OK, DataTableObjectConverter.ConvertData(_dbData, _dataTotaRowCount));
}

public static class GetQueryValueByName
{
    public static string Get(IEnumerable<KeyValuePair<string, string>> _req, string key)
    {
        return _req.FirstOrDefault(ma => string.Compare(ma.Key, key) == 0).Value;
    }
}

public static class DataTableObjectConverter
{
    public static DataTableObject ConvertData<T>(T source, int count) where T : class, new()
    {
        DataTableObject _obj = new DataTableObject();
        //_obj.draw = 1;
        _obj.recordsFiltered = count;
        _obj.recordsTotal = count;
        _obj.data = source;
        return _obj;
    }
}
Is there a need for a separate method that takes pageIndex and pageSize?
And how do I call the API with the pageIndex and pageSize values defined in that method?
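If you do want the API to accept pageIndex and pageSize directly instead of DataTables' start/length, the mapping is pageIndex = (start / pageSize) + 1 and start = (pageIndex - 1) * pageSize. A hedged sketch of what such a method could look like; the name and signature below are an illustration, not code from the project:

// Hypothetical overload; routing must be configured so it does not clash with Get(int id).
public HttpResponseMessage GetPaged(int userId, int pageIndex, int pageSize)
{
    var data = _repository.GetTicket(userId).ToList();
    int totalCount = data.Count;

    // Convert pageIndex/pageSize back to a row offset, then take one page.
    int start = (pageIndex - 1) * pageSize;
    var page = data.Skip(start).Take(pageSize).ToList();

    return Request.CreateResponse(HttpStatusCode.OK,
        DataTableObjectConverter.ConvertData(page, totalCount));
}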
Here is my GetData method:
public ActionResult GetData()
{
    // Initialization.
    JsonResult result = new JsonResult();
    try
    {
        // Initialization.
        string search = Request.Form.GetValues("search[value]")[0];
        string draw = Request.Form.GetValues("draw")[0];
        string order = Request.Form.GetValues("order[0][column]")[0];
        string orderDir = Request.Form.GetValues("order[0][dir]")[0];
        int startRec = Convert.ToInt32(Request.Form.GetValues("start")[0]);
        // int pageSize = Convert.ToInt32(Request.Form.GetValues("length")[0]);
        var start = Request.Form.GetValues("start").FirstOrDefault();
        var length = Request.Form.GetValues("length").FirstOrDefault();
        int pageSize = length != null ? Convert.ToInt32(length) : 0;
        int recordStatr = start != null ? Convert.ToInt32(start) : 0;
        recordStatr = recordStatr == 0 ? 1 : recordStatr;
        var pageIndex = (recordStatr / pageSize) + 1;
        int recordsTotal = 0;

        // Loading.
        List<AppTicket> data = this.LoadData();

        // Total record count.
        int totalRecords = data.Count;

        // Verification.
        //if (!string.IsNullOrEmpty(search) && !string.IsNullOrWhiteSpace(search))
        //{
        //    // Apply search
        //    data = data.Where(p => p.Title.ToString().ToLower().Contains(search.ToLower()) ||
        //        p.Name.ToLower().Contains(search.ToLower()) ||
        //        p.Email.ToString().ToLower().Contains(search.ToLower())).ToList();
        //    //p.ProductName.ToLower().Contains(search.ToLower()) ||
        //    //p.SpecialOffer.ToLower().Contains(search.ToLower()) ||
        //    //p.UnitPrice.ToString().ToLower().Contains(search.ToLower()) ||
        //    //p.UnitPriceDiscount.ToString().ToLower().Contains(search.ToLower())).ToList();
        //}

        // Sorting.
        data = this.SortByColumnWithOrder(order, orderDir, data);

        // Filter record count.
        int recFilter = data.Count;

        // Apply pagination.
        // data = data.Skip(startRec).Take(pageSize).ToList();

        // Loading drop down lists.
        // result = this.Json(new { draw = Convert.ToInt32(draw), recordsTotal = totalRecords, recordsFiltered = recFilter, data = data }, JsonRequestBehavior.AllowGet);

        //Find Order Column
        //var sortColumn = Request.Form.GetValues("columns[" + Request.Form.GetValues("order[0][column]").FirstOrDefault() + "][name]").FirstOrDefault();
        //var sortColumnDir = Request.Form.GetValues("order[0][dir]").FirstOrDefault();

        var client = new HttpClient();
        //client.BaseAddress = new Uri("http://localhost:1849");
        //client.DefaultRequestHeaders.Accept.Clear();
        //client.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));
        //string _url = _apiTicket + ".Get?UserId=" + 10 + "&PageIndex=" + pageIndex + "&PageSize=" + pageSize;
        //var response = client.GetAsync(_url).Result;
        //var response1 = client.GetAsync("/Areas/Ticket/Api/Get/10,10,10").Result;
        //if (response1.IsSuccessStatusCode)
        //{
        //    string responseString = response.Content.ReadAsStringAsync().Result;
        //}
        //string _url = _apiTicket + "Get1/10?searchstr=Monaj&PageIndex=" + pageIndex + "&PageSize=" + pageSize;
        //string apiUrl = "Api/Ticket/10?searchstr=Monaj&PageIndex=" + pageIndex + "&PageSize=" + pageSize;

        string apiUrl = "../Areas/api/Ticket/1?searchstr=Monaj&start=0&length=10";
        var response = client.GetAsync(apiUrl).Result;
        var result1 = response.Content.ReadAsStringAsync().Result;
        // HttpResponseMessage response = await client.GetAsync(_url);
        // HttpResponseMessage response = client.GetAsync(_url).Result;
        //var response = client.GetAsync(_url).Result;
        //var result1 = response.Content.ReadAsStringAsync().Result;

        JsonResult jsonresult = Json(result1, JsonRequestBehavior.AllowGet);
        AppTicket _contacts = new AppTicket();
        _contacts = JsonConvert.DeserializeObject<AppTicket>(jsonresult.Data.ToString());
        //return Json(new { draw = draw, recordsFiltered = recordsTotal, recordsTotal = recordsTotal, data = _contacts.listcourses }, JsonRequestBehavior.AllowGet);

        result = this.Json(new { draw = Convert.ToInt32(draw), recordsFiltered = totalRecords, recordsTotal = recordsTotal, data = data }, JsonRequestBehavior.AllowGet);
    }
    catch (Exception ex)
    {
        // Info
        Console.Write(ex);
    }

    // Return info.
    return result;
}
I am using a JSON AJAX call here, in custom-datatable.js. Here is the code:
$(document).ready(function () {
    debugger
    $('#TableId').DataTable({
        //"columnDefs": [
        //    { "width": "5%", "targets": [0] },
        //    {
        //        "className": "text-center custom-middle-align",
        //        "targets": [0, 1, 2, 3, 4, 5, 6]
        //    },
        //],
        'columnDefs': [
            {
                'targets': 0,
                'searchable': false,
                'orderable': false,
                'width': '1%',
                'className': 'dt-body-center',
                'render': function (data, type, full, meta) {
                    return '<input type="checkbox">';
                }
            },
            {
                targets: 2,
                render: function (data, type, row, meta) {
                    if (type === 'display') {
                        data = '<a href="/TicketTemplate/AppDetails/' + row.Id + ' " >' + data + '</a>';
                    }
                    return data;
                }
            },
            {
                targets: 1,
                render: function (data, type, row, meta) {
                    return moment(data).format('DD/MM/YYYY HH:mm:ss');
                }
            }
        ],
        "language": {
            "processing": "<div class='overlay custom-loader-background'><i class='fa fa-cog fa-spin custom-loader-color'></i></div>"
        },
        "processing": true,
        "serverSide": true,
        "ajax": {
            "url": "/TicketTemplate/GetData",
            "type": "POST",
            "dataType": "JSON"
        },
        "columns": [
            { "data": '' },
            { "data": "CreatedDate" },
            { "data": "Title" },
            //{
            //    //"data": "title",
            //    "render": function (data, type, row, meta) {
            //        //return '' + title + '';
            //        return '' + data + '';
            //    }
            //},
            //{
            //{
            //    //"data": "title",
            //    "render": function (data, type, row, meta) {
            //        //return '' + title + '';
            //        return "" + row.Title + " ";
            //    }
            //},
            { "data": "Name" },
            { "data": "Email" },
            { "data": "AssignTo" },
            { "data": "Status" }
        ]
    });
});
The flow is: the request first hits the MVC controller's GetData(), and GetData() then calls the Web API controller's Get().
Paging, sorting and searching should all be handled dynamically through the API.

As you have mentioned in your comment, "i want to call the datatable paging sorting searching through MVC controller and web api controller", I would recommend doing it like this; it is what I have done in many projects.
//This is my API Get method
public HttpResponseMessage Get(int id)
{
    string _searchString = GetQueryValueByName.Get(Request.GetQueryNameValuePairs(), "searchstr");
    int _start = int.Parse(GetQueryValueByName.Get(Request.GetQueryNameValuePairs(), "start"));
    int _length = int.Parse(GetQueryValueByName.Get(Request.GetQueryNameValuePairs(), "length"));
    List<sp_Ticketlist_Result> _dbData;
    int _dataTotaRowCount;
    _dbData = _repository.GetTicket(id).ToList();
    _dataTotaRowCount = _dbData.Count();
    if (!string.IsNullOrEmpty(_searchString))
    {
        _dbData = _dbData.Where(m => m.Name.ToUpper().Contains(_searchString.ToUpper())).ToList();
        _dataTotaRowCount = _dbData.Count();
        _dbData = _dbData.Skip(_start).Take(_length).ToList();
    }
    else
    {
        _dbData = _dbData.Skip(_start).Take(_length).ToList();
    }
    return Request.CreateResponse(HttpStatusCode.OK, DataTableObjectConverter.ConvertData(_dbData, _dataTotaRowCount));
}
Below are my GetQueryValueByName and DataTableObjectConverter classes, which I keep in a separate class file and simply reference from my API controller.
public static class GetQueryValueByName
{
    public static string Get(IEnumerable<KeyValuePair<string, string>> _req, string key)
    {
        return _req.FirstOrDefault(ma => string.Compare(ma.Key, key) == 0).Value;
    }
}

public static class DataTableObjectConverter
{
    public static DataTableObject ConvertData<T>(T source, int count) where T : class, new()
    {
        DataTableObject _obj = new DataTableObject();
        //_obj.draw = 1;
        _obj.recordsFiltered = count;
        _obj.recordsTotal = count;
        _obj.data = source;
        return _obj;
    }
}

public class DataTableObject
{
    public int recordsTotal { get; set; }
    public int recordsFiltered { get; set; }
    public Object data { get; set; }
}
Then my URL will be like this,
string apiUrl = "http://localhost:55442/api/Ticket/1?searchstr=Monaj&start=0&length=10";
var client = new HttpClient();
var response = client.GetAsync(apiUrl).Result;
var result1 = response.Content.ReadAsStringAsync().Result;
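For a URL of the shape http://localhost:55442/api/Ticket/1?... to reach Get(int id), the conventional Web API route has to be registered; if your controller sits inside an MVC area, the route template you register must still match the path you call. A minimal sketch of the usual registration, shown here as an assumption about the project setup rather than code from the answer:

// App_Start/WebApiConfig.cs (typical ASP.NET MVC 4 Web API setup)
public static class WebApiConfig
{
    public static void Register(HttpConfiguration config)
    {
        config.Routes.MapHttpRoute(
            name: "DefaultApi",
            routeTemplate: "api/{controller}/{id}",
            defaults: new { id = RouteParameter.Optional });
    }
}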
Note: Here I'm passing start, length and the search string as query-string parameters and reading them in my API Get method. Just update your URL accordingly, and don't put / between query-string parameters. You have written it like this:
" + pageIndex + "/PageSize=" + pageSize;
That is wrong; check how I have done it. The parameters need to be separated by &.
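On the MVC side, GetData() then only has to forward the start, length and search values that DataTables posts and hand the API's JSON back. A hedged sketch of that glue code; it assumes the DataTableObject shape from this answer, Json.NET for parsing, and a hard-coded API host that you would replace with your own:

// requires: using System.Net.Http; using System.Web; using Newtonsoft.Json.Linq;
public ActionResult GetData()
{
    // Values posted by DataTables (serverSide: true).
    string draw = Request.Form.GetValues("draw")[0];
    string search = Request.Form.GetValues("search[value]")[0];
    string start = Request.Form.GetValues("start")[0];
    string length = Request.Form.GetValues("length")[0];

    var client = new HttpClient();
    client.BaseAddress = new Uri("http://localhost:55442/"); // assumption: your API host

    string apiUrl = "api/Ticket/10?searchstr=" + HttpUtility.UrlEncode(search) +
                    "&start=" + start + "&length=" + length;

    string json = client.GetAsync(apiUrl).Result.Content.ReadAsStringAsync().Result;

    // The API already returns recordsTotal, recordsFiltered and data; just echo back
    // the draw counter DataTables sent and pass the JSON straight through.
    var payload = JObject.Parse(json);
    payload["draw"] = Convert.ToInt32(draw);
    return Content(payload.ToString(), "application/json");
}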

Related

Error when uploading file using Microsoft Graph API

I am trying to upload a large file to OneDrive using the Microsoft Graph API.
The upload to OneDrive completes normally, but the resulting file is damaged.
Please help me solve the problem.
public ActionResult UploadLargeFiles(string id, [FromForm]IFormFile files)
{
string fileName = files.FileName;
int fileSize = Convert.ToInt32(files.Length);
var uploadProvider = new JObject();
var res = new JArray();
var isExistence = _mailService.GetUploadFolder(id);
if (isExistence != HttpStatusCode.OK)
{
var createFolder = _mailService.CreateUploadFolder(id);
if (createFolder != HttpStatusCode.Created)
{
return BadRequest(ModelState);
}
}
if (files.Length > 0)
{
var uploadSessionUrl = _mailService.CreateUploadSession(id, fileName);
if (uploadSessionUrl != null)
{
if (fileSize < 4194304)
{
uploadProvider = _mailService.UploadByteFile(id, uploadSessionUrl, files);
res.Add(uploadProvider);
}
}
else
{
return BadRequest(ModelState);
}
}
return Ok();
}
createUploadSession
public string CreateUploadSession(string upn, string fileName)
{
var uploadSession = _mailGraphService.CreateUploadSession(upn, fileName).Result;
var sessionResult = new UploadSessionDTO(uploadSession);
return sessionResult.uploadUrl;
}
public async Task<UploadSessionDTO> CreateUploadSession(string upn, string fileName)
{
this.InitHttpClient();
var jObject = JObject.FromObject(new { item = new Dictionary<string, object> { { "#microsoft.graph.conflictBehavior", "rename" } }, fileSystemInfo = new Dictionary<string, object> { { "#odata.type", "microsoft.graph.fileSystemInfo" } }, name = fileName });
var toJson = JsonConvert.SerializeObject(jObject);
var content = new StringContent(toJson, Encoding.UTF8, "application/json");
var response = await _client.PostAsync("users/"+ upn + "/drive/root:/MailFiles/" + fileName +":/createUploadSession", content);
if (!response.IsSuccessStatusCode)
return null;
var strData = await response.Content.ReadAsStringAsync();
dynamic uploadSession = JsonConvert.DeserializeObject<UploadSessionDTO>(strData);
return uploadSession;
}
public JObject LargeFileUpload(string upn, string url, IFormFile files)
{
    var responseCode = HttpStatusCode.OK;
    var jObject = new JObject();
    int idx = 0;
    int fileSize = Convert.ToInt32(files.Length);
    int fragSize = 4 * 1024 * 1024; //4MB => 4 * 1024 * 1024;
    var byteRemaining = fileSize;
    var numFragments = (byteRemaining / fragSize) + 1;
    while (idx < numFragments)
    {
        var chunkSize = fragSize;
        var start = idx * fragSize;
        var end = idx * fragSize + chunkSize - 1;
        var offset = idx * fragSize;
        if (byteRemaining < chunkSize)
        {
            chunkSize = byteRemaining;
            end = fileSize - 1;
        }
        var contentRange = " bytes " + start + "-" + end + "/" + fileSize;
        byte[] file = new byte[chunkSize];
        using (var client = new HttpClient())
        {
            var content = new ByteArrayContent(file);
            content.Headers.Add("Content-Length", chunkSize.ToString());
            content.Headers.Add("Content-Range", contentRange);
            var response = client.PutAsync(url, content);
            var strData = response.Result.Content.ReadAsStringAsync().Result;
            responseCode = response.Result.StatusCode;
            // Upload succeeded
            if (responseCode == HttpStatusCode.Created)
            {
                JObject data = JObject.Parse(strData);
                string downloadUrl = data["#content.downloadUrl"].ToString();
                string itemId = data["id"].ToString();
                // Convert file size to KB
                fileSize = fileSize / 1000;
                jObject = JObject.FromObject(new { name = files.Name, id = itemId, url = downloadUrl, size = (double)fileSize });
            }
            // Upload conflict
            else if (responseCode == HttpStatusCode.Conflict)
            {
                var restart = RestartByteFile(upn, url, files.Name);
                responseCode = restart;
            }
        }
        byteRemaining = byteRemaining - chunkSize;
        idx++;
    }
    if (responseCode == HttpStatusCode.Created) { return jObject; }
    else return jObject = JObject.FromObject(new { result = "실패" }); // "실패" = "failure"
}
When I check OneDrive the file appears to have been uploaded normally, but when I download and open it, it turns out to be corrupted.
I would like to know why the file gets corrupted during upload, and how to fix it.
If the problem cannot be solved, please let me know that as well.
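One thing that stands out in LargeFileUpload, offered as an observation rather than a confirmed diagnosis: the byte[] file buffer is allocated for every fragment but never filled from the posted IFormFile, so each PUT sends chunkSize zero bytes. That would produce exactly this symptom of an upload that completes but yields a corrupt file. A minimal sketch of filling each fragment from the file stream before sending it (the FillFragment helper is hypothetical, not part of the original code):

// requires: using System.IO;
private static int FillFragment(Stream source, byte[] buffer, int chunkSize)
{
    // Reads up to chunkSize bytes from an already-open stream into the buffer.
    int read = 0;
    while (read < chunkSize)
    {
        int n = source.Read(buffer, read, chunkSize - read);
        if (n == 0) break; // end of the uploaded file
        read += n;
    }
    return read;
}

// In LargeFileUpload, roughly (sketch): open the stream once, outside the fragment loop,
// and fill the buffer before building the ByteArrayContent:
//
//   using (var stream = files.OpenReadStream())
//   {
//       while (idx < numFragments)
//       {
//           // ... existing offset/chunkSize bookkeeping ...
//           byte[] file = new byte[chunkSize];
//           FillFragment(stream, file, chunkSize);
//           var content = new ByteArrayContent(file);
//           // ... PUT the fragment as before ...
//       }
//   }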

Photo doesn't show in browser

I am making a web application in ASP.NET MVC 4 and I'm trying to show the user's Twitter home feed. To get the home feed I'm using twitterizer2. Everything works fine: I get the user's home feed and the photo link for each tweet, but the photo doesn't display in the browser. If I open the picture link from the browser address bar it is displayed, and if I use another photo link not related to Twitter everything works fine as well. So I'm guessing it has something to do with Twitter.
My view is this:
<div class="NoBullets" style="font-size:@Model.TextSize">
    <ul style="list-style-type:none">
        @foreach (var status in Model.TStatusCollection)
        {
            <li>
                <img src="@status.User.ProfileImageLocation" style="float:left" width="48" height="48" align="bottom"> @status.Text<br />
                @string.Format("{0:dd MMMM yyyy} {0:H:mm}", status.CreatedDate)
            </li>
        }
    </ul>
</div>
And the model:
public class PortletMyTwitter : PortletBase
{
private int noOfTweets = 15;
private string textSize = "medium";
private string userExtAppID;
private TwitterStatusCollection tStatusCollection;
private IList<object> noOfTweetsList = new List<object>()
{
new {value = 5},
new {value = 10},
new {value = 15},
new {value = 20},
new {value = 25}
};
private IList<object> textSizeList = new List<object>()
{
new {value = "small"},
new {value = "medium"},
new {value = "large"}
};
public string UserExtAppID
{
get { return userExtAppID; }
set { userExtAppID = value; }
}
public IList<object> NoOfTweetsList
{
get { return noOfTweetsList; }
}
public int NoOfTweets
{
get { return noOfTweets; }
set { noOfTweets = value; }
}
public IList<object> TextSizeList
{
get { return textSizeList; }
}
public string TextSize
{
get { return textSize; }
set { textSize = value; }
}
public TwitterStatusCollection TStatusCollection
{
get { return tStatusCollection; }
}
public void GetSettings(XmlDocument xmlPortletState)
{
if (xmlPortletState.GetElementsByTagName("UserExtAppID").Count > 0)
{
if (xmlPortletState.GetElementsByTagName("UserExtAppID")[0].FirstChild != null)
UserExtAppID = ((System.Xml.XmlText)(xmlPortletState.GetElementsByTagName("UserExtAppID")[0]).FirstChild).Value;
}
if (xmlPortletState.GetElementsByTagName("HideHeader").Count > 0)
{
if (xmlPortletState.GetElementsByTagName("HideHeader")[0].FirstChild != null)
HideHeader = bool.Parse(((System.Xml.XmlText)(xmlPortletState.GetElementsByTagName("HideHeader")[0]).FirstChild).Value);
}
if (xmlPortletState.GetElementsByTagName("TextSize").Count > 0)
{
if (xmlPortletState.GetElementsByTagName("TextSize")[0].FirstChild != null)
try
{
TextSize = ((System.Xml.XmlText)(xmlPortletState.GetElementsByTagName("TextSize")[0]).FirstChild).Value;
}
catch
{
TextSize = "medium";
}
}
if (xmlPortletState.GetElementsByTagName("NoOfTweets").Count > 0)
{
if (xmlPortletState.GetElementsByTagName("NoOfTweets")[0].FirstChild != null)
try
{
NoOfTweets = Convert.ToInt32(((System.Xml.XmlText)(xmlPortletState.GetElementsByTagName("NoOfTweets")[0]).FirstChild).Value);
}
catch
{
NoOfTweets = 10;
}
}
UpdateFeed();
}
protected void UpdateFeed()
{
try
{
OAuthTokens oauthTokens = new OAuthTokens()
{
AccessToken = "",
AccessTokenSecret = "",
ConsumerKey = "",
ConsumerSecret = ""
};
TimelineOptions myOptions = new TimelineOptions();
myOptions.IncludeRetweets = false;
myOptions.UseSSL = true;
myOptions.APIBaseAddress = "https://api.twitter.com/1.1/";
myOptions.Count = NoOfTweets;
TwitterResponse<TwitterStatusCollection> twitterDataSource = TwitterTimeline.HomeTimeline(oauthTokens, myOptions);
tStatusCollection = twitterDataSource.ResponseObject;
}
catch (Exception)
{
}
}
}

How to get modified values from dojo table

I have a Dojo table (grid) with a list of key/value pairs. Both fields are editable. Once a value is modified I call:
var items = grid.selection.getSelected();
However, the modified value is not picked up; only the old value is.
I tried the following:
dojo.parser.parse()
dojo.parser.instantiate([dojo.byId("tableDiv")]);
but neither of them worked. Can anyone suggest a solution for this?
function getAllItems() {
var returnData = "";
//dojo.parser.parse();
//dojo.parser.instantiate([dojo.byId("tableDiv")]);
//grid._refresh();
var items = grid.selection.getSelected();
function gotItems(items, request) {
var i;
for (i = 0; i < items.length; i++) {
var item = items[i];
var paramName = grid.store.getValues(item, "paramName");
var paramValue = grid.store.getValues(item, "paramValue");
if (returnData == "") {
returnData = paramName + "&" + paramValue;
} else {
returnData = returnData + "#" + paramName + "&"
+ paramValue;
} document.getElementById("returnData").value = returnData;
document.getElementById("successFlag").value = "true";
}
}
//Called when loop fails
function fetchFailed(error, request) {
alert("Error reading table data");
}
//Fetch the data.
jsonStore.fetch({
onComplete : gotItems,
onError : fetchFailed
});
}

Getting all the Term Stores in Sharepoint 2010 (web services or client-side object model)?

Is it possible with SharePoint 2010 (not 2013!) to get a list of all the Term Stores on the site using either the web services or the client-side object model?
I know 2013 has added a library for this, but that will not help me on 2010.
If not the whole list, how do I get the Term Store ID if I know a Term (which might or might not be in the TaxonomyHiddenList)?
Someone mentioned checking out the TaxonomyFieldType fields, so I hacked together these two methods. I do not know whether they will work under all circumstances.
The first function just returns the Term Store ID stored in the schema of the first TaxonomyFieldType* field we come across.
public static string GetDefaultTermStore(string site) {
var context = new ClientContext(site);
var fields = context.Web.Fields;
context.Load(fields, fs => fs.Include(f => f.SchemaXml, f => f.TypeAsString));
context.ExecuteQuery();
foreach (var field in fields) {
if (field.TypeAsString.StartsWith("TaxonomyFieldType")) {
var doc = XDocument.Parse(field.SchemaXml);
var node = doc.XPathSelectElement("//Name[text()='SspId']/../Value");
if (node != null && !string.IsNullOrEmpty(node.Value)) {
return node.Value;
}
}
}
throw new Exception("Term Store ID not found!");
}
The second function goes through all the fields and gets all the possible Term Store IDs and returns them in a list.
public static List<string> GetTermStores(string site) {
var context = new ClientContext(site);
var fields = context.Web.Fields;
context.Load(fields, fs => fs.Include(f => f.SchemaXml, f => f.TypeAsString));
context.ExecuteQuery();
var hashlist = new HashSet<string>(StringComparer.InvariantCultureIgnoreCase);
foreach (var field in fields) {
if (field.TypeAsString.StartsWith("TaxonomyFieldType")) {
var doc = XDocument.Parse(field.SchemaXml);
var node = doc.XPathSelectElement("//Name[text()='SspId']/../Value");
if (node != null && !string.IsNullOrEmpty(node.Value)) {
if (!hashlist.Contains(node.Value)) {
hashlist.Add(node.Value);
}
}
}
}
if (hashlist.Count == 0) throw new Exception("No Term Store IDs not found!");
return hashlist.ToList();
}
Is this a correct answer to my question, or does anyone have a more reliable way to get the IDs?
It does not seem like anyone else has a good answer for this question.
I have added the utility class I made from this below. It is a big block of uncommented code, for those who might need it:
using Microsoft.SharePoint.Client;
using System;
using System.Collections.Generic;
using System.Configuration;
using System.IO;
using System.Linq;
using System.Net;
using System.Text;
using System.Web.Services.Protocols;
using System.Windows.Forms;
using System.Xml.Linq;
using System.Xml.XPath;
namespace VitaminTilKanbanPusher.Sharepoint {
public class SharepointTaxonomyAgent {
//URLS:
//http://www.novolocus.com/2012/02/06/working-with-the-taxonomyclientservice-part-1-what-fields-are-there/
//
public static void Test() {
var site = ConfigurationManager.AppSettings["VitaminSite"];
//var list = ConfigurationManager.AppSettings["VitaminList"];
//var id = GetDefaultTermStore(site);
//var ids = GetTermStores(site);
var rs = GetAllTermSetNames(site);
var ts = GetTermSetTerms(site, "Some Name");
//var ts = GetTermSetTerms(site, "Some other name");
//var term = GetTermInfo(site, "Priority");
//var term2 = GetTermInfo(site, "My term");
//var termset = GetTermSetInfo(site, "My term");
//var termsets = GetTermSets(site, "My term");
}
public static string GetDefaultTermStore(string site) {
var context = new ClientContext(site);
context.ExecutingWebRequest += ctx_MixedAuthRequest;
var fields = context.Web.Fields;
context.Load(fields, fs => fs.Include(f => f.InternalName, f => f.SchemaXml, f => f.TypeAsString));
context.ExecuteQuery();
foreach (var field in fields) {
//field.InternalName== "TaxKeyword" -> possibly default?
if (field.TypeAsString.StartsWith("TaxonomyFieldType")) {
var doc = XDocument.Parse(field.SchemaXml);
var node = doc.XPathSelectElement("//Name[text()='SspId']/../Value");
if (node != null && !string.IsNullOrEmpty(node.Value)) {
return node.Value;
}
}
}
throw new Exception("Term Store ID not found!");
}
public static List<string> GetTermStores(string site) {
var context = new ClientContext(site);
context.ExecutingWebRequest += ctx_MixedAuthRequest;
var fields = context.Web.Fields;
context.Load(fields, fs => fs.Include(f => f.SchemaXml, f => f.TypeAsString));
context.ExecuteQuery();
var hashlist = new HashSet<string>(StringComparer.InvariantCultureIgnoreCase);
foreach (var field in fields) {
if (field.TypeAsString.StartsWith("TaxonomyFieldType")) {
var doc = XDocument.Parse(field.SchemaXml);
var node = doc.XPathSelectElement("//Name[text()='SspId']/../Value");
if (node != null && !string.IsNullOrEmpty(node.Value)) {
if (!hashlist.Contains(node.Value)) {
hashlist.Add(node.Value);
}
}
}
}
if (hashlist.Count == 0) throw new Exception("No Term Store IDs not found!");
return hashlist.ToList();
}
private static List<TermSet> _termSets;
public static List<TermSet> GetAllTermSetNames(string site, string onlySpecificTermSetName = null) {
if (_termSets != null) {
if (onlySpecificTermSetName == null) return _termSets;
foreach (var ts in _termSets) {
if (ts.Name.Equals(onlySpecificTermSetName, StringComparison.InvariantCultureIgnoreCase)) {
return new List<TermSet>() { ts };
}
}
return new List<TermSet>();
}
var context = new ClientContext(site);
context.ExecutingWebRequest += ctx_MixedAuthRequest;
var fields = context.Web.Fields;
context.Load(fields, fs => fs.Include(f => f.SchemaXml, f => f.TypeAsString));
context.ExecuteQuery();
var hashlist = new HashSet<string>(StringComparer.InvariantCultureIgnoreCase);
var termSets = new List<TermSet>();
TermSet theChosenTermSet = null;
foreach (var field in fields) {
if (field.TypeAsString.StartsWith("TaxonomyFieldType")) {
var ts = new TermSet();
var doc = XDocument.Parse(field.SchemaXml);
var fn = doc.Element("Field");
if (fn == null) continue;
if (fn.Attribute("DisplayName") == null) continue;
if (fn.Attribute("ID") == null) continue;
ts.Name = fn.Attribute("DisplayName").Value;
//Only 1 set?
if (onlySpecificTermSetName != null) {
if (!ts.Name.Equals(onlySpecificTermSetName, StringComparison.InvariantCultureIgnoreCase)) {
theChosenTermSet = ts;
}
}
if (fn.Attribute("Description") != null) {
ts.Description = fn.Attribute("Description").Value;
}
var node = doc.XPathSelectElement("//Name[text()='SspId']/../Value");
if (node != null && !string.IsNullOrEmpty(node.Value)) {
ts.TermStoreId = node.Value;
}
var node2 = doc.XPathSelectElement("//Name[text()='TermSetId']/../Value");
if (node2 != null && !string.IsNullOrEmpty(node2.Value)) {
ts.Id = node2.Value;
}
else {
continue; //No ID found
}
//Unique hits
if (!hashlist.Contains(ts.Id)) {
hashlist.Add(ts.Id);
termSets.Add(ts);
}
}
}
_termSets = termSets;
if (onlySpecificTermSetName != null) return (theChosenTermSet == null ? new List<TermSet>() : new List<TermSet>() { theChosenTermSet });
return termSets;
}
public static TermSet GetTermSetTerms(string site, string termName) {
var ts = GetAllTermSetNames(site, termName);
if (ts.Count == 0) throw new Exception("Could not find termset: " + termName);
var theTermSet = ts[0];
var proxy = new SharepointTaxWS.Taxonomywebservice();
proxy.UseDefaultCredentials = true;
proxy.PreAuthenticate = true;
proxy.Url = Path.Combine(site, "_vti_bin/taxonomyclientservice.asmx");
GetAuthCookie(proxy, site);
var lciden = 1033; //var lcidno = 1044; // System.Globalization.CultureInfo.CurrentCulture.LCID
var clientTime = DateTime.Now.AddYears(-2).ToUniversalTime().Ticks.ToString();
var termStoreId = new Guid(theTermSet.TermStoreId);// Guid.Parse(theTermSet.TermStoreId);
var termSetId = new Guid(theTermSet.Id);
string clientTimestamps = string.Format("<timeStamp>{0}</timeStamp>", clientTime);
string clientVersion = "<version>1</version>";
string termStoreIds = string.Format("<termStoreId>{0}</termStoreId>", termStoreId.ToString("D"));
string termSetIds = string.Format("<termSetId>{0}</termSetId>", termSetId.ToString("D"));
string serverTermSetTimestampXml;
string result = proxy.GetTermSets(termStoreIds, termSetIds, 1033, clientTimestamps, clientVersion, out serverTermSetTimestampXml);
var term = ParseTermSetInfo(result);
term.Description = theTermSet.Description;
term.Id = theTermSet.Id;
term.Name = theTermSet.Name;
return term;
}
//public static Term GetTermSetInfo(string site, string termName) {
// var proxy = new SharepointTaxWS.Taxonomywebservice();
// proxy.UseDefaultCredentials = true;
// proxy.PreAuthenticate = true;
// proxy.Url = Path.Combine(site, "_vti_bin/taxonomyclientservice.asmx");
// GetAuthCookie(proxy, site);
// var lciden = 1033; //var lcidno = 1044; // System.Globalization.CultureInfo.CurrentCulture.LCID
// var sets = proxy.GetChildTermsInTermSet(Guid.Parse(""), lciden, Guid.Parse("termsetguid"));
// var term = ParseTermInfo(sets);
// return term;
//}
public static Term GetTermInfo(string site, string termName) {
var proxy = new SharepointTaxWS.Taxonomywebservice();
proxy.UseDefaultCredentials = true;
proxy.PreAuthenticate = true;
proxy.Url = Path.Combine(site, "_vti_bin/taxonomyclientservice.asmx");
GetAuthCookie(proxy, site);
var lciden = 1033; //var lcidno = 1044; // System.Globalization.CultureInfo.CurrentCulture.LCID
var sets = proxy.GetTermsByLabel(termName, lciden, SharepointTaxWS.StringMatchOption.StartsWith, 100, null, false);
var term = ParseTermInfo(sets);
return term;
}
private static TermSet ParseTermSetInfo(string xml) {
//Not done
var info = XDocument.Parse(xml);
var ts = new TermSet();
ts.Terms = new List<Term>();
var n1 = info.XPathSelectElements("//T");
if (n1 != null) {
foreach (var item in n1) {
var t = new Term();
t.Id = item.Attribute("a9").Value;
t.Name = item.XPathSelectElement("LS/TL").Attribute("a32").Value;
t.TermSet = ts;
ts.Terms.Add(t);
}
}
return ts;
}
private static Term ParseTermInfo(string xml) {
var info = XDocument.Parse(xml);
var t = new Term();
var ts = new TermSet();
var n1 = info.XPathSelectElement("TermStore/T");
var n2 = info.XPathSelectElement("TermStore/T/LS/TL");
var n3 = info.XPathSelectElement("TermStore/T/TMS/TM");
if (n1 != null && n1.Attribute("a9") != null) {
t.Id = n1.Attribute("a9").Value;
}
if (n2 != null && n2.Attribute("a32") != null) {
t.Name = n2.Attribute("a32").Value;
}
if (n3 != null && n3.Attribute("a24") != null) {
ts.Id = n3.Attribute("a24").Value;
}
if (n3 != null && n3.Attribute("a12") != null) {
ts.Name = n3.Attribute("a12").Value;
}
t.TermSet = ts;
return t;
}
private static CookieCollection _theAuth;
private static bool _bNoClaims;
static void GetAuthCookie(SoapHttpClientProtocol proxy, string site) {
return;
//if (_bNoClaims) {
// return; // No claims.
//}
//// get the cookie collection - authentication workaround
//CookieCollection cook = null;
//try {
// if (_theAuth == null) {
// cook = ClaimClientContext.GetAuthenticatedCookies(site, 925, 525);
// }
// else {
// cook = _theAuth;
// }
// _theAuth = cook;
// _bNoClaims = false;
//}
//catch (ApplicationException ex) {
// if (ex.Message.Contains("claim")) _bNoClaims = true;
// Console.Write("Auth failed: " + ex.Message + " - ");
// //IGNORE
//}
//if (_theAuth != null) {
// proxy.CookieContainer = new CookieContainer();
// proxy.CookieContainer.Add(_theAuth);
//}
}
static void ctx_MixedAuthRequest(object sender, WebRequestEventArgs e) {
//add the header that tells SharePoint to use Windows Auth
e.WebRequestExecutor.RequestHeaders.Add("X-FORMS_BASED_AUTH_ACCEPTED", "f");
}
}
public class TermSet {
public string Id { get; set; }
public string Name { get; set; }
public List<Term> Terms { get; set; }
public string TermStoreId { get; set; }
public string Description { get; set; }
public override string ToString() {
int tc = 0;
if (Terms != null) tc = Terms.Count;
return Name + "|" + Id + " (" + tc + "terms)";
}
}
public class Term {
public string Id { get; set; }
public string Name { get; set; }
public TermSet TermSet { get; set; }
public override string ToString() {
return Name + "|" + Id;
}
}
}
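For completeness, a minimal usage sketch of the utility class above; the site URL is a placeholder, everything else comes from the code as posted:

// Example console usage of SharepointTaxonomyAgent (the site URL is hypothetical).
var site = "http://sharepoint.example.local/sites/demo";

// All term store IDs referenced by TaxonomyFieldType fields on the web:
List<string> termStoreIds = SharepointTaxonomyAgent.GetTermStores(site);

// All term sets those fields point at:
List<TermSet> termSets = SharepointTaxonomyAgent.GetAllTermSetNames(site);

// Terms of one named set (the name must match a TaxonomyFieldType field's display name):
TermSet prioritySet = SharepointTaxonomyAgent.GetTermSetTerms(site, "Priority");
foreach (Term t in prioritySet.Terms)
    Console.WriteLine(t);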

Why does this controller double the inserts when I try to archive the results of the Bing Search API?

I'm trying to archive my search results for a term by:
Using the Bing API in an async controller
Inserting them into a database using Entity Framework
For whatever reason the query returns 50 results, but 100 rows end up being inserted into the database.
My Controller Code:
public class DHWebServicesController : AsyncController
{
//
// GET: /WebService/
private DHContext context = new DHContext();
[HttpPost]
public void RunReportSetAsync(int id)
{
int iTotalCount = 1;
AsyncManager.OutstandingOperations.Increment(iTotalCount);
if (!context.DHSearchResults.Any(xx => xx.CityMarketComboRunID == id))
{
string strBingSearchUri = @ConfigurationManager.AppSettings["BingSearchURI"];
string strBingAccountKey = @ConfigurationManager.AppSettings["BingAccountKey"];
string strBingUserAccountKey = @ConfigurationManager.AppSettings["BingUserAccountKey"];
CityMarketComboRun cityMarketComboRun = context.CityMarketComboRuns.Include(xx => xx.CityMarketCombo).Include(xx => xx.CityMarketCombo.City).First(xx => xx.CityMarketComboRunID == id);
var bingContainer = new Bing.BingSearchContainer(new Uri(strBingSearchUri));
bingContainer.Credentials = new NetworkCredential(strBingUserAccountKey, strBingAccountKey);
// now we can build the query
Keyword keyword = context.Keywords.First();
var bingWebQuery = bingContainer.Web(keyword.Name, "en-US", "Moderate", cityMarketComboRun.CityMarketCombo.City.Latitude, cityMarketComboRun.CityMarketCombo.City.Longitude, null, null, null);
var bingWebResults = bingWebQuery.Execute();
context.Configuration.AutoDetectChangesEnabled = false;
int i = 1;
DHSearchResult dhSearchResult = new DHSearchResult();
List<DHSearchResult> lst = new List<DHSearchResult>();
var webResults = bingWebResults.ToList();
foreach (var result in webResults)
{
dhSearchResult = new DHSearchResult();
dhSearchResult.BingID = result.ID;
dhSearchResult.CityMarketComboRunID = id;
dhSearchResult.Description = result.Description;
dhSearchResult.DisplayUrl = result.DisplayUrl;
dhSearchResult.KeywordID = keyword.KeywordID;
dhSearchResult.Created = DateTime.Now;
dhSearchResult.Modified = DateTime.Now;
dhSearchResult.Title = result.Title;
dhSearchResult.Url = result.Url;
dhSearchResult.Ordinal = i;
lst.Add(dhSearchResult);
i++;
}
foreach (DHSearchResult c in lst)
{
context.DHSearchResults.Add(c);
context.SaveChanges();
}
AsyncManager.Parameters["message"] = "The total number of results was "+lst.Count+". And there are " + context.DHSearchResults.Count().ToString();
}
else
{
AsyncManager.Parameters["message"] = "You have already run this report";
}
AsyncManager.OutstandingOperations.Decrement(iTotalCount);
}
public string RunReportSetCompleted(string message)
{
string str = message;
return str;
}
}
Here is how I am calling it from my asp.net mvc 4 page.
@Ajax.ActionLink("Run Report", "GatherKeywordsFromBing", "DHWebServices",
    new { id = item.CityMarketComboRunID },
    new AjaxOptions { OnSuccess = "ShowNotifier();", UpdateTargetId = "TopNotifierMessage", HttpMethod = "POST", InsertionMode = InsertionMode.Replace, LoadingElementId = strCityMarketComboProgressID, LoadingElementDuration = 1000 },
    new { @class = "ViewLink" })
<span class="ProgressIndicator" id="@strCityMarketComboProgressID"><img src="@Url.Content("~/Content/img/SmallBall.gif")" alt="loading" /></span>
Try saving only once:
foreach (DHSearchResult c in lst)
{
context.DHSearchResults.Add(c);
}
context.SaveChanges();
Also, there's nothing asynchronous in your code, so there's no point in using an asynchronous controller. Not only will it not improve anything, it might actually make things worse.
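Putting both suggestions together, the action can be a plain synchronous controller method that batches the inserts into a single SaveChanges call. A rough sketch under those assumptions; BuildResultsFromBing is shorthand for the Bing query and mapping loop already shown in the question, not an existing method:

[HttpPost]
public ActionResult RunReportSet(int id)
{
    if (context.DHSearchResults.Any(x => x.CityMarketComboRunID == id))
        return Json(new { message = "You have already run this report" });

    // Hypothetical helper: runs the Bing query and maps the results to DHSearchResult,
    // exactly as in the loop from the question.
    List<DHSearchResult> lst = BuildResultsFromBing(id);

    context.Configuration.AutoDetectChangesEnabled = false;
    foreach (DHSearchResult c in lst)
    {
        context.DHSearchResults.Add(c);
    }
    context.SaveChanges(); // one round of change detection, one batch of inserts

    return Json(new { message = "Inserted " + lst.Count + " results." });
}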