NoSuchMethodError: The setter "movies=" was called on null - api

Future<YifyMovies> getData() async {
var res = await http.get("https://yts.lt/api/v2/list_movies.json");
var decodedJson = jsonDecode(res.body);
YifyMovies movie = YifyMovies();
movie.data.movies = [];
for (var json in decodedJson) {
movie.data.movies.add(Movies.fromJson(json));
}
return movie;
}
YifyMovies class:
It's the JSON from the above API converted into Dart.
class YifyMovies {
String status;
String statusMessage;
Data data;
YifyMovies({this.status, this.statusMessage, this.data});
YifyMovies.fromJson(Map<String, dynamic> json) {
status = json['status'];
statusMessage = json['status_message'];
data = json['data'] != null ? new Data.fromJson(json['data']) : null;
}
Map<String, dynamic> toJson() {
final Map<String, dynamic> data = new Map<String, dynamic>();
data['status'] = this.status;
data['status_message'] = this.statusMessage;
if (this.data != null) {
data['data'] = this.data.toJson();
}
return data;
}
}
class Data {
int movieCount;
int limit;
int pageNumber;
List<Movies> movies;
Data({this.movieCount, this.limit, this.pageNumber, this.movies});
Data.fromJson(Map<String, dynamic> json) {
movieCount = json['movie_count'];
limit = json['limit'];
pageNumber = json['page_number'];
if (json['movies'] != null) {
movies = new List<Movies>();
json['movies'].forEach((v) {
movies.add(new Movies.fromJson(v));
});
}
}
Map<String, dynamic> toJson() {
final Map<String, dynamic> data = new Map<String, dynamic>();
data['movie_count'] = this.movieCount;
data['limit'] = this.limit;
data['page_number'] = this.pageNumber;
if (this.movies != null) {
data['movies'] = this.movies.map((v) => v.toJson()).toList();
}
return data;
}
}
class Movies {
int id;
String url;
String imdbCode;
String title;
String titleEnglish;
String titleLong;
String slug;
int year;
double rating;
int runtime;
List<String> genres;
String summary;
String descriptionFull;
String synopsis;
String ytTrailerCode;
String language;
String mpaRating;
String backgroundImage;
String backgroundImageOriginal;
String smallCoverImage;
String mediumCoverImage;
String largeCoverImage;
String state;
List<Torrents> torrents;
String dateUploaded;
int dateUploadedUnix;
Movies(
{this.id,
this.url,
this.imdbCode,
this.title,
this.titleEnglish,
this.titleLong,
this.slug,
this.year,
this.rating,
this.runtime,
this.genres,
this.summary,
this.descriptionFull,
this.synopsis,
this.ytTrailerCode,
this.language,
this.mpaRating,
this.backgroundImage,
this.backgroundImageOriginal,
this.smallCoverImage,
this.mediumCoverImage,
this.largeCoverImage,
this.state,
this.torrents,
this.dateUploaded,
this.dateUploadedUnix});
Movies.fromJson(Map<String, dynamic> json) {
id = json['id'];
url = json['url'];
imdbCode = json['imdb_code'];
title = json['title'];
titleEnglish = json['title_english'];
titleLong = json['title_long'];
slug = json['slug'];
year = json['year'];
rating = json['rating'];
runtime = json['runtime'];
genres = json['genres'].cast<String>();
summary = json['summary'];
descriptionFull = json['description_full'];
synopsis = json['synopsis'];
ytTrailerCode = json['yt_trailer_code'];
language = json['language'];
mpaRating = json['mpa_rating'];
backgroundImage = json['background_image'];
backgroundImageOriginal = json['background_image_original'];
smallCoverImage = json['small_cover_image'];
mediumCoverImage = json['medium_cover_image'];
largeCoverImage = json['large_cover_image'];
state = json['state'];
if (json['torrents'] != null) {
torrents = new List<Torrents>();
json['torrents'].forEach((v) {
torrents.add(new Torrents.fromJson(v));
});
}
dateUploaded = json['date_uploaded'];
dateUploadedUnix = json['date_uploaded_unix'];
}
Map<String, dynamic> toJson() {
final Map<String, dynamic> data = new Map<String, dynamic>();
data['id'] = this.id;
data['url'] = this.url;
data['imdb_code'] = this.imdbCode;
data['title'] = this.title;
data['title_english'] = this.titleEnglish;
data['title_long'] = this.titleLong;
data['slug'] = this.slug;
data['year'] = this.year;
data['rating'] = this.rating;
data['runtime'] = this.runtime;
data['genres'] = this.genres;
data['summary'] = this.summary;
data['description_full'] = this.descriptionFull;
data['synopsis'] = this.synopsis;
data['yt_trailer_code'] = this.ytTrailerCode;
data['language'] = this.language;
data['mpa_rating'] = this.mpaRating;
data['background_image'] = this.backgroundImage;
data['background_image_original'] = this.backgroundImageOriginal;
data['small_cover_image'] = this.smallCoverImage;
data['medium_cover_image'] = this.mediumCoverImage;
data['large_cover_image'] = this.largeCoverImage;
data['state'] = this.state;
if (this.torrents != null) {
data['torrents'] = this.torrents.map((v) => v.toJson()).toList();
}
data['date_uploaded'] = this.dateUploaded;
data['date_uploaded_unix'] = this.dateUploadedUnix;
return data;
}
}
class Torrents {
String url;
String hash;
String quality;
String type;
int seeds;
int peers;
String size;
int sizeBytes;
String dateUploaded;
int dateUploadedUnix;
Torrents(
{this.url,
this.hash,
this.quality,
this.type,
this.seeds,
this.peers,
this.size,
this.sizeBytes,
this.dateUploaded,
this.dateUploadedUnix});
Torrents.fromJson(Map<String, dynamic> json) {
url = json['url'];
hash = json['hash'];
quality = json['quality'];
type = json['type'];
seeds = json['seeds'];
peers = json['peers'];
size = json['size'];
sizeBytes = json['size_bytes'];
dateUploaded = json['date_uploaded'];
dateUploadedUnix = json['date_uploaded_unix'];
}
Map<String, dynamic> toJson() {
final Map<String, dynamic> data = new Map<String, dynamic>();
data['url'] = this.url;
data['hash'] = this.hash;
data['quality'] = this.quality;
data['type'] = this.type;
data['seeds'] = this.seeds;
data['peers'] = this.peers;
data['size'] = this.size;
data['size_bytes'] = this.sizeBytes;
data['date_uploaded'] = this.dateUploaded;
data['date_uploaded_unix'] = this.dateUploadedUnix;
return data;
}
}
Any help, please? Tell me if any more info is required.
I'm trying to list all the movies and use them in a TabView in Flutter that can show them in both a ListView and a GridView. I'm using the BLoC architecture for layouts, so the YifyMovies class is in hom_model and the code above is in home_provider.

The setter fails because movie.data is still null when you call movie.data.movies = []; the default YifyMovies() constructor never creates a Data object. On top of that, the response body is a single JSON object (with status, status_message and data keys), not a list, so looping over decodedJson directly would not give you movie entries anyway.
Your YifyMovies class is already handling the parsing for you, so your code should change from
Future<YifyMovies> getData() async {
var res = await http.get("https://yts.lt/api/v2/list_movies.json");
var decodedJson = jsonDecode(res.body);
YifyMovies movie = YifyMovies();
movie.data.movies = [];
for (var json in decodedJson) {
movie.data.movies.add(Movies.fromJson(json));
}
return movie;
}
to
Future<YifyMovies> getData() async {
var res = await http.get("https://yts.lt/api/v2/list_movies.json");
var decodedJson = jsonDecode(res.body);
YifyMovies movie = YifyMovies.fromJson(decodedJson);
return movie;
}

Related

How to use MemoryCache in this method?

How can I add a memory cache to this method?
This is the section of my code that I want to cache:
public IActionResult Index(int pageId = 1, string filter = "",
int startPrice = 0, int endPrice = 0, string getType = "", string orderByType = "date",
List<int> selectedGroups = null, List<int> selectedBrand = null, List<int> selectedTags = null
, List<int> selectedsize = null , string Discount = "")
{
ViewBag.selectedGroups = selectedGroups;
ViewBag.selectedTags = selectedTags;
ViewBag.selectedsize = selectedsize;
ViewBag.Discount = Discount;
ViewBag.getType = getType;
ViewBag.Groups = _productService.GetAllGroup();
ViewBag.Tags = _productService.GetTags().Where(c => c.ActiveRow).ToList();
ViewBag.size = _productService.GetSizes().ToList();
ViewBag.pageId = pageId;
return View(_productService.GetProducttype(pageId, filter, startPrice, endPrice, getType, orderByType, selectedGroups, selectedBrand, 24, selectedTags, selectedsize, Discount));
}
private readonly IMemoryCache _memoryCache;
public Constructor (IMemoryCache memoryCache)
{
_memoryCache = memoryCache;
}
public IActionResult Index(int pageId = 1, string filter = "",
int startPrice = 0, int endPrice = 0, string getType = "", string orderByType = "date",
List<int> selectedGroups = null, List<int> selectedBrand = null, List<int> selectedTags = null
, List<int> selectedsize = null , string Discount = "")
{
ViewBag.selectedGroups = selectedGroups;
ViewBag.selectedTags = selectedTags;
ViewBag.selectedsize = selectedsize;
ViewBag.Discount = Discount;
ViewBag.getType = getType;
var groups = new List<Group>();
if (_memoryCache.TryGetValue("groups", out groups)
{
ViewBag.Groups = groups;
}
else
{
groups = _productService.GetAllGroup();
_memoryCache.Set("groups", groups);
ViewBag.Groups = groups;
}
var tags = new List<Tag>();
if (_memoryCache.TryGetValue("tags", out tags)
{
ViewBag.Tags = tags;
}
else
{
tags = _productService.GetTags().Where(c => c.ActiveRow).ToList();
_memoryCache.Set("tags", tags);
ViewBag.Tags = tags;
}
var sizes = new List<Size>();
if (_memoryCache.TryGetValue("sizes", out sizes)
{
ViewBag.size = sizes;
}
else
{
sizes = _productService.GetSizes().ToList();
_memoryCache.Set("sizes", sizes);
ViewBag.size = sizes;
}
if (_memoryCache.TryGetValue("pageId", out int cachedPageId))
{
ViewBag.pageId = cachedPageId;
}
else
{
_memoryCache.Set("pageId", pageId);
ViewBag.pageId = pageId;
}
return View(_productService.GetProducttype(pageId, filter, startPrice, endPrice, getType, orderByType, selectedGroups, selectedBrand, 24, selectedTags, selectedsize, Discount));
}
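I also saw that IMemoryCache has a GetOrCreate extension, which wraps the TryGetValue/Set pair and lets the expiration policy sit in one place. Would a generic helper along these lines be a cleaner way to do the same thing? (The class name, the 10-minute expiration, and the commented usage are just my placeholders.)
using System;
using Microsoft.Extensions.Caching.Memory;

public class CachedLookupHelper
{
    private readonly IMemoryCache _memoryCache;

    public CachedLookupHelper(IMemoryCache memoryCache)
    {
        _memoryCache = memoryCache;
    }

    // On a cache miss, GetOrCreate runs the factory, stores the result, and returns it;
    // on a hit it returns the cached value without touching the loader.
    public T GetCached<T>(string key, Func<T> load)
    {
        return _memoryCache.GetOrCreate(key, entry =>
        {
            entry.SetAbsoluteExpiration(TimeSpan.FromMinutes(10)); // assumed expiration policy
            return load();
        });
    }
}

// Usage inside the action would then look roughly like:
// ViewBag.Groups = _cachedLookupHelper.GetCached("groups", () => _productService.GetAllGroup());
// ViewBag.Tags = _cachedLookupHelper.GetCached("tags", () => _productService.GetTags().Where(c => c.ActiveRow).ToList());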

Error when uploading file using Microsoft Graph API

I am trying to upload a large file to OneDrive using the Microsoft Graph API.
The upload itself completes, but the resulting file is damaged.
Please help me solve the problem.
public ActionResult UploadLargeFiles(string id, [FromForm]IFormFile files)
{
string fileName = files.FileName;
int fileSize = Convert.ToInt32(files.Length);
var uploadProvider = new JObject();
var res = new JArray();
var isExistence = _mailService.GetUploadFolder(id);
if (isExistence != HttpStatusCode.OK)
{
var createFolder = _mailService.CreateUploadFolder(id);
if (createFolder != HttpStatusCode.Created)
{
return BadRequest(ModelState);
}
}
if (files.Length > 0)
{
var uploadSessionUrl = _mailService.CreateUploadSession(id, fileName);
if (uploadSessionUrl != null)
{
if (fileSize < 4194304)
{
uploadProvider = _mailService.UploadByteFile(id, uploadSessionUrl, files);
res.Add(uploadProvider);
}
}
else
{
return BadRequest(ModelState);
}
}
return Ok();
}
createUploadSession
public string CreateUploadSession(string upn, string fileName)
{
var uploadSession = _mailGraphService.CreateUploadSession(upn, fileName).Result;
var sessionResult = new UploadSessionDTO(uploadSession);
return sessionResult.uploadUrl;
}
public async Task<UploadSessionDTO> CreateUploadSession(string upn, string fileName)
{
this.InitHttpClient();
var jObject = JObject.FromObject(new { item = new Dictionary<string, object> { { "@microsoft.graph.conflictBehavior", "rename" } }, fileSystemInfo = new Dictionary<string, object> { { "@odata.type", "microsoft.graph.fileSystemInfo" } }, name = fileName });
var toJson = JsonConvert.SerializeObject(jObject);
var content = new StringContent(toJson, Encoding.UTF8, "application/json");
var response = await _client.PostAsync("users/"+ upn + "/drive/root:/MailFiles/" + fileName +":/createUploadSession", content);
if (!response.IsSuccessStatusCode)
return null;
var strData = await response.Content.ReadAsStringAsync();
dynamic uploadSession = JsonConvert.DeserializeObject<UploadSessionDTO>(strData);
return uploadSession;
}
public JObject LargeFileUpload(string upn, string url, IFormFile files)
{
var responseCode = HttpStatusCode.OK;
var jObject = new JObject();
int idx = 0;
int fileSize = Convert.ToInt32(files.Length);
int fragSize = 4 * 1024 * 1024; //4MB => 4 * 1024 * 1024;
var byteRemaining = fileSize;
var numFragments = (byteRemaining / fragSize) + 1;
while (idx < numFragments)
{
var chunkSize = fragSize;
var start = idx * fragSize;
var end = idx * fragSize + chunkSize - 1;
var offset = idx * fragSize;
if (byteRemaining < chunkSize)
{
chunkSize = byteRemaining;
end = fileSize - 1;
}
var contentRange = " bytes " + start + "-" + end + "/" + fileSize;
byte[] file = new byte[chunkSize];
using (var client = new HttpClient())
{
var content = new ByteArrayContent(file);
content.Headers.Add("Content-Length", chunkSize.ToString());
content.Headers.Add("Content-Range", contentRange);
var response = client.PutAsync(url, content);
var strData = response.Result.Content.ReadAsStringAsync().Result;
responseCode = response.Result.StatusCode;
//upload succeeded
if (responseCode == HttpStatusCode.Created)
{
JObject data = JObject.Parse(strData);
string downloadUrl = data["@content.downloadUrl"].ToString();
string itemId = data["id"].ToString();
//convert file size to KB
fileSize = fileSize / 1000;
jObject = JObject.FromObject(new { name = files.Name, id = itemId, url = downloadUrl, size = (double)fileSize });
}
//upload conflict
else if (responseCode == HttpStatusCode.Conflict)
{
var restart = RestartByteFile(upn, url, files.Name);
responseCode = restart;
}
}
byteRemaining = byteRemaining - chunkSize;
idx++;
}
if (responseCode == HttpStatusCode.Created) { return jObject; }
else return jObject = JObject.FromObject(new { result = "failed" });
}
When I checked OneDrive, the file appeared to have uploaded normally, but when I downloaded and opened it, it was corrupted.
I would like to know why the file gets corrupted during upload and how to fix it.
If the problem cannot be solved, please let me know that as well.
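For reference, here is a minimal sketch of how I would expect the fragment loop to read real bytes from the IFormFile stream before each PUT, with a Content-Range header per fragment. The class and method names and the fragment size are my own assumptions (as far as I can tell, the docs recommend fragment sizes that are a multiple of 320 KiB), and error handling is omitted:
using System;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;

public static class UploadSessionSketch
{
    // Reads the uploaded file fragment by fragment and PUTs each fragment to the
    // upload session URL with a matching Content-Range header.
    public static async Task UploadInFragmentsAsync(string uploadUrl, IFormFile files)
    {
        const int fragSize = 320 * 1024 * 10; // 3,200 KiB, a multiple of 320 KiB
        long fileSize = files.Length;
        using var client = new HttpClient();
        using var stream = files.OpenReadStream();
        var buffer = new byte[fragSize];
        long position = 0;
        while (position < fileSize)
        {
            int read = await stream.ReadAsync(buffer, 0, fragSize); // the actual bytes for this fragment
            if (read == 0) break; // defensive: stream ended early
            var content = new ByteArrayContent(buffer, 0, read);
            content.Headers.ContentRange = new ContentRangeHeaderValue(position, position + read - 1, fileSize);
            var response = await client.PutAsync(uploadUrl, content);
            response.EnsureSuccessStatusCode(); // 202 for intermediate fragments, 200/201 for the last one
            position += read;
        }
    }
}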

Error: operator '==' cannot be applied to operands of type 'string' and 'int?'

public ActionResult update( int? id)
{
BusDetailsModel model = new BusDetailsModel();
using (var ent = new OnlineReservation.AddData.OnlineReservationEntities1())
{
var db = ent.BusDetails.FirstOrDefault(x=>x.busId ==id);
model.BusId = db.busId;
model.Busname = db.busname;
model.Busowner = db.busowner;
model.Bustype = db.busType;
model.Buscapacity = db.busCapacity;
}
return View(model);
}

Android - Get attributes from CDATA section using XMLPullParser

The XML looks like this (taken from an RSS feed):
<description>
<![CDATA[
<div><a href='articleURL'><img src='pic.jpg' alt='blah blah' title='blah blah'
border='0' width='100' height='56'></a></div>
]]>
gist of the article.....
</description>
I want to get the following:
img src - the attribute holding the article picture
the article gist at the end of the tag
But when running I get a NullPointerException. Everything else (outside the CDATA section) works just fine.
The code I used:
class BackgroundParser extends AsyncTask<String, String, Integer>{
int headlineCount = 0;
String headlineTitle = "";
Bitmap pic = null;
String xmlDate = "";
String gist = "";
String articleUrl = "";
@Override
protected Integer doInBackground(String... params) {
// TODO Auto-generated method stub
try {
URL rssFeed = new URL(params[0]);
XmlPullParserFactory factory = XmlPullParserFactory.newInstance();
XmlPullParser parser = factory.newPullParser();
InputStream is = rssFeed.openStream();
parser.setInput(is, null);
boolean item = false;
boolean title = false;
boolean date = false;
boolean description = false;
boolean link = false;
String tagName;
int eventType = parser.getEventType();
while(eventType!=XmlPullParser.END_DOCUMENT){
if(eventType==XmlPullParser.START_TAG){
tagName = parser.getName();
if(item){
if(tagName.equals("title"))title = true;
if(tagName.equals("description")){
String img = parser.getAttributeValue(null, "img src");
Log.i("Info", img);
pic = getBitmapFromURL(img);
}
if(tagName.equals("pubDate"))date = true;
if(tagName.equals("description"))description = true;
if(tagName.equals("link"))link = true;
}
else{
if(tagName.equals("item"))item = true;
}
}
if(eventType==XmlPullParser.END_TAG){
tagName = parser.getName();
if(tagName.equals("item")){
item = false;
headlines.add(new Headline(headlineTitle,xmlDate,pic,gist,articleUrl));
headlineTitle = null; xmlDate = null; pic = null; gist = null; articleUrl = null;
headlineCount++;
}
}
if(eventType==XmlPullParser.TEXT){
if(title){
headlineTitle = parser.getText();
Log.i("Info", headlineTitle);
title = false;
}
if(date){
xmlDate = parser.getText();
Log.i("Info", xmlDate);
date = false;
}
if(description){
gist = parser.getText();
Log.i("info",gist);
description = false;
}
if(link){
articleUrl = parser.getText();
Log.i("info", articleUrl);
link = false;
}
}
eventType = parser.next();
}
This is what I did:
if(tagName.equals("description")){
int token = parser.nextToken();
while(token!=XmlPullParser.CDSECT){
token = parser.nextToken();
}
String cdata = parser.getText();
Log.i("Info", cdata);
String result = cdata.substring(cdata.indexOf("src='")+5, cdata.indexOf("jpg")+3);
Log.i("Info", result);
pic = getBitmapFromURL(result);
}
Is there a more elegant way of doing this?

Getting all the Term Stores in Sharepoint 2010 (web services or client-side object model)?

Is it possible with Sharepoint 2010 (not 2013!) to get a list of all the Term Stores on the site using either the web services or the client-side object model?
I know 2013 has added a library for it, but that will not help me on 2010.
If not the whole list, how do I get the Term Store ID, if I know a Term (that might or might not be in the TaxonomyHiddenList)?
Someone mentioned checking out the TaxonomyFieldType fields, so I hacked together these 2 methods. I do not know if these will work under all circumstances.
The first function just returns the Term Store ID that is stored in the schema of the first TaxonomyFieldType* field we come across.
public static string GetDefaultTermStore(string site) {
var context = new ClientContext(site);
var fields = context.Web.Fields;
context.Load(fields, fs => fs.Include(f => f.SchemaXml, f => f.TypeAsString));
context.ExecuteQuery();
foreach (var field in fields) {
if (field.TypeAsString.StartsWith("TaxonomyFieldType")) {
var doc = XDocument.Parse(field.SchemaXml);
var node = doc.XPathSelectElement("//Name[text()='SspId']/../Value");
if (node != null && !string.IsNullOrEmpty(node.Value)) {
return node.Value;
}
}
}
throw new Exception("Term Store ID not found!");
}
The second function goes through all the fields and gets all the possible Term Store IDs and returns them in a list.
public static List<string> GetTermStores(string site) {
var context = new ClientContext(site);
var fields = context.Web.Fields;
context.Load(fields, fs => fs.Include(f => f.SchemaXml, f => f.TypeAsString));
context.ExecuteQuery();
var hashlist = new HashSet<string>(StringComparer.InvariantCultureIgnoreCase);
foreach (var field in fields) {
if (field.TypeAsString.StartsWith("TaxonomyFieldType")) {
var doc = XDocument.Parse(field.SchemaXml);
var node = doc.XPathSelectElement("//Name[text()='SspId']/../Value");
if (node != null && !string.IsNullOrEmpty(node.Value)) {
if (!hashlist.Contains(node.Value)) {
hashlist.Add(node.Value);
}
}
}
}
if (hashlist.Count == 0) throw new Exception("No Term Store IDs found!");
return hashlist.ToList();
}
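A quick usage check of the two helpers above from a console app (the site URL is just a placeholder for my real one):
var site = "http://sharepoint.example.com/sites/teamsite"; // placeholder URL
Console.WriteLine("Default term store: " + GetDefaultTermStore(site));
foreach (var termStoreId in GetTermStores(site))
{
    Console.WriteLine("Term store: " + termStoreId);
}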
Is this a correct answer to my question, or does anyone have a more reliable way to get the IDs?
It does not seem like anyone else has a good answer for this question.
I have added the utility class I made from this below. It is a big block of mostly uncommented code, for those who might need it:
using Microsoft.SharePoint.Client;
using System;
using System.Collections.Generic;
using System.Configuration;
using System.IO;
using System.Linq;
using System.Net;
using System.Text;
using System.Web.Services.Protocols;
using System.Windows.Forms;
using System.Xml.Linq;
using System.Xml.XPath;
namespace VitaminTilKanbanPusher.Sharepoint {
public class SharepointTaxonomyAgent {
//URLS:
//http://www.novolocus.com/2012/02/06/working-with-the-taxonomyclientservice-part-1-what-fields-are-there/
//
public static void Test() {
var site = ConfigurationManager.AppSettings["VitaminSite"];
//var list = ConfigurationManager.AppSettings["VitaminList"];
//var id = GetDefaultTermStore(site);
//var ids = GetTermStores(site);
var rs = GetAllTermSetNames(site);
var ts = GetTermSetTerms(site, "Some Name");
//var ts = GetTermSetTerms(site, "Some other name");
//var term = GetTermInfo(site, "Priority");
//var term2 = GetTermInfo(site, "My term");
//var termset = GetTermSetInfo(site, "My term");
//var termsets = GetTermSets(site, "My term");
}
public static string GetDefaultTermStore(string site) {
var context = new ClientContext(site);
context.ExecutingWebRequest += ctx_MixedAuthRequest;
var fields = context.Web.Fields;
context.Load(fields, fs => fs.Include(f => f.InternalName, f => f.SchemaXml, f => f.TypeAsString));
context.ExecuteQuery();
foreach (var field in fields) {
//field.InternalName== "TaxKeyword" -> possibly default?
if (field.TypeAsString.StartsWith("TaxonomyFieldType")) {
var doc = XDocument.Parse(field.SchemaXml);
var node = doc.XPathSelectElement("//Name[text()='SspId']/../Value");
if (node != null && !string.IsNullOrEmpty(node.Value)) {
return node.Value;
}
}
}
throw new Exception("Term Store ID not found!");
}
public static List<string> GetTermStores(string site) {
var context = new ClientContext(site);
context.ExecutingWebRequest += ctx_MixedAuthRequest;
var fields = context.Web.Fields;
context.Load(fields, fs => fs.Include(f => f.SchemaXml, f => f.TypeAsString));
context.ExecuteQuery();
var hashlist = new HashSet<string>(StringComparer.InvariantCultureIgnoreCase);
foreach (var field in fields) {
if (field.TypeAsString.StartsWith("TaxonomyFieldType")) {
var doc = XDocument.Parse(field.SchemaXml);
var node = doc.XPathSelectElement("//Name[text()='SspId']/../Value");
if (node != null && !string.IsNullOrEmpty(node.Value)) {
if (!hashlist.Contains(node.Value)) {
hashlist.Add(node.Value);
}
}
}
}
if (hashlist.Count == 0) throw new Exception("No Term Store IDs found!");
return hashlist.ToList();
}
private static List<TermSet> _termSets;
public static List<TermSet> GetAllTermSetNames(string site, string onlySpecificTermSetName = null) {
if (_termSets != null) {
if (onlySpecificTermSetName == null) return _termSets;
foreach (var ts in _termSets) {
if (ts.Name.Equals(onlySpecificTermSetName, StringComparison.InvariantCultureIgnoreCase)) {
return new List<TermSet>() { ts };
}
}
return new List<TermSet>();
}
var context = new ClientContext(site);
context.ExecutingWebRequest += ctx_MixedAuthRequest;
var fields = context.Web.Fields;
context.Load(fields, fs => fs.Include(f => f.SchemaXml, f => f.TypeAsString));
context.ExecuteQuery();
var hashlist = new HashSet<string>(StringComparer.InvariantCultureIgnoreCase);
var termSets = new List<TermSet>();
TermSet theChosenTermSet = null;
foreach (var field in fields) {
if (field.TypeAsString.StartsWith("TaxonomyFieldType")) {
var ts = new TermSet();
var doc = XDocument.Parse(field.SchemaXml);
var fn = doc.Element("Field");
if (fn == null) continue;
if (fn.Attribute("DisplayName") == null) continue;
if (fn.Attribute("ID") == null) continue;
ts.Name = fn.Attribute("DisplayName").Value;
//Only 1 set?
if (onlySpecificTermSetName != null) {
if (ts.Name.Equals(onlySpecificTermSetName, StringComparison.InvariantCultureIgnoreCase)) {
theChosenTermSet = ts;
}
}
if (fn.Attribute("Description") != null) {
ts.Description = fn.Attribute("Description").Value;
}
var node = doc.XPathSelectElement("//Name[text()='SspId']/../Value");
if (node != null && !string.IsNullOrEmpty(node.Value)) {
ts.TermStoreId = node.Value;
}
var node2 = doc.XPathSelectElement("//Name[text()='TermSetId']/../Value");
if (node2 != null && !string.IsNullOrEmpty(node2.Value)) {
ts.Id = node2.Value;
}
else {
continue; //No ID found
}
//Unique hits
if (!hashlist.Contains(ts.Id)) {
hashlist.Add(ts.Id);
termSets.Add(ts);
}
}
}
_termSets = termSets;
if (onlySpecificTermSetName != null) return (theChosenTermSet == null ? new List<TermSet>() : new List<TermSet>() { theChosenTermSet });
return termSets;
}
public static TermSet GetTermSetTerms(string site, string termName) {
var ts = GetAllTermSetNames(site, termName);
if (ts.Count == 0) throw new Exception("Could not find termset: " + termName);
var theTermSet = ts[0];
var proxy = new SharepointTaxWS.Taxonomywebservice();
proxy.UseDefaultCredentials = true;
proxy.PreAuthenticate = true;
proxy.Url = Path.Combine(site, "_vti_bin/taxonomyclientservice.asmx");
GetAuthCookie(proxy, site);
var lciden = 1033; //var lcidno = 1044; // System.Globalization.CultureInfo.CurrentCulture.LCID
var clientTime = DateTime.Now.AddYears(-2).ToUniversalTime().Ticks.ToString();
var termStoreId = new Guid(theTermSet.TermStoreId);// Guid.Parse(theTermSet.TermStoreId);
var termSetId = new Guid(theTermSet.Id);
string clientTimestamps = string.Format("<timeStamp>{0}</timeStamp>", clientTime);
string clientVersion = "<version>1</version>";
string termStoreIds = string.Format("<termStoreId>{0}</termStoreId>", termStoreId.ToString("D"));
string termSetIds = string.Format("<termSetId>{0}</termSetId>", termSetId.ToString("D"));
string serverTermSetTimestampXml;
string result = proxy.GetTermSets(termStoreIds, termSetIds, 1033, clientTimestamps, clientVersion, out serverTermSetTimestampXml);
var term = ParseTermSetInfo(result);
term.Description = theTermSet.Description;
term.Id = theTermSet.Id;
term.Name = theTermSet.Name;
return term;
}
//public static Term GetTermSetInfo(string site, string termName) {
// var proxy = new SharepointTaxWS.Taxonomywebservice();
// proxy.UseDefaultCredentials = true;
// proxy.PreAuthenticate = true;
// proxy.Url = Path.Combine(site, "_vti_bin/taxonomyclientservice.asmx");
// GetAuthCookie(proxy, site);
// var lciden = 1033; //var lcidno = 1044; // System.Globalization.CultureInfo.CurrentCulture.LCID
// var sets = proxy.GetChildTermsInTermSet(Guid.Parse(""), lciden, Guid.Parse("termsetguid"));
// var term = ParseTermInfo(sets);
// return term;
//}
public static Term GetTermInfo(string site, string termName) {
var proxy = new SharepointTaxWS.Taxonomywebservice();
proxy.UseDefaultCredentials = true;
proxy.PreAuthenticate = true;
proxy.Url = Path.Combine(site, "_vti_bin/taxonomyclientservice.asmx");
GetAuthCookie(proxy, site);
var lciden = 1033; //var lcidno = 1044; // System.Globalization.CultureInfo.CurrentCulture.LCID
var sets = proxy.GetTermsByLabel(termName, lciden, SharepointTaxWS.StringMatchOption.StartsWith, 100, null, false);
var term = ParseTermInfo(sets);
return term;
}
private static TermSet ParseTermSetInfo(string xml) {
//Not done
var info = XDocument.Parse(xml);
var ts = new TermSet();
ts.Terms = new List<Term>();
var n1 = info.XPathSelectElements("//T");
if (n1 != null) {
foreach (var item in n1) {
var t = new Term();
t.Id = item.Attribute("a9").Value;
t.Name = item.XPathSelectElement("LS/TL").Attribute("a32").Value;
t.TermSet = ts;
ts.Terms.Add(t);
}
}
return ts;
}
private static Term ParseTermInfo(string xml) {
var info = XDocument.Parse(xml);
var t = new Term();
var ts = new TermSet();
var n1 = info.XPathSelectElement("TermStore/T");
var n2 = info.XPathSelectElement("TermStore/T/LS/TL");
var n3 = info.XPathSelectElement("TermStore/T/TMS/TM");
if (n1 != null && n1.Attribute("a9") != null) {
t.Id = n1.Attribute("a9").Value;
}
if (n2 != null && n2.Attribute("a32") != null) {
t.Name = n2.Attribute("a32").Value;
}
if (n3 != null && n3.Attribute("a24") != null) {
ts.Id = n3.Attribute("a24").Value;
}
if (n3 != null && n3.Attribute("a12") != null) {
ts.Name = n3.Attribute("a12").Value;
}
t.TermSet = ts;
return t;
}
private static CookieCollection _theAuth;
private static bool _bNoClaims;
static void GetAuthCookie(SoapHttpClientProtocol proxy, string site) {
return;
//if (_bNoClaims) {
// return; //No claims.
//}
//// get the cookie collection - authentication workaround
//CookieCollection cook = null;
//try {
// if (_theAuth == null) {
// cook = ClaimClientContext.GetAuthenticatedCookies(site, 925, 525);
// }
// else {
// cook = _theAuth;
// }
// _theAuth = cook;
// _bNoClaims = false;
//}
//catch (ApplicationException ex) {
// if (ex.Message.Contains("claim")) _bNoClaims = true;
// Console.Write("Auth feilet: " + ex.Message + " - ");
// //IGNORE
//}
//if (_theAuth != null) {
// proxy.CookieContainer = new CookieContainer();
// proxy.CookieContainer.Add(_theAuth);
//}
}
static void ctx_MixedAuthRequest(object sender, WebRequestEventArgs e) {
//add the header that tells SharePoint to use Windows Auth
e.WebRequestExecutor.RequestHeaders.Add("X-FORMS_BASED_AUTH_ACCEPTED", "f");
}
}
public class TermSet {
public string Id { get; set; }
public string Name { get; set; }
public List<Term> Terms { get; set; }
public string TermStoreId { get; set; }
public string Description { get; set; }
public override string ToString() {
int tc = 0;
if (Terms != null) tc = Terms.Count;
return Name + "|" + Id + " (" + tc + "terms)";
}
}
public class Term {
public string Id { get; set; }
public string Name { get; set; }
public TermSet TermSet { get; set; }
public override string ToString() {
return Name + "|" + Id;
}
}
}
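And a small example of calling the agent from a console program (the site URL and term set name are placeholders for your own values):
using System;
using VitaminTilKanbanPusher.Sharepoint;

class Program
{
    static void Main()
    {
        var site = "http://sharepoint.example.com/sites/teamsite"; // placeholder
        var termSet = SharepointTaxonomyAgent.GetTermSetTerms(site, "Some Name"); // placeholder term set name
        foreach (var term in termSet.Terms)
        {
            Console.WriteLine(term); // Term.ToString() prints "Name|Id"
        }
    }
}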