I have a page that lets a user add a new car to his car collection.
The user MUST add at least one image to the car item.
In AWS S3 I store the images using this path structure:
bucket_name/cars/HERE_IS_USER_ID/HERE_IS_CAR_ID/ - and the photos go here
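For illustration, the key for each photo is built from those two IDs (a rough sketch only; the helper below is hypothetical and not part of my code):
// Sketch: how a photo key under this layout is composed (hypothetical helper).
function buildPhotoKey(userId, carId, fileName) {
    return 'cars/' + userId + '/' + carId + '/' + fileName;
}
// buildPhotoKey(42, 7, 'front.jpg') -> 'cars/42/7/front.jpg'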
I use Fine Uploader.
If I add the car in 2 steps, everything works fine:
In step 1 I add the car to my database and send the userId and the new carId back from the server side.
Then in step 2 I initialize Fine Uploader with these IDs.
Here is the code:
(function () {
    'use strict';

    angular
        .module('cars.services.fineUploader', ['ngResource'])
        .factory('FineUploader', ['$resource', '$http', 'AppConfig', function ($resource, $http, AppConfig) {
            return function (divId, templateName, autoUpload, foldersBtn, uploadSuccessCallback, type, parentId, id) {
                var key = '';
                var keyExist = false;
                getKey();

                if (!qq.supportedFeatures.folderSelection) {
                    document.getElementById(foldersBtn).style.display = "none";
                }

                var uploader = new qq.s3.FineUploader({
                    element: document.getElementById(divId),
                    template: templateName,
                    autoUpload: autoUpload,
                    request: {
                        endpoint: 'carsbucket.s3.amazonaws.com',
                        accessKey: 'ACCESS',
                    },
                    extraButtons: [
                        {
                            element: document.getElementById(foldersBtn),
                            folders: true
                        }
                    ],
                    signature: {
                        endpoint: AppConfig.apiUrl + 'api/AmazonS3/GetSignature'
                    },
                    uploadSuccess: {
                        endpoint: uploadSuccessCallback()
                    },
                    iframeSupport: {
                        localBlankPagePath: '/success.html'
                    },
                    objectProperties: {
                        key: function (id) {
                            var fileName = uploader.getName(id);
                            var ext = qq.getExtension(fileName);
                            return key + '/' + fileName + "." + ext;
                        }
                    }
                });

                return {
                    Uploader: function () {
                        return uploader;
                    },
                    StartUpload: function () {
                        uploader.uploadStoredFiles();
                    },
                    InitUploader: function () {
                        while (!keyExist) {
                            getKey();
                        }
                    }
                };

                function getKey() {
                    var itemRequest = {
                        "ItemType": type,
                        "ParentId": parentId,
                        "ItemId": id
                    };
                    $http.post("http://localhost:42309/api/AmazonS3/GetItemKey", itemRequest).success(function (data, status) {
                        key = data;
                        keyExist = true;
                    });
                }
            };
        }]);
})();
And this is how I initialize this service:
function Uploader(shopId, shopItemId) {
    vm.step = 2;
    uploader = FineUploader('s3FU', 'qq-template-gallery', false, 'foldersButton', testCallback, 1, shopId, shopItemId);
}
Now I want to do all of the ADD ITEM logic in one step, but I don't know how, because to initialize Fine Uploader I need the carId and userId, and at that point I don't have them yet.
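For context, the working two-step flow boils down to the sketch below (the endpoint name and response fields are placeholders, not my real API); the uploader cannot be constructed until the server has answered with both IDs:
// Sketch of the current two-step flow (placeholder endpoint and response fields).
function addCar(car) {
    // Step 1: create the car record; the server answers with userId and the new carId.
    $http.post(AppConfig.apiUrl + 'api/Cars/Add', car).success(function (data) {
        // Step 2: only now can Fine Uploader be initialized, because the S3 key
        // (cars/{userId}/{carId}/...) needs both IDs.
        uploader = FineUploader('s3FU', 'qq-template-gallery', false, 'foldersButton',
            testCallback, 1, data.userId, data.carId);
    });
}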
I want to put an MQTT value into a span, but it doesn't work.
I think the way I load the data is wrong, but I don't know how to fix it.
I don't know if the code below will suffice. Any help would be greatly appreciated.
Someone else wrote a similar page; their code gets a value in 'detail', but mine doesn't. Why?
HTML
<span>{{ detail }}</span> <!-- empty, nothing is rendered -->
Front script 1
export default {
    props: {
        detail: {
            type: Object
        },
        isAdd: {
            type: Boolean,
            default: false
        },
    },
    data() {
        return {};
    },
    mounted() {
        this.$mqtt.subscribe('#');
    },
    methods: {
        SendData() {
            var temp = [];
            var name = Number(document.getElementsByClassName('name').innerHTML);
            temp.push(name);
            var temp_current = data.payload.Temp_Current;
            var error = data.payload.Error;
            var data = {
                //workcd: this.detail.namemodel,
                Temp_Current: temp_current,
                Error: error,
            };
            data = JSON.stringify(data);
            var pub_name_arr = this.detail.name.split(' ');
            var pub_name = 'CCComandTopic';
            this.$mqtt.publish(
                pub_name + '/' + pub_name_arr[1] + '/' + pub_name_arr[2],
                data,
            );
        },
    }
}
Front script 2
showHotrunner(namemodel, name, data) {
    this.$modal.show(
        Hotrunner,
        {detail: {namemodel, name, data}, isAdd: true},
        {width: '1040', height: '700', draggable: true},
    );
    const canvas = document.getElementById('three-canvas');
    canvas.classList.add('noclick');
}
Back
var device = msg.topic.split("/")[2]
if (device == "hopper")
{
    var temp_current = msg.payload.Temp_Current
    var error = msg.payload.error
    msg = {
        topic: "CCComandTopic/hopper/1",
        payload: {
            ID: id,
            "Error": error,
            Temp_Current: temp_current,
        }
    }
    node.send(msg)
}
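For comparison, this is roughly where those values would arrive with a plain MQTT client (a sketch using the mqtt npm package, not my vue-mqtt setup; the broker URL is a placeholder, while the topic and payload fields come from the back-end code above):
// Sketch only: receiving the values the back end publishes, using the plain "mqtt" package.
const mqtt = require('mqtt');
const client = mqtt.connect('mqtt://localhost:1883'); // placeholder broker URL

client.on('connect', () => {
    client.subscribe('CCComandTopic/hopper/1'); // topic used by the back-end code
});

client.on('message', (topic, message) => {
    const data = JSON.parse(message.toString());
    console.log(data.Temp_Current, data.Error); // fields the back end publishes
});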
I would like to send FullCalendar events back to .NET with an AJAX request. I created a custom button for that:
@{
    ViewData["Title"] = "Planning visites";
    Layout = "~/Views/Shared/_Layout.cshtml";
}
<h1>@ViewData["Title"]</h1>
<div id='calendar'></div>
@section scripts
{
<script>
let date = new Date();
let month = String(date.getMonth() + 1).padStart(2, '0');
let day = String(date.getDate()).padStart(2, '0');
let year = date.getFullYear();
let dateDuJour = year + '-' + month + '-' + day;
let dateDuJourplusunan = (year + 1) + '-' + month + '-' + day;
document.addEventListener('DOMContentLoaded', function() {
var calendarEl = document.getElementById('calendar');
var calendar = new FullCalendar.Calendar(calendarEl, {
//themeSystem: 'bootstrap5',
//plugins: [ timeGridPlugin ],
initialView: 'timeGridWeek',
selectable: true,
selectOverlap: false,
//selectMirror: true,
validRange: {
start: dateDuJour,
end: dateDuJourplusunan
},
customButtons: {
enregistrermodifs: {
text: 'enregistrer',
click: function() {
var eventsobj = calendar.getEvents();
var data = JSON.stringify(eventsobj);
//var data = JSON.serialize(eventsobj);
alert(data);
$.ajax({
type: 'POST',
url: '@Url.Action("MAJAgenda","Agenda")',
//contentType: 'application/x-www-form-urlencoded; charset=UTF-8', // when we use .serialize() this generates the data in query string format. this needs the default contentType (default content type is: contentType: 'application/x-www-form-urlencoded; charset=UTF-8') so it is optional, you can remove it
contentType: 'application/json; charset=utf-8',
data: data,
success: function(result) {
alert('Successfully received Data ');
console.log(result);
},
error: function() {
alert('Erreur : enregistrement non effectué');
console.log('Failed');
}
});
}
}
},
headerToolbar: {
left: 'prev,next,today,enregistrermodifs',
center: 'title',
right: 'dayGridMonth,timeGridWeek,dayGridDay'
},
buttonText: {
today: 'Aujourdhui',
month: 'mois',
week: 'semaine',
timeGridWeek: 'jour',
day: 'jour',
list: 'liste'
},
initialDate: dateDuJour,
navLinks: true, // can click day/week names to navigate views
editable: true,
dayMaxEvents: true, // allow "more" link when too many events
events: '@Url.Action("RecupDonneesAgenda","Agenda")?annonceId=@ViewBag.AnnonceId',
select: function(info) {
calendar.addEvent({
//id: info.startStr,
title: 'Indisponibilité',
start: info.startStr,
end: info.endStr,
allDay: false
});
},
eventClick: function(info) {
var eventobj = info.event;
eventobj.remove();
}
});
calendar.render();
calendar.setOption('locale', 'fr');
});
</script>
}
Here's my model in .NET:
public class AgendaAJAX
{
    public string? title { get; set; }
    public DateTime? start { get; set; }
    public DateTime? end { get; set; }
}
And here's my action method:
[HttpPost]
public async Task<IActionResult> MAJAgenda(AgendaAJAX? agenda)
{
    string userID = User.FindFirstValue(ClaimTypes.NameIdentifier);
    ////List<Agenda> agenda = await _context.Agendas.Where(a => a.AnnonceID == annonceId && a.Personne1 == userID).ToListAsync();
    //JsonResult result = new JsonResult(agenda);
    return View("Agenda");
}
The AJAX call works well; here is the JSON data:
[{"title":"Indisponibilité","start":"2022-06-09T07:30:00+02:00","end":"2022-06-09T12:30:00+02:00"},{"title":"Indisponibilité","start":"2022-06-10T10:00:00+02:00","end":"2022-06-10T15:00:00+02:00"},{"title":"Indisponibilité","start":"2022-06-11T07:00:00+02:00","end":"2022-06-11T09:30:00+02:00"}]
The problem is that I don't receive any data in the action method: agenda stays null.
I don't understand why.
Any help would be appreciated. Thanks.
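For what it's worth, the JSON I post is an array of objects with title/start/end, while my action takes a single AgendaAJAX object. A signature whose parameter matches the posted shape would look like this sketch (not my current code):
// Sketch only: a body-bound collection parameter matching the posted JSON array.
[HttpPost]
public IActionResult MAJAgenda([FromBody] List<AgendaAJAX>? agenda)
{
    // agenda would contain one AgendaAJAX per event in the JSON array
    return View("Agenda");
}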
I am trying to implement Google Charts with ASP.NET Core MVC.
I've been at it for two days, but I cannot figure out my mistake. I don't get an error, and I can see the array in the console, but no data appears in the chart.
VIEWMODEL
public class ZipCodes
{
    public string ZipCode { get; set; }
    public int ZipCount { get; set; }
}
CONTROLLER
public ActionResult IncidentsByZipCode()
{
    var incidentsByZipCode = (from o in _context.Incident
                              group o by o.ZipCode into g
                              orderby g.Count() descending
                              select new
                              {
                                  ZipCode = g.Key,
                                  ZipCount = g.Count()
                              }).ToList();
    return Json(incidentsByZipCode);
}
VIEW
function IncidentsByZipCode() {
    $.ajax({
        type: 'GET',
        url: '@Url.Action("IncidentsByZipCode", "Controller")',
        success: function (response) {
            console.log(response);
            var data = new google.visualization.DataTable();
            data.addColumn('string', 'ZipCode');
            data.addColumn('number', 'ZipCount');
            for (var i = 0; i < response.result.length; i++) {
                data.addRow([response.result[i].ZipCode, response.result[i].ZipCount]);
            }
            var chart = new google.visualization.ColumnChart(document.getElementById('incidentsByZipCode'));
            chart.draw(data,
                {
                    title: "",
                    position: "top",
                    fontsize: "14px",
                    chartArea: { width: '100%' },
                });
        },
        error: function () {
            alert("Error loading data!");
        }
    });
}
Because the API you are using is not the Column Chart one, the data cannot be added and rendered correctly. Following the official example, you need to make some changes.
Here is the AJAX code.
<script>
//Generate random colors
function bg() {
var r = Math.floor(Math.random() * 256);
var g = Math.floor(Math.random() * 256);
var b = Math.floor(Math.random() * 256);
return "rgb(" + r + ',' + g + ',' + b + ")";
}
function IncidentsByZipCode() {
$.ajax({
type: 'GET',
url: '@Url.Action("IncidentsByZipCode","home")',
success: function (response) {
google.charts.load('current', { packages: ['corechart'] });
google.charts.setOnLoadCallback(drawChart);
function drawChart() {
var data = new google.visualization.DataTable();
var obj = [
["Element", "Density", { role: "style" }],
];
$.each(response, function (index, value) {
obj.push([value.zipCode, value.zipCount, bg()])
})
var data = google.visualization.arrayToDataTable(obj);//This is method of Column Chart
var view = new google.visualization.DataView(data);
view.setColumns([0, 1,
{
calc: "stringify",
sourceColumn: 1,
type: "string",
role: "annotation"
},
2]);
var chart = new google.visualization.ColumnChart(document.getElementById('incidentsByZipCode'));
chart.draw(data,
{
title: "",
position: "top",
fontsize: "14px",
chartArea: { width: '100%' },
});
}
},
error: function () {
alert("Error loading data!");
}
});
}
IncidentsByZipCode()
This is the controller.
public ActionResult IncidentsByZipCode()
{
//var incidentsByZipCode = (from o in _context.Incident
// group o by o.ZipCode into g
// orderby g.Count() descending
// select new
// {
// ZipCode = g.Key,
// ZipCount = g.Count()
// }).ToList();
var incidentsByZipCode = new List<ZipCodes>
{
new ZipCodes{ ZipCode="code1", ZipCount=3},
new ZipCodes{ZipCode="code2",ZipCount=4},
new ZipCodes{ZipCode="code3",ZipCount=2},
new ZipCodes{ZipCode="code4",ZipCount=9},
};
return Json(incidentsByZipCode);
}
Result, and you can also refer to this document.
I am able to run tests locally against my remote Selenium server and they run just fine.
When I run them from my Jenkins box against the same remote Selenium server, I get "No specs found", and in the output of my Selenium server I see the following:
21:33:41.256 INFO - Executing: [execute async script: try { return (function (attempts, ng12Hybrid, asyncCallback) {
var callback = function(args) {
setTimeout(function() {
asyncCallback(args);
}, 0);
};
var check = function(n) {
try {
if (!ng12Hybrid && window.getAllAngularTestabilities) {
callback({ver: 2});
} else if (window.angular && window.angular.resumeBootstrap) {
callback({ver: 1});
} else if (n < 1) {
if (window.angular) {
callback({message: 'angular never provided resumeBootstrap'});
} else {
callback({message: 'retries looking for angular exceeded'});
}
} else {
window.setTimeout(function() {check(n - 1);}, 1000);
}
} catch (e) {
callback({message: e});
}
};
check(attempts);
}).apply(this, arguments); }
catch(e) { throw (e instanceof Error) ? e : new Error(e); }, [10, false]])
21:33:41.273 INFO - Done: [execute async script: try { return (function (attempts, ng12Hybrid, asyncCallback) {
var callback = function(args) {
setTimeout(function() {
asyncCallback(args);
}, 0);
};
var check = function(n) {
try {
if (!ng12Hybrid && window.getAllAngularTestabilities) {
callback({ver: 2});
} else if (window.angular && window.angular.resumeBootstrap) {
callback({ver: 1});
} else if (n < 1) {
if (window.angular) {
callback({message: 'angular never provided resumeBootstrap'});
} else {
callback({message: 'retries looking for angular exceeded'});
}
} else {
window.setTimeout(function() {check(n - 1);}, 1000);
}
} catch (e) {
callback({message: e});
}
};
check(attempts);
}).apply(this, arguments); }
catch(e) { throw (e instanceof Error) ? e : new Error(e); }, [10, false]]
21:33:41.288 INFO - Executing: [execute script: return (function (trackOutstandingTimeouts) {
var ngMod = angular.module('protractorBaseModule_', []).config([
'$compileProvider',
function($compileProvider) {
if ($compileProvider.debugInfoEnabled) {
$compileProvider.debugInfoEnabled(true);
}
}
]);
if (trackOutstandingTimeouts) {
ngMod.config([
'$provide',
function ($provide) {
$provide.decorator('$timeout', [
'$delegate',
function ($delegate) {
var $timeout = $delegate;
var taskId = 0;
if (!window['NG_PENDING_TIMEOUTS']) {
window['NG_PENDING_TIMEOUTS'] = {};
}
var extendedTimeout= function() {
var args = Array.prototype.slice.call(arguments);
if (typeof(args[0]) !== 'function') {
return $timeout.apply(null, args);
}
taskId++;
var fn = args[0];
window['NG_PENDING_TIMEOUTS'][taskId] =
fn.toString();
var wrappedFn = (function(taskId_) {
return function() {
delete window['NG_PENDING_TIMEOUTS'][taskId_];
return fn.apply(null, arguments);
};
})(taskId);
args[0] = wrappedFn;
var promise = $timeout.apply(null, args);
promise.ptorTaskId_ = taskId;
return promise;
};
extendedTimeout.cancel = function() {
var taskId_ = arguments[0] && arguments[0].ptorTaskId_;
if (taskId_) {
delete window['NG_PENDING_TIMEOUTS'][taskId_];
}
return $timeout.cancel.apply($timeout, arguments);
};
return extendedTimeout;
}
]);
}
]);
}
}).apply(null, arguments);, [true]])
21:33:41.312 INFO - Done: [execute script: return (function (trackOutstandingTimeouts) {
var ngMod = angular.module('protractorBaseModule_', []).config([
'$compileProvider',
function($compileProvider) {
if ($compileProvider.debugInfoEnabled) {
$compileProvider.debugInfoEnabled(true);
}
}
]);
if (trackOutstandingTimeouts) {
ngMod.config([
'$provide',
function ($provide) {
$provide.decorator('$timeout', [
'$delegate',
function ($delegate) {
var $timeout = $delegate;
var taskId = 0;
if (!window['NG_PENDING_TIMEOUTS']) {
window['NG_PENDING_TIMEOUTS'] = {};
}
var extendedTimeout= function() {
var args = Array.prototype.slice.call(arguments);
if (typeof(args[0]) !== 'function') {
return $timeout.apply(null, args);
}
taskId++;
var fn = args[0];
window['NG_PENDING_TIMEOUTS'][taskId] =
fn.toString();
var wrappedFn = (function(taskId_) {
return function() {
delete window['NG_PENDING_TIMEOUTS'][taskId_];
return fn.apply(null, arguments);
};
})(taskId);
args[0] = wrappedFn;
var promise = $timeout.apply(null, args);
promise.ptorTaskId_ = taskId;
return promise;
};
extendedTimeout.cancel = function() {
var taskId_ = arguments[0] && arguments[0].ptorTaskId_;
if (taskId_) {
delete window['NG_PENDING_TIMEOUTS'][taskId_];
}
return $timeout.cancel.apply($timeout, arguments);
};
return extendedTimeout;
}
]);
}
]);
}
}).apply(null, arguments);, [true]]
Like I said, these run just fine locally, so I am not sure what is going on with my Jenkins machine.
Here is my protractor config file:
// Configuration constants
var downloadsFolder = 'test/downloads/',
today = ("0" + (new Date()).getDate()).slice(-2),
month = ("0" + ((new Date()).getMonth() + 1)).slice(-2),
baseUrl = 'BASE URL GOES HERE';
// Test report setup w/ screenshot
var HtmlScreenshotReporter = require('protractor-jasmine2-screenshot-reporter');
var reporter = new HtmlScreenshotReporter({
dest: 'test/report',
filename: 'e2e-report.html'
});
// Protractor config
exports.config = {
suites: {
explore: '.protractor/src/app/exploration/tests/exploration.scenario.js',
login: '.protractor/src/auth-app/login/tests/login.scenario.js',
stories: '.protractor/src/app/story/tests/story.scenario.js',
cohorts: '.protractor/src/app/cohort/tests/cohort.scenario.js',
visualize: '.protractor/src/app/visualize/tests/visualize.scenario.js'
},
baseUrl: 'BASE URL GOES HERE',
directConnect: false,
// Override default 11s timeout for long requests such as visualize's "Recommended Visualizations"
// See https://github.com/angular/protractor/blob/master/docs/timeouts.md
allScriptsTimeout: 25 * 1000,
jasmineNodeOpts: {
defaultTimeoutInterval: 90 * 1000
},
multiCapabilities: [
{
browserName: 'chrome',
seleniumAddress: "http://SELENIUM SERVER URL HERE:4444/wd/hub",
platform: 'ANY',
version: 'ANY',
chromeOptions: {
args: ['--no-sandbox', '--test-type=browser', '--lang=en', '--start-maximized'],
prefs: {
download: {
prompt_for_download: false,
directory_upgrade: true,
default_directory: 'test/downloads'
},
},
}
// shardTestFiles: true,
// maxInstances: 2
}
],
onPrepare: function() {
// Set browser window size
browser.driver.manage().window().maximize();
//Setup screenshots
jasmine.getEnv().addReporter(reporter);
browser.get('BASE URL GOES HERE');
},
// Setup the report before any tests start
beforeLaunch: function() {
return new Promise(function(resolve){
reporter.beforeLaunch(resolve);
});
},
// Close the report after all tests finish
afterLaunch: function(exitCode) {
return new Promise(function(resolve){
reporter.afterLaunch(resolve.bind(this, exitCode));
});
},
params: {
baseUrl: baseUrl,
downloadsFolder: 'test/downloads',
cohort: {
listView: baseUrl + 'cohorts',
newView: baseUrl + 'cohorts/new'
},
story: {
listView: baseUrl + 'stories',
newView: baseUrl + 'story/new',
displayView: baseUrl + 'story'
},
visualize: {
listView: baseUrl + 'visualize',
newView: baseUrl + 'visualize/new'
},
explore: {
listView: baseUrl + 'explorations',
newView: baseUrl + 'explorations/new',
excelFilename: downloadsFolder + `DataExport_2016-${month}-${today}.xlsx`,
csvFilename: downloadsFolder + `DataExport_2016-${month}-${today}.csv`,
maxDownloadTime: 10 * 1000
}
}
};
This boiled down to a permissions issue. Once I added my jenkins user to sudo, I was able to run the project's make command, which built all of the necessary files, converted my TypeScript tests to JavaScript, and allowed them to run.
Can anybody give me example code for uploading and downloading different types of files with a custom control in ASP.NET MVC 4 using the Plupload plugin? I want to save files for my tasks and messages with unique IDs in the database and retrieve them too. Here is the code I tried for uploading.
Server side:
public ActionResult UploadFiles(string id)
{
    for (int i = 0; i < Request.Files.Count; i++)
    {
        var file = Request.Files[i];
        file.SaveAs(AppDomain.CurrentDomain.BaseDirectory + "Uploads/" + file.FileName);
    }
    return Json(new { success = true }, JsonRequestBehavior.AllowGet);
}
And here is the Plupload client-side code for uploading files:
$("#file_attachments").pluploadQueue(
{
// General settings
runtimes: 'html5,flash,silverlight',
url: '/SideMenuBar/UploadFiles',
max_file_size: '100mb',
chunk_size: '1mb',
unique_names: true,
multipart: true,
// Specify what files to browse for
filters: [
{ title: "Image files", extensions: "jpg,gif,png" },
{ title: "Zip files", extensions: "zip" },
{ title: "Rar files", extensions: "rar" },
{ title: "Document files", extensions: "docx,doc,xlx,xlxs,ppt" },
],
// Flash settings
flash_swf_url: 'Script/lib/plupload/js/plupload.flash.swf',
// Silverlight settings
silverlight_xap_url: 'Script/lib/plupload/js/plupload.silverlight.xap',
// PreInit events, bound before any internal events
preinit: {
Init: function (up, info) {
//alert('[Init]'+ info+ 'Features:'+ up.features);
},
UploadFile: function (up, file) {
// alert('[UploadFile]', file);
// You can override settings before the file is uploaded
up.settings.url = '/SideMenuBar/UploadFiles?id=' + file.id;
//up.settings.multipart_params = {param1: 'value1', param2: 'value2'};
}
},
// Post init events, bound after the internal events
init: {
UploadComplete: function (up, files) {
// destroy the uploader and init a new one
up.destroy();
}
}
});
var uploader = $('#file_attachments').pluploadQueue();
uploader.bind('FileUploaded', function (upldr, file, object) {
if (uploader.files.length == (uploader.total.uploaded + uploader.total.failed)) {
$(".file_upload_cancel").hide();
$(".file_upload_done").show();
}
});
uploader.bind("FilesAdded", function (up, filesToBeAdded) {
if (up.files.length > 5) {
up.files.splice(4, up.files.length - 5);
showStatus("Only 5 files max are allowed per upload. Extra files removed.", 3000, true);
return false;
}
return true;
});
$('.upload_files').click(function (e) {
e.preventDefault();
$(".file_up").show();
});
$('#new_message_form').submit(function (e) {
var uploader = $('#file_attachments').pluploadQueue();
// Files in queue upload them first
if (uploader.files.length > 0) {
// When all files are uploaded submit form
uploader.bind('StateChanged', function () {
if (uploader.files.length === (uploader.total.uploaded + uploader.total.failed)) {
//uncoment next line to submit form after all files are uploaded
//$('#new_message_form')[0].submit();
}
});
uploader.start();
}
return false;
});
}
How can I resolve this problem?
You are using an option called chunking, which splits your file into pieces of the chunk size - it's a good practice, to prevent errors.
You set it with the "chunk_size" property. For example: you have a 5 MB file. With a 1 MB chunk size you'll upload 5 parts of 1 MB each, one after another, until the upload is complete. Then you have to put them back together on the server.
I recommend you look at this link for more information about chunking and how to make it work.
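As a rough illustration of what chunking looks like on the wire (only the url and chunk_size below come from the question; the rest is a placeholder sketch, not a full configuration):
// Sketch: with chunk_size '1mb', a 5 MB file becomes 5 sequential POSTs.
// Plupload adds multipart parameters to each request, roughly:
//   request 1: name=photo.jpg, chunk=0, chunks=5, plus the first 1 MB of data
//   ...
//   request 5: name=photo.jpg, chunk=4, chunks=5, plus the last 1 MB of data
// The server appends each part and knows the file is complete when chunk == chunks - 1.
var chunkedUploader = new plupload.Uploader({
    url: '/SideMenuBar/UploadFiles', // endpoint from the question
    chunk_size: '1mb',
    browse_button: 'pickfiles'       // placeholder element id
});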
Here is an example from one of my implementations - with MVC 3 - of Plupload with chunking.
I'll post the JavaScript code and the action. I think it will help you see how to implement it in your case.
function installFolderFileUploader(action, id, ProfileType, intMaxFilesPermitted, Folder, maxSizeMB) {
var uploaderRuntimes = 'html5, flash, silverlight';
var uploader = new plupload.Uploader({
runtimes: uploaderRuntimes,
browse_button: 'imgBtnPhotoUpload',
url: action,
flash_swf_url: '/Scripts/Plugins/Moxie.swf',
silverlight_xap_url: '/Scripts/Plugins/Moxie.xap',
multipart_params: { 'id': id, 'ProfileType': ProfileType },
multi_selection: true,
max_file_count: '5',
chunk_size: '100KB',
filters: {
max_file_size: maxSizeMB + 'MB'
},
init: {
FileUploaded: function (Up, File, Response) {
var jsonObj = jQuery.parseJSON(Response.response);
if (jsonObj.success) {
mountFileUploadFields(jsonObj, Folder, ProfileType);
}
},
PostInit: function () {
//meow
$('#imgBtnPhotoUpload').next().css({ 'top': '0', 'width': '146px', 'height': '28px', 'cursor': 'pointer' });
},
FilesAdded: function (up, files) {
var totalInPage = parseInt($('#dvFileContainer .BeeFileDetails').length);
if ((up.files.length + totalInPage) > parseInt(intMaxFilesPermitted)) {
jQuery.facebox({ div: "#dvMaxFilesPermitedError" });
up.splice();
up.refresh();
return false;
}
else {
if (totalInPage >= parseInt(intMaxFilesPermitted)) {
jQuery.facebox({ div: "#dvMaxFilesPermitedError" });
up.splice();
up.refresh();
return false;
}
else {
$('#dvFileList').css('margin-left', '2px');
$('#dvFileList').css('font-size', '10px');
$('#dvFileList').css('display', 'block');
plupload.each(files, function (file) {
$('#dvFileList').append('<div>');
$('#dvFileList').append('<div style="width:84%;margin-left:30px;float:left;" id="' + file.id + '">' + file.name + ' (' + plupload.formatSize(file.size) + ')<b></b></div>');
$('#dvFileList').append('<img class="removeFile" style="margin-top:2px;cursor:pointer;" src="/Content/images/cancel.png" id="' + file.id + '" />');
$('#dvFileList').append('</div>');
$('.removeFile').on('click', function () {
$('#' + file.id).remove();
$('img[id=' + file.id + ']').remove();
uploader.stop();
uploader.splice();
});
});
uploader.start();
}
}
},
UploadProgress: function (up, file) {
if (file.percent == 100) {
$('#' + file.id).remove();
$('img[id=' + file.id + ']').remove();
}
$('#' + file.id + ' b:eq(0)').html('<span> - ' + file.percent + '%</span>');
$('#' + file.id + ' b:eq(0)').append('<div id="fileUploaded" style="background-color:#0099FF;height:3px;width:' + file.percent + '%";></div>');
},
ChunkUploaded: function (up, file, info) {
var jsonObj = jQuery.parseJSON(info.response);
if (jsonObj.tempFile != "") {
uploader.settings.multipart_params.tempFile = jsonObj.tempFile;
}
else {
$('#' + file.id).remove();
$('img[id=' + file.id + ']').remove();
var totalInPage = parseInt($('#dvFileContainer .BeeFileDetails').length);
if (totalInPage > 0)
$('.BeeEditFileActions').fadeIn();
var fileName = uploader.settings.multipart_params.tempFile;
removeNonUsedFiles(id, fileName, 'File');
uploader.settings.multipart_params.tempFile = '';
uploader.stop();
uploader.splice();
uploader.refresh();
jQuery.facebox({ div: "#dvAddFolderFileError" });
}
},
Error: function (up, err) {
if (err.code != '-500')
jQuery.facebox({ div: "#dvAddFolderFileError" });
},
UploadComplete: function (a, Response) {
$('.BeeEditFileActions').fadeIn();
$('#dvFileList').empty();
uploader.splice();
uploader.refresh();
}
}
});
uploader.init();
}
And the Action:
[AllowAnonymous]
[HttpPost]
public JsonResult UploadFolderFile(string id, Domain.Profile.TypeProfile ProfileType, string tempFile, string name, int? chunk, int? chunks)
{
String strTempFile = string.Empty;
String strSaveLocation = string.Empty;
try
{
var fileData = Request.Files[0];
chunk = chunk ?? 0;
String strExtension = Path.GetExtension(name).ToLower();
Models.Identity.CustomIdentity objUser = new Models.Identity.CustomIdentity(System.Web.Security.FormsAuthentication.Decrypt(id));
DB.CompanyNetworkDB objCompanyDB = new DB.CompanyNetworkDB();
Int32 intMaxFileSize = objCompanyDB.getFileInFolderMaxSize(objUser.CompanyNetworkID) * 1024 * 1024;
if (objUser != null && objUser.IsAuthenticated && fileData.ContentLength <= intMaxFileSize)
{
////Get upload file.
String strSaveLocationURL = Domain.Profile.getUploadItemsFolder(objUser.CompanyNetworkID, ProfileType, Domain.Profile.UploadType.Folder);
strSaveLocationURL += "temp/";
strSaveLocation = Server.MapPath(strSaveLocationURL);
strTempFile = string.IsNullOrEmpty(tempFile) ? DateTime.Now.Ticks.ToString() + strExtension : tempFile;
long fileSize = 0;
using (var fs = new FileStream(Path.Combine(strSaveLocation, strTempFile), chunk == 0 ? FileMode.Create : FileMode.Append))
{
var buffer = new byte[fileData.InputStream.Length];
fileData.InputStream.Read(buffer, 0, buffer.Length);
fs.Write(buffer, 0, buffer.Length);
fileSize = fs.Length;
}
if (fileSize <= intMaxFileSize)
{
if (chunk == chunks - 1)
{
return Json(new { success = true, OriginalFileName = Path.GetFileName(name), ServerFileName = strTempFile, SizeMB = fileSize });
}
else
{
return Json(new { success = true, tempFile = strTempFile });
}
}
else
{
return Json(new { success = false });
}
}
else
{
return Json(new { success = false });
}
}
catch (ArgumentOutOfRangeException)
{
System.IO.File.Delete(Path.Combine(strSaveLocation, strTempFile));
return Json(new { success = false, erro = "canceled" });
}
catch (Exception ex)
{
throw new Exception(ex.Message);
}
}
I think it could help.