How to skip the first line of CSV file data using AngularJS?

I am using DataTables to display the CSV file data. This is an example data file:
name,city,category,discount
surya,gnt,all,10%
surya,gnt,all,10%
surya,gnt,all,10%
surya,gnt,all,10%
surya,gnt,all,10%
I want to skip the first line of data. How do I stop displaying the first line in my DataTable? Here is my code:
// saving csv file to firebase
$('#save-csv').bind('click', function() {
    if (window.File && window.FileReader && window.FileList && window.Blob) {
        var file = document.getElementById('files').files[0];
        var reader = new FileReader();
        reader.readAsText(file);
        reader.onload = function(event) {
            var csv = event.target.result;
            var data = $.csv.toArrays(csv);
            var csvObj = {};
            for (var key in data) {
                csvObj[key] = {};
                csvObj[key].name = data[key][0];
                csvObj[key].city = data[key][1];
                csvObj[key].category = data[key][2];
                csvObj[key].discount = data[key][3];
            }
            console.log(csvObj);
            $scope.csvStores.stores = csvObj;
            $scope.csvStores.$save().then(function(res) {
                console.info(res);
            }).catch(function(err) {
                console.error(err);
            });
            $("#myModal").modal("hide");
            swal(
                'Saved',
                'Successfully Saved',
                'success'
            );
        };
    } else {
    }
});

Before you iterate over your data you should remove or skip the first line.
In my opinion it is better to remove it, because if you want to use this array again later you would otherwise have to skip the header every time.
I have two ideas:
Before the iteration, call data.shift();
Based on this: http://www.w3schools.com/jsref/jsref_shift.asp
This will remove the first element. Note that shift() mutates the array and returns the removed row, so don't reassign its result to data.
Or, if you are using lodash, you can simply call data = _.tail(data); based on this: https://lodash.com/docs/4.16.4#tail
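For illustration, here is a minimal sketch of how the onload handler from the question could skip the header row; it reuses the variable names from the snippet above and only changes the part around data.shift():

reader.onload = function(event) {
    var csv = event.target.result;
    var data = $.csv.toArrays(csv);
    // drop the header row ("name,city,category,discount");
    // shift() mutates the array and returns the removed row, so don't reassign it
    data.shift();
    var csvObj = {};
    for (var key in data) {
        csvObj[key] = {
            name: data[key][0],
            city: data[key][1],
            category: data[key][2],
            discount: data[key][3]
        };
    }
    // ...then save csvObj exactly as before
};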

Related

Convert CSV Array To JSON NodeJS

I am scraping data from a URL containing a csv. The format of the data I'm scraping is like this:
I am doing this in Node.js and using the nodejs-requestify package: https://www.npmjs.com/package/requestify
When I console.log the response.getBody(), it's in the exact same format as the screenshot provided.
I am trying to convert this to a JSON array that I can iterate over in a loop to insert the values into a database; however, I am struggling to get the data into JSON format.
I've tried splitting the array in multiple ways (comma, single quote, double quote). I've tried JSON.parse() and JSON.stringify() (and both in combination).
Here is the code I'm using. Ultimately, when I console.log rows in the loop, it should be in JSON format, but it's still just coming through as comma-separated values.
requestify.get('URL').then(function(response) {
    // Get the response body
    var dataBody = response.getBody();
    var lineArray = dataBody.split('\r\n');
    var data = JSON.parse(JSON.stringify(lineArray));
    for (var s = 0; s < data.length; s++) {
        var rows = data[s];
        console.log(rows);
    }
});
There is a basic misunderstanding here, I think.
var lineArray = dataBody.split('\r\n');
lineArray now contains an array of plain strings, something like
["a", "b", "c"]
but for something like
var data = JSON.parse(lineArray);
to give you objects, you would need a JSON string such as
'{ "a":"1", "b":"2", "c":"3" }'
I think you need something like this for each element of lineArray:
const lineData = line.split(','); // 'line' is one element of lineArray
const keys = ["name", "age", "gender"];
const jsonLineData = {};
keys.forEach((key, index) => {
    jsonLineData[key] = lineData[index];
});
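Putting that together, a rough sketch that maps every line of lineArray to an object might look like this (the keys array is just a placeholder; replace it with your real column names):

var lineArray = dataBody.split('\r\n');
var keys = ["name", "age", "gender"]; // hypothetical column names
var jsonData = lineArray
    .filter(function(line) { return line.length > 0; }) // skip empty trailing lines
    .map(function(line) {
        var lineData = line.split(',');
        var jsonLineData = {};
        keys.forEach(function(key, index) {
            jsonLineData[key] = lineData[index];
        });
        return jsonLineData;
    });
// jsonData is now an array of objects you can loop over for your database inserts
console.log(jsonData);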
I solved this by using csvtojson and aws-sdk, since my CSV is hosted on S3.
// Assumes: const AWS = require('aws-sdk'); const csv = require('csvtojson');
// plus a config/retrievePromise helper that returns the IAM credentials.
async function startAWS(db) {
    // Retrieve AWS IAM credentials for the 'master' user
    var awsCredentials;
    try {
        awsCredentials = await retrievePromise(config.get('aws'));
    }
    catch (e) {
        console.log({ error: e }, 'startAWS error');
    }
    // Set up the AWS config to access our S3 bucket
    AWS.config = new AWS.Config({
        accessKeyId: awsCredentials.principal,
        secretAccessKey: awsCredentials.credential,
        region: 'us-east-1'
    });
    // Call S3 and specify bucket and file name
    const S3 = new AWS.S3();
    const params = {
        Bucket: '***',
        Key: '***' // filename
    };
    // Convert csv file to JSON
    async function csvToJSON() {
        // get csv file and create stream
        const stream = S3.getObject(params).createReadStream();
        // convert csv file (stream) to JSON format data
        const json = await csv().fromStream(stream);
        // connect to DB and continue script
        db.getConnection()
            .then(async (conn) => {
                if (json.length) {
                    for (var s = 0; s < json.length; s++) {
                        var rows = json[s];
                        const insert = await conn.query(
                            'SQL HERE'
                        );
                    }
                }
            });
    }
    csvToJSON();
}

Reset the content of a CSV file in the file cabinet

I'm reading the content of a CSV file in the File Cabinet with a User Event script running on sales orders, and I'm trying to reset the content of the file to an empty CSV after I'm done.
I can successfully append lines and read the content, but I can't find anything about resetting the content in the NetSuite documentation.
I'm just looking for a way to reset the file to an empty CSV.
/**
 * @NApiVersion 2.x
 * @NModuleScope SameAccount
 * @NScriptType UserEventScript
 * @appliedtorecord salesorder
 */
define(['N/file'], function(file) {
    function resetCSVFile(context) {
        var fileObj = file.load({ id: '104819' });
        var iterator = fileObj.lines.iterator();
        var idArrays = [];
        iterator.each(function(line) {
            idArrays.push(line.value);
            // the line below is my failed attempt at resetting the line
            line.value = '';
            return true;
        });
        log.audit({ title: 'idArrays', details: idArrays });
        fileObj.save();
        return true;
    }
    return {
        afterSubmit: resetCSVFile
    };
});
After you're done processing the file, you'll want to use file.create() to make a new file object with the same name, fileType, and folder property values. Set the contents property of that file object to something (perhaps the header row), and save it. This will overwrite the existing file with an empty file but keep the same internal id of the original file.
Here's an example that captures the header row of the CSV file and creates an empty file with that header row. When you create a file object, the contents property cannot be null or an empty string.
var fileObj = file.load({ id: '5447' });
var currentLine = 0;
var headerRow = '';
fileObj.lines.iterator().each(function(line) {
    currentLine++;
    if (currentLine === 1) {
        headerRow = line.value + '\n';
    }
    log.debug({ title: 'header', details: line.value });
    return true;
});
var newFile = file.create({
    name: fileObj.name,
    fileType: file.Type.CSV,
    folder: fileObj.folder,
    contents: headerRow
});
newFile.save();
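To tie this back to the script in the question, a rough sketch of resetCSVFile using this approach could look like the following; the file id '104819' and the choice to keep the header row come from the snippets above, everything else is an assumption:

function resetCSVFile(context) {
    var fileObj = file.load({ id: '104819' });
    var headerRow = '';
    var currentLine = 0;
    // capture only the first line (the CSV header)
    fileObj.lines.iterator().each(function(line) {
        currentLine++;
        if (currentLine === 1) {
            headerRow = line.value + '\n';
        }
        return true;
    });
    // overwrite the file with just the header; same name/folder keeps the internal id
    var newFile = file.create({
        name: fileObj.name,
        fileType: file.Type.CSV,
        folder: fileObj.folder,
        contents: headerRow
    });
    newFile.save();
    return true;
}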

Get an image of a vbhtml view as a byte array and save it to an oracle database

I need help with an MVC application in VB.NET. In general terms, I need to receive an image through the view and work with it in the controller, so I can convert the image to a byte array and save it to an Oracle database. My idea is to get the image and convert it to a byte array in the controller, or maybe there is some way to get the image already as a byte array and pass that array to the controller to save it to the database.
Something like this is my View:
<div class="span11">
    <div class="span4" id="depnac">
        @Html.LabelFor(Function(m) m.DepNacPER)
        @Html.DropDownListFor(Function(m) m.DepNacPER, Model.DepNacPER, New With {.class = "form-control"})
    </div>
And this is my Model:
<Display(Name:="Region of birth")>
<Required(ErrorMessage:="you must select a option")>
Property DepNacPER As SelectList
I'm working on an ASP.NET Core app right now that uploads images. The image comes through to the controller via the request as a Stream. I'm then creating an Image object from that Stream but you could just read the data from it directly. That said, you might want to try to create an Image object to confirm that the data does represent a valid image.
Here's some relevant code from the view's script:
function uploadImage()
{
    // This is a file upload control in a hidden div.
    var image = $("#imageFile");
    if (image[0].files.length > 0)
    {
        var formData = new FormData();
        formData.append(image[0].files[0].name, image[0].files[0]);
        var xhr = new XMLHttpRequest();
        xhr.open("POST", "@Url.Content("~/events/uploadimage")");
        xhr.send(formData);
        xhr.onreadystatechange = function ()
        {
            if (xhr.readyState === 4 && xhr.status === 200)
            {
                var response = JSON.parse(xhr.responseText);
                if (response.saveSuccessful)
                {
                    // ...
                } else
                {
                    window.location.replace("@Url.Content("~/error")");
                }
            }
        };
        xhr.onerror = function(err, result)
        {
            alert("Error: " + err.responseText);
        };
    }
}
I'm in the process of replacing that code with some jQuery that does the heavy lifting but haven't got that far yet.
Here's some relevant code from the action:
[HttpPost]
public IActionResult UploadImage()
{
    var requestForm = Request.Form;
    StringValues tempImageFileNames;
    string tempImageFileName = null;
    string imageUrl = null;
    var saveSuccessful = true;
    var requestFiles = requestForm.Files;
    if (requestFiles.Count > 0)
    {
        // A file has been uploaded.
        var file = requestFiles[0];
        using (var stream = file.OpenReadStream())
        {
            try
            {
                using (var originalImage = System.Drawing.Image.FromStream(stream))
                {
                    // Do whatever you like with the Image here.
                }
            }
            catch (Exception)
            {
                saveSuccessful = false;
            }
        }
    }
    if (saveSuccessful)
    {
        return Json(new {saveSuccessful, tempImageFileName, imageUrl});
    }
    else
    {
        return Json(new {saveSuccessful});
    }
}
Sorry, it didn't occur to me at first that you're after VB code and this is C#. Hopefully you can still get the idea and I'll take the hit if someone dislikes the answer.

Sample code in angularjs, to read and write text file

I am completely new to this. The solutions that I am finding on the internet read the file and display it as-is, but I want to read the file line by line.
var lines = this.result.split('\n');
for (var line = 0; line < lines.length; line++) {
    console.log(lines[line]);
}
With '\n' you can split a file at its line breaks.
Your question was very vague, but this is the solution if you want to output a file line by line.
To put this into context:
First we need to input a file; we can achieve this via HTML:
<input type="file" name="file" id="filename">
Now create a js function to grab this file:
document.getElementById('filename').onchange = function() {
    // declare file variable
    var filevar = this.files[0];
    // the Reader:
    var datareader = new FileReader();
    datareader.onload = function(progressEvent) {
    };
};
I already included a FileReader, as you can see.
Now you can combine this into a full function:
document.getElementById('filename').onchange = function() {
    var filevar = this.files[0];
    var datareader = new FileReader();
    datareader.onload = function(progressEvent) {
        var lines = this.result.split('\n');
        for (var line = 0; line < lines.length; line++) {
            console.log(lines[line]);
        }
    };
    datareader.readAsText(filevar);
};
This is a bit tutorial-like, sorry, but I think it helps more than just code.

ui-grid using external Pagination and exporting data

I am currently using ui-grid implemented with external pagination to display my data. I've run into an issue where I can only export the currently viewed data; however, I need to be able to export ALL data as well.
Anyone know a work around to export all data using external pagination?
I ended up using the csvExport function included with ui-grid. I added an "Export All" custom menu item and it works great! Here is my code:
gridMenuCustomItems: [
    {
        title: 'Export All',
        action: function ($event) {
            $http.get(url).success(function(data) {
                $scope.gridOptions.totalItems = data.totalFeatures;
                $scope.gridOptions.data = data.features;
                $timeout(function() {
                    var myElement = angular.element(document.querySelectorAll(".custom-csv-link-location"));
                    $scope.gridApi.exporter.csvExport(uiGridExporterConstants.ALL, uiGridExporterConstants.ALL, myElement);
                }, 1000);
            });
        }
    }
]
Hope this helps someone!
OK, so I took ui-grid's server-side example and modified their plnkr a bit. I simply created a button outside the ui-grid; that button calls your data source, converts the JSON into CSV, and downloads the file.
http://plnkr.co/edit/xK3TYtKANuci0kUgGacQ?p=preview
<button ng-click="exportAllData()">Export Data</button>
then in your controller:
$scope.exportAllData = function()
{
    setTimeout(function()
    {
        $http.get('largeLoad.json').success(function(response)
        {
            $scope.JSONToCSVConvertor(response, "Data Title", true);
        });
    }, 100);
};
$scope.JSONToCSVConvertor = function(JSONData, ReportTitle, ShowLabel)
{
    // If JSONData is not an object then JSON.parse will parse the JSON string into an object
    var arrData = typeof JSONData != 'object' ? JSON.parse(JSONData) : JSONData;
    var CSV = '';
    // Set report title in the first row or line
    CSV += ReportTitle + '\r\n\n';
    // This condition will generate the label/header
    if (ShowLabel) {
        var row = "";
        // This loop will extract the labels from the 1st element of the array
        for (var index in arrData[0]) {
            // Now convert each value to a comma-separated string
            row += index + ',';
        }
        row = row.slice(0, -1);
        // append the label row with a line break
        CSV += row + '\r\n';
    }
    // 1st loop is to extract each row
    for (var i = 0; i < arrData.length; i++) {
        var row = "";
        // 2nd loop will extract each column and convert it to a comma-separated string
        for (var index in arrData[i]) {
            row += '"' + arrData[i][index] + '",';
        }
        // slice returns a new string, so the result must be assigned back
        row = row.slice(0, row.length - 1);
        // add a line break after each row
        CSV += row + '\r\n';
    }
    if (CSV == '') {
        alert("Invalid data");
        return;
    }
    // Generate a file name
    var fileName = "MyReport_";
    // this will remove the blank spaces from the title and replace them with underscores
    fileName += ReportTitle.replace(/ /g, "_");
    // Initialize the file format you want: csv or xls
    var uri = 'data:text/csv;charset=utf-8,' + escape(CSV);
    // Now the slightly tricky part.
    // You could use window.open(uri),
    // but that will not work in some browsers,
    // or you will not get the correct file extension.
    // This trick generates a temporary <a /> tag instead.
    var link = document.createElement("a");
    link.href = uri;
    // set visibility hidden so it will not affect your web layout
    link.style = "visibility:hidden";
    link.download = fileName + ".csv";
    // this part appends the anchor tag and removes it after an automatic click
    document.body.appendChild(link);
    link.click();
    document.body.removeChild(link);
};
JSONToCSVConvertor() source: http://jsfiddle.net/hybrid13i/JXrwM/
