I'm writing a file transfer application to send and receive large data, around 1 GB. I think that when I read the data from the file and fill it into a byte array, it is all held in RAM, and that would affect the computer's speed. Should I do something like:
(loop till end of the file)
{
read 128 MB from the file into byte array
(loop till end of 128)
{
send 1 KB to server
}
byte array = null
}
If that is right, which is better for sending the large file: BeginSend/BeginReceive, or just a loop that sends it?
I would be glad if you could show me some code.
Thanks in advance :)
Windows will start behaving oddly if you [Begin]Send more than about 1 MB in one go. The exact threshold differs between Windows versions, network drivers, the user's shoe size and the moon phase. Below 1 MB you should be fine.
So, either
(loop till end of the file)
{
read 128 MB from the file into byte array
(loop till end of 128)
{
send 1 MB to server
}
byte array = null
}
or, if it is really a file
SendFile(filename[,...])
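In .NET that would be Socket.SendFile, which hands the whole file to the OS so you never buffer it in managed memory yourself. A minimal sketch, assuming an already connected Socket; the names here are mine, not part of the answer above:
// Assumes "socket" is a connected System.Net.Sockets.Socket and "path" points at the file to send.
// SendFile streams the file from disk to the peer without filling your own byte arrays.
socket.SendFile(path);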
Even 128 MB is not a good approach; it's better to read a small buffer and send it straight to the other side.
Check it out. This comes after you have sent the fileName and the fileSize to the other side.
These fields should be common to both server and client:
FileStream fs;
NetworkStream network;
int packetSize = 1024*8;
Send method:
public void Send(string srcPath, string destPath)
{
byte[] data;
string dest = Path.Combine(destPath, Path.GetFileName(srcPath));
using (fs = new FileStream(srcPath, FileMode.Open, FileAccess.Read))
{
try
{
long fileSize = fs.Length;
long sum = 0;
int count = 0;
data = new byte[packetSize];
while (sum < fileSize)
{
count = fs.Read(data, 0, packetSize);
network.Write(data, 0, count);
sum += count;
}
network.Flush();
}
finally
{
fs.Dispose();
data = null;
}
}
}
Receive method:
public void Receive(string destPath, long fileSize)
{
byte[] data;
using (fs = new FileStream(destPath, FileMode.Create, FileAccess.Write))
{
try
{
int count = 0;
long sum = 0;
data = new byte[packetSize];
while (sum < fileSize)
{
count = network.Read(data, 0, packetSize);
if (count == 0) break; // connection closed before the whole file arrived
fs.Write(data, 0, count);
sum += count;
}
}
finally
{
fs.Dispose();
data = null;
}
}
}
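A rough usage sketch for the sending side; the TcpClient host, port and paths below are my own placeholders, not part of the snippet above:
// Connect to the receiver and reuse the fields declared above.
var client = new TcpClient("receiver-host", 9000); // hypothetical host and port
network = client.GetStream();
// The file name and size would be sent here first, as noted above.
Send(@"C:\data\bigfile.bin", @"C:\incoming");       // hypothetical paths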
You have a file with multiple string lines and you have to put them into another file in a fair random manner. How would you implement it, given the constraint that you may use only a linked list and an array?
The distribution should be random, i.e. it should not follow any guessable pattern such as round-robin.
import java.io.FileReader;
import java.io.LineNumberReader;
import java.util.Arrays;

public class FairRandomDistributionDataFromOneToOtherFile {
public static void main(String[] args) {
// Driver Program to read the lines into the String Array.
FileReader fr = null;
LineNumberReader lnr = null;
try {
// Please pass the file path to the FileReaders below to run this program
String path = "/home/sgarg/Documents/workspace-spring-tool-suite-4-4.10.0.RELEASE/Interveiw/src/com/sunil/stringfile";
// First pass: count the lines. (A mark/reset on the reader is unreliable here,
// because reading past the mark's read-ahead limit invalidates it for large files.)
fr = new FileReader(path);
lnr = new LineNumberReader(fr);
lnr.skip(Long.MAX_VALUE);
int totalLines = lnr.getLineNumber();
lnr.close();
// Second pass: reopen the file and read every line into the array.
fr = new FileReader(path);
lnr = new LineNumberReader(fr);
String[] data = new String[totalLines];
for (int i = 0; i < totalLines; i++) {
data[i] = lnr.readLine();
}
lnr.close();
shuffleAndWriteDataToNewFile(data);
System.out.println(Arrays.toString(data));
} catch (Exception e) {
e.printStackTrace();
}
}
/**
* @param data - all lines of the file, read in and passed into this String[]
*
*/
public static String[] shuffleAndWriteDataToNewFile(String[] data) {
int len = data.length;
int rand= 0;
for (int i = 0; i < len; i++) {
// Choose the next random index among the lines that have not been placed yet.
rand = getNextRandomNumber(len - i);
// Move the randomly chosen line to the current end of the unplaced region;
// the next iteration then only draws from indices 0 to len - i - 1.
swapLine(data, i, rand);
// The shuffled lines settle at the back of the array, so no second array is needed,
// and complexity-wise this is an O(N) solution: we iterate over the array once.
}
return data;
}
/**
* Swap two String Objects in the given String Array.
*/
public static void swapLine(String[] data, int curr, int rand) {
int len = data.length;
String temp = data[len-1 - curr];
data[len -1 -curr] = data[rand];
data[rand] = temp;
}
public static int getNextRandomNumber(int numOfElements) {
return (int)(Math.random() * numOfElements);
}
}
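Note that despite its name, shuffleAndWriteDataToNewFile never writes anything out. A minimal sketch of that missing step; the helper name, the use of BufferedWriter and the output path are my additions, not part of the code above:
// Hypothetical helper: write the shuffled lines to the target file.
// Needs java.io.BufferedWriter, java.io.FileWriter and java.io.IOException imports.
public static void writeLines(String[] shuffled, String outPath) throws IOException {
    try (BufferedWriter bw = new BufferedWriter(new FileWriter(outPath))) {
        for (String line : shuffled) {
            bw.write(line);
            bw.newLine();
        }
    }
}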
I need to read elevation data from a binary .hgt file in Swift. I have found this solution in C, but I cannot migrate it to Swift.
#include <stdio.h>
#include <stdlib.h> /* for system() */
#define SIZE 1201
signed short int matrix[SIZE][SIZE] = {0};
int main(int argc, const char * argv[])
{
FILE *fp = fopen("N49E013.hgt", "rb");
unsigned char buffer[2];
for (int i = 0; i < SIZE; ++i)
{
for (int j = 0; j < SIZE; ++j)
{
if (fread(buffer, sizeof(buffer), 1, fp) != 1)
{
printf("Error reading file!\n");
system("PAUSE");
return -1;
}
matrix[i][j] = (buffer[0] << 8) | buffer[1];
}
}
fclose(fp);
}
#define SIZE 1201
This defines a constant named 'SIZE', so do that:
let size = 1201
next:
FILE *fp = fopen("N49E013.hgt", "rb");
This opens a file for reading. We can do that. Close the file in a 'defer' block, so that no matter what, the file gets closed when we're done.
// change the path below to the correct path
let handle = try FileHandle(forReadingFrom: URL(fileURLWithPath: "/path/to/N49E013.hgt"))
defer { handle.closeFile() }
Now, to construct the matrix. We want to create size arrays, each containing size elements read from the file. The original used two nested for loops, but Swift supports functional programming constructs, which we can use to do this a bit more elegantly:
let matrix = try (0..<size).map { _ in
try (0..<size).map { _ -> Int in
// Unfortunately, FileHandle doesn't have any decent error-reporting mechanism
// other than Objective-C exceptions.
// If you need to catch errors, you can use fread as in the original,
// or use an Objective-C wrapper to catch the exceptions.
let data = handle.readData(ofLength: 2)
if data.count < 2 { throw CocoaError(.fileReadCorruptFile) }
return (Int(data[0]) << 8) | Int(data[1])
}
}
Think that ought to do it.
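One caveat: the C original declares the matrix as signed short, and .hgt elevations can be negative, while the snippet above assembles unsigned values. If that matters for your data, the inner closure's return can be swapped for a signed reinterpretation; this is my adjustment, not part of the original answer:
// Inside the inner map closure: reinterpret the two big-endian bytes as a signed Int16.
let raw = (UInt16(data[0]) << 8) | UInt16(data[1])
return Int(Int16(bitPattern: raw))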
I was implementing the same problem recently, but found that the solution provided by Charles Srstka is a bit slow; it takes about 10 seconds to load one file on a Late 2016 15" MBP.
I tweaked it a bit and made it about 50x faster by accessing the memory directly and reading the file row by row instead of 2 bytes at a time.
static let size = 1201
static func read(from path: String) throws -> [[UInt16]] {
let handle = try FileHandle(forReadingFrom: URL(fileURLWithPath: path))
defer { handle.closeFile() }
// Calculate all the necessary values
let unitSize = MemoryLayout<UInt16>.size
let rowSize = size * unitSize
let expectedFileSize = size * rowSize
// Get fileSize
let fileSize = handle.seekToEndOfFile()
// Check file size
guard fileSize == expectedFileSize else {
throw CocoaError(.fileReadCorruptFile)
}
// Go back to the start
handle.seek(toFileOffset: 0)
// Iterate
let matrix: [[UInt16]] = (0..<size).map { _ in
// Read a row
let data = handle.readData(ofLength: rowSize)
// With bytes...
let row: [UInt16] = data.withUnsafeBytes { (bytes: UnsafePointer<UInt16>) -> [UInt16] in
// Get the buffer. The count is in UInt16 elements, not bytes, so it is size rather than rowSize
let buffer = UnsafeBufferPointer<UInt16>(start: bytes, count: size)
// Create an array
return Array<UInt16>(buffer)
}
// Return the row, converting the big-endian values stored in the file to host byte order
return row.map { CFSwapInt16BigToHost($0) }
}
return matrix
}
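A rough usage sketch, assuming the two statics above live on a type I'm calling HgtReader (my name) and that the path points at a valid SRTM tile:
do {
    // Hypothetical enclosing type and path.
    let matrix = try HgtReader.read(from: "/path/to/N49E013.hgt")
    print(matrix[600][600]) // one raw 16-bit sample after the byte swap
} catch {
    print("Failed to read .hgt file: \(error)")
}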
I'm trying to get my Arduino to send data to a Python script on my computer, and for some reason the Python script is reading the data incorrectly. Specifically, it reads the data correctly for about 4-5 receives, then it starts to get out of sync. Below is my code:
Arduino:
struct sensorData
{
char beginPad[5];
int data0;
char pad0;
//int data1;
//char pad1;
//int data2;
};
union data
{
unsigned char buffer[sizeof(sensorData)];
sensorData vars;
};
data sendData;
void setup()
{
Serial.begin(9600);
static char temp[5] = {0, 1, 2, 3, 4};
memcpy(sendData.vars.beginPad, temp, 5);
sendData.vars.pad0 = 0;
//sendData.vars.pad1 = 0;
}
void loop()
{
sendData.vars.data0 = (256);
//sendData.vars.data1 = (512);
//sendData.vars.data2 = (1024);
Serial.write(sendData.buffer, sizeof(sensorData));
delay(100);
}
Python:
import serial
class SensorData:
def __init__(self, ser_obj, s_dict, p_start, p_len):
self.ser = ser_obj
self.sensor_buffer = s_dict
self.num_sensors = len(s_dict)
self.pad_start = p_start
self.pad_len = p_len
self.wait_for_beginning(p_start)
def poll(self):
for i in range(self.num_sensors):
tmp = self.ser_read()
tmp+=(self.ser_read()<<8)
print tmp
self.sensor_buffer.values()[i].append(tmp)
self.ser_read()
return self.sensor_buffer
def ser_read(self):
while True:
try:
data = ord(self.ser.read())
break
except:
continue
return data
def wait_for_beginning(self, start):
while True:
while(self.ser_read() != 0): continue
flag = 0
for i in range(1, self.pad_len):
if i == self.ser_read(): continue
else:
flag = 1
break
if flag: continue
else: break
def wait_for_end(self):
while True:
pad = self.ser_read()
if pad == self.pad_len-1:
break
#END OF CLASS
#main.py
import threading
import time
def serial_poll( s_dict ):
dev = serial_init()
s = SensorData( dev, s_dict, 0, 5 )
while True:
sensor_buffer = s.poll()
s.wait_for_end()
print sensor_buffer
def serial_init():
ser = serial.Serial()
ser.baudrate = 9600
ser.port = '/dev/ttyACM0'
ser.open()
return ser
if __name__ == "__main__":
global sensor_buffer
sensor_buffer = { 'sensor1': [] }
serial_thread = threading.Thread( target=serial_poll, args=(sensor_buffer, ) )
serial_thread.daemon = True
serial_thread.start()
while True:
time.sleep( .1 )
I'm running the serial functions on a new thread because I have other things going on in this application. I'm basically trying to send some data that I have hard coded from the Arduino to a Linux computer. I have padding in between actual data to delimit it. At first I thought I was getting buffer overflows since each data buffer was more than 14 bytes long. I shortened it and the issue persisted. I then tried to add a delay since it might be sending data too fast. This didn't help either and now I'm out of ideas. If you need me to be more specific just let me know.
I am parsing big text files, and it works fine for some time, but after a few minutes it gives me an exception (An unhandled exception of type 'System.UnauthorizedAccessException' occurred in System.Core.dll.
Additional information: Access to the path is denied.)
I get the exception on the line mentioned below.
accessor = MemoryMapped.CreateViewAccessor(offset, length, MemoryMappedFileAccess.Read);
Below is my function
public static void CityStateZipAndZip4(string FilePath,long offset,long length,string spName)
{
try
{
long indexBreak = offset;
string fileName = Path.GetFileName(FilePath);
if (fileName.Contains(".txt"))
fileName = fileName.Replace(".txt", "");
System.IO.FileStream file = new System.IO.FileStream(FilePath, FileMode.Open, FileAccess.Read, FileShare.Read);
Int64 b = file.Length;
MemoryMappedFile MemoryMapped = MemoryMappedFile.CreateFromFile(file, fileName, b, MemoryMappedFileAccess.Read, null, HandleInheritability.Inheritable, false);
using (MemoryMapped)
{
//long offset = 182; // 256 megabytes
//long length = 364; // 512 megabytes
MemoryMappedViewAccessor accessor = MemoryMapped.CreateViewAccessor(offset, length, MemoryMappedFileAccess.Read);
byte byteValue;
int index = 0;
int count = 0;
StringBuilder message = new StringBuilder();
do
{
if (indexBreak == index)
{
count = count + 1;
accessor.Dispose();
string NewRecord = message.ToString();
offset = offset + indexBreak;
length = length + indexBreak;
if (NewRecord.IndexOf("'") != -1)
{ NewRecord = NewRecord.Replace("'", "''"); }
// string Sql = "insert into " + DBTableName + " (ID, DataString) values( " + count + ",'" + NewRecord + "')";
string Code = "";
if (spName == AppConfig.sp_CityStateZip)
{
Code = NewRecord.Trim().Substring(0, 1);
}
InsertUpdateAndDeleteDB(spName, NewRecord.Trim (), Code);
accessor = MemoryMapped.CreateViewAccessor(offset, length, MemoryMappedFileAccess.Read);
message = new StringBuilder();
index = 0;
//break;
}
byteValue = accessor.ReadByte(index);
if (byteValue != 0)
{
char asciiChar = (char)byteValue;
message.Append(asciiChar);
}
index++;
} while (byteValue != 0);
}
MemoryMapped.Dispose();
}
catch (FileNotFoundException)
{
Console.WriteLine("Memory-mapped file does not exist. Run Process A first.");
}
}
Somewhere deep in resource processing code we have something like this:
try {
// Try loading some strings here.
} catch {
// Oops, could not load strings, try another way.
}
The exception is thrown and handled there already; it will never show up in your application. The only way to see it is to attach a debugger and observe this message.
As you can see from the code, it has nothing to do with your problem. The real issue is that the debugger is showing you something you were not meant to see.
Run the solution without the debugger and it works fine.
This exception means that your program is not getting Read access to the file from Windows.
Have you made sure that the file is not locked when your program tries to read it? For example, it could be a file that your own program is currently using.
If not, try running your program as an Administrator and see if it makes a difference.
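If the file really is held open elsewhere (including by your own code), one thing to try is opening it with a more permissive share mode. A sketch, assuming read access is all you need; the rest of the method stays as in the question:
// Let other handles keep the file open for reading and writing while we read it.
using (var file = new FileStream(FilePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
    // ... create the MemoryMappedFile from "file" exactly as in the question's code
}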
I'm building an app that, in part, needs to resample any input PCM audio file that isn't 44100 Hz to 44.1 kHz (or at least make a best effort to do so).
To handle the resampling I'm using soxr. soxr has no dependencies and is lightweight, which is ideal in this case, but it offers no native file I/O. I have very limited experience with IO streams in C, so I'm hitting a wall. The app is being designed modularly, so I need the resample process to create an output file that can then be passed on to other processors, rather than simply dealing with the output stream directly.
In order to create that output file, I'm trying to take the data generated by the soxr resampling process, and pass it to libsndfile, which should be able to write the audio out to a file.
Below is an extremely verbose explanation of where I'm at, though I'm at a loss for why it's crashing. I suspect it has something to do with how buffers are being allocated and used. (Note: The input file is being read with sndfile prior to this code)
(Here's a single gist of the entire thing)
Basic resampler options
// Use "high quality" resampling
unsigned int q_recipe = SOXR_HQ;
// No special flags
unsigned long q_flags = 0;
// Create the q_spec
soxr_quality_spec_t q_spec = soxr_quality_spec(q_recipe, q_flags);
Map the sndfile format to a soxr format
soxr_datatype_t itype;
// Get the SFINFO format
int iformat = self.inputFileInfo.format;
// Set the soxr itype to the corresponding format
if ((iformat & SF_FORMAT_FLOAT) == SF_FORMAT_FLOAT) {
itype = SOXR_FLOAT32_S;
} else if ((iformat & SF_FORMAT_DOUBLE) == SF_FORMAT_DOUBLE) {
itype = SOXR_FLOAT64_S;
} else if ((iformat & SF_FORMAT_PCM_32) == SF_FORMAT_PCM_32) {
itype = SOXR_INT32_S;
} else {
itype = SOXR_INT16_S;
}
Setup soxr IO spec
// Always want the output to match the input
soxr_datatype_t otype = itype;
soxr_io_spec_t io_spec = soxr_io_spec(itype, otype);
Threading
// A single thread is fine
soxr_runtime_spec_t runtime_spec = soxr_runtime_spec(1);
Construct the resampler
soxr_error_t error;
// Input rate can be read from the SFINFO
double const irate = self.inputFileInfo.samplerate;
// Output rate is defined elsewhere, but this generally = 44100
double const orate = self.task.resampler.immutableConfiguration.targetSampleRate;
// Channel count also comes from SFINFO
unsigned chans = self.inputFileInfo.channels;
// Put it all together
soxr_t soxr = soxr_create(irate, orate, chans, &error, &io_spec, &q_spec, &runtime_spec);
Read, resample & write
I'm not really confident in any of the following code, but I've triple checked the math and everything seems to meet the expectations of the libraries' APIs.
// Frames in sndfile are called Samples in soxr
// One frame is 1 item per channel
// ie frame_items = 1 item * channels
size_t const iframeitems = (1 * chans);
// item size is the data type size of the input type
//
size_t iitemsize;
if ((iformat & SF_FORMAT_FLOAT) == SF_FORMAT_FLOAT) {
iitemsize = sizeof(Float32);
} else if ((iformat & SF_FORMAT_DOUBLE) == SF_FORMAT_DOUBLE) {
iitemsize = sizeof(Float64);
} else if ((iformat & SF_FORMAT_PCM_32) == SF_FORMAT_PCM_32) {
iitemsize = sizeof(int32_t);
} else {
iitemsize = sizeof(int16_t);
}
// frame size is item size * items per frame (channels)
// eg for 2 channel 16 bit, frame size = 2 * 2
size_t const iframesize = (iframeitems * iitemsize);
// Number of frames to read (arbitrary)
sf_count_t const ireqframes = 1024;
// Size of the buffer is number of frames * size per frame
size_t const ibufsize = iframesize * ireqframes;
void *ibuf = malloc(ibufsize);
// Output
//////////////////////////////
// These match the input
size_t const oframeitems = iframeitems;
size_t const oitemsize = iitemsize;
// frame size is item size * items per frame (channels)
size_t const oframesize = (oframeitems * oitemsize);
// Number of frames expected after resampling
// eg
// orate = 44100
// irate = 48000
// ireqframe = 1024
// expect fewer frames (downsample)
// (44100 / 48000) * 1024 = 940.8
// Add 0.5 to deal with rounding?
sf_count_t const oexpframes = (ireqframes * (orate / irate)) + 0.5;
// Size of the buffer is number of frames * size per frame
size_t const obufsize = oframesize * oexpframes;
void *obuf = malloc(obufsize);
// Go
//////////////////////////////
size_t total_resample_output_frame_count = 0;
size_t need_input = 1;
sf_count_t num_frames_written = 0;
do {
sf_count_t num_frames_read = 0;
size_t actual_resample_output_samples = 0;
// Read the input file based on its type
// num_frames_read should be 1024
if (otype == SOXR_INT16_S || otype == SOXR_INT32_S) {
num_frames_read = sf_readf_int(self.inputFile, ibuf, ireqframes);
} else if (otype == SOXR_FLOAT32_S) {
num_frames_read = sf_readf_float(self.inputFile, ibuf, ireqframes);
} else {
num_frames_read = sf_readf_double(self.inputFile, ibuf, ireqframes);
}
// If there were no frames left to read we're done
if (num_frames_read == 0) {
// passing NULL input buffer to soxr_process indicates End-of-input
ibuf = NULL;
need_input = 0;
}
// Run the resampling on frames read from the input file
error = soxr_process(soxr, ibuf, num_frames_read, NULL, obuf, oexpframes, &actual_resample_output_samples);
total_resample_output_frame_count += actual_resample_output_samples;
// Write the resulting data to output file
// num_frames_written should = actual_resample_output_samples
if (otype == SOXR_INT16_S || otype == SOXR_INT32_S) {
num_frames_written = sf_writef_int(self.outputFile, obuf, actual_resample_output_samples);
} else if (otype == SOXR_FLOAT32_S) {
num_frames_written = sf_writef_float(self.outputFile, obuf, actual_resample_output_samples);
} else {
num_frames_written = sf_writef_double(self.outputFile, obuf, actual_resample_output_samples);
}
} while (!error && need_input);
soxr_delete(soxr);
free(obuf), free(ibuf);
This gives an EXC_BAD_ACCESS on soxr_process. I have no idea what else to try at this point.
The _S in data types like SOXR_INT32_S means that you're using split channels, and from the example 4-split-channels.c it seems that in that case you need to pass an array of pointers, one for each channel.
However, in the code above you just pass a single allocated block of memory, so I'm guessing you're expecting interleaved channel data. Perhaps you can try changing the _S to _I.
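A minimal sketch of that change, keeping the single interleaved ibuf/obuf allocations above and only swapping the datatype mapping; everything else stays as in the question:
/* Interleaved variants: samples for all channels live in one contiguous buffer,
   which matches how ibuf and obuf are allocated above. */
soxr_datatype_t itype;
if ((iformat & SF_FORMAT_FLOAT) == SF_FORMAT_FLOAT) {
    itype = SOXR_FLOAT32_I;
} else if ((iformat & SF_FORMAT_DOUBLE) == SF_FORMAT_DOUBLE) {
    itype = SOXR_FLOAT64_I;
} else if ((iformat & SF_FORMAT_PCM_32) == SF_FORMAT_PCM_32) {
    itype = SOXR_INT32_I;
} else {
    itype = SOXR_INT16_I;
}
soxr_datatype_t otype = itype;
soxr_io_spec_t io_spec = soxr_io_spec(itype, otype);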