BLE data sending with nRF52 (C)

I am working with the nRF52 and I made a custom service for a 17-byte data array. I want to send it, but it raises a fatal error in the data update (sending) function and resets the program.
Here's my custom service data update function:
/**
 * @brief Send the custom-data characteristic value as a notification.
 *
 * @param p_cus        Pointer to the custom service structure.
 * @param mydata       Payload descriptor; mydata.p_data must point to
 *                     mydata.size bytes.
 * @param conn_handle  Connection handle to notify on.
 *
 * @return NRF_SUCCESS on success, NRF_ERROR_NULL if p_cus is NULL,
 *         NRF_ERROR_INVALID_STATE when not connected, otherwise the
 *         error code from sd_ble_gatts_hvx().
 */
uint32_t ble_cus_mydata_update(ble_cus_t * p_cus, uint8_array_t mydata, uint16_t conn_handle)
{
    if (p_cus == NULL)
    {
        return NRF_ERROR_NULL;
    }

    if (p_cus->conn_handle == BLE_CONN_HANDLE_INVALID)
    {
        // BUG FIX: the original fell off the end of a non-void function
        // here (undefined behavior) when no peer was connected.
        return NRF_ERROR_INVALID_STATE;
    }

    ble_gatts_hvx_params_t params;
    // hvx_len is in/out: sd_ble_gatts_hvx() writes back the bytes actually sent.
    uint16_t hvx_len = mydata.size;

    memset(&params, 0, sizeof(params));
    params.type   = BLE_GATT_HVX_NOTIFICATION;
    params.handle = p_cus->mydata_char_handles.value_handle;
    // BUG FIX: the original passed &mydata (the address of the uint8_array_t
    // struct itself) as p_data and stored the literal 17 into p_len. p_data
    // must point at the payload bytes, and p_len is a *pointer* to a uint16_t
    // length — writing 17 into the pointer made the SoftDevice dereference
    // address 0x11, which is what caused the fatal error and reset.
    params.p_data = mydata.p_data;
    params.p_len  = &hvx_len;

    return sd_ble_gatts_hvx(conn_handle, &params);
}
and i send data with timeout_handler
static void mydata_timeout_handler(void * p_context)
{
ret_code_t err_code;
UNUSED_PARAMETER(p_context);
bsp_board_led_on(2);
nrf_delay_ms(100);
bsp_board_led_off(2);
uint8_array_t mydata_value [17] = {0x55,0x10,0x01,0x23,0x99,0xFF,0xFF,0xCC,0xBB,0x00,0x00,0x00,0x00,0x00,0x24,0x24,0x20};
err_code = ble_cus_mydata_update(&m_cus, *mydata_value , m_conn_handle);
if ((err_code != NRF_SUCCESS) &&
(err_code != NRF_ERROR_INVALID_STATE) &&
(err_code != NRF_ERROR_RESOURCES) &&
(err_code != NRF_ERROR_BUSY) &&
(err_code != BLE_ERROR_GATTS_SYS_ATTR_MISSING)
)
{
APP_ERROR_HANDLER(err_code);
}
}
And here is my service char add
// Registration of the 17-byte custom-data characteristic.
// BUG FIX: the init value was declared `uint8_array_t mydata_char_init_value[17]`
// — an array of 17 {size, p_data} descriptor structs. p_init_value expects a
// pointer to the raw value bytes, so it must be a plain uint8_t array.
uint8_t mydata_char_init_value [17] = {0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00};

memset(&add_char_params, 0, sizeof(add_char_params));
add_char_params.uuid              = MYDATA_CHAR_UUID;
add_char_params.uuid_type         = p_cus->uuid_type;
add_char_params.init_len          = sizeof(mydata_char_init_value);  // 17 bytes
add_char_params.max_len           = sizeof(mydata_char_init_value);
add_char_params.p_init_value      = mydata_char_init_value;
add_char_params.char_props.read   = 1;        // clients may read the value
add_char_params.char_props.notify = 1;        // and subscribe for notifications
add_char_params.read_access       = SEC_OPEN;
add_char_params.cccd_write_access = SEC_OPEN;

err_code = characteristic_add(p_cus->service_handle,
                              &add_char_params,
                              &p_cus->mydata_char_handles);
if (err_code != NRF_SUCCESS)
{
    return err_code;
}
I get an error:
nrf error_code= 0x2000310c
There is no info about this error message; it throws this fatal error and goes to nrf_breakpoint_cond after error handler and resets itself.
Please help me; I tried everything I know and I can't move forward. Thanks in advance to whomever tries to help me.

I solved it and wanted to help others who might need it. It's a little different: this code is for sending 4 bytes of data, so you need to make a few changes for 17 bytes, but it's not much. The other code stays the same; you just need to change the update function to something like this:
/**
 * @brief Store a 4-byte pressure value in the GATT database and, when a peer
 *        is connected, push it out as a notification.
 *
 * @param p_cus      Pointer to the custom service structure (must not be NULL).
 * @param presvalue  New pressure value to store and notify.
 *
 * @return NRF_SUCCESS on success, NRF_ERROR_NULL for a NULL service pointer,
 *         NRF_ERROR_INVALID_STATE when no peer is connected, or any error
 *         returned by sd_ble_gatts_value_set() / sd_ble_gatts_hvx().
 */
uint32_t ble_cus_pres_level_update(ble_cus_t * p_cus, uint32_t presvalue)
{
    if (p_cus == NULL)
    {
        return NRF_ERROR_NULL;
    }

    // First, write the new value into the attribute table.
    ble_gatts_value_t attr_value;
    memset(&attr_value, 0, sizeof(attr_value));
    attr_value.len     = sizeof(uint32_t);
    attr_value.offset  = 0;
    attr_value.p_value = &presvalue;

    uint32_t result = sd_ble_gatts_value_set(p_cus->conn_handle,
                                             p_cus->bme280_presvalue_char_handles.value_handle,
                                             &attr_value);
    if (result != NRF_SUCCESS)
    {
        return result;
    }

    // Without an active connection there is nobody to notify.
    if (p_cus->conn_handle == BLE_CONN_HANDLE_INVALID)
    {
        return NRF_ERROR_INVALID_STATE;
    }

    ble_gatts_hvx_params_t hvx_params;
    memset(&hvx_params, 0, sizeof(hvx_params));
    hvx_params.type   = BLE_GATT_HVX_NOTIFICATION;
    hvx_params.handle = p_cus->bme280_presvalue_char_handles.value_handle;
    hvx_params.offset = attr_value.offset;
    hvx_params.p_data = attr_value.p_value;
    hvx_params.p_len  = &attr_value.len;

    return sd_ble_gatts_hvx(p_cus->conn_handle, &hvx_params);
}

Shouldn't err_code = ble_cus_mydata_update(&m_cus, *mydata_value , m_conn_handle); be err_code = ble_cus_mydata_update(&m_cus, mydata_value , m_conn_handle);?

Related

How to get noise data from ESP32

I am using M5StickC, trying to use the microphone for sensing noise. I am not familiar with Arduino. I was able to connect the device to wifi. I have been looking at examples and got this :
/**
 * @brief Configure the ESP32 I2S peripheral in PDM RX mode for the
 *        M5StickC's built-in microphone (44.1 kHz, 16-bit mono).
 *
 * @return true when the driver was installed and fully configured,
 *         false if any configuration step failed.
 */
bool InitI2SMicroPhone()
{
    i2s_config_t i2s_config = {
        .mode = (i2s_mode_t)(I2S_MODE_MASTER | I2S_MODE_RX | I2S_MODE_PDM),
        .sample_rate = 44100,
        .bits_per_sample = I2S_BITS_PER_SAMPLE_16BIT,
        .channel_format = I2S_CHANNEL_FMT_ALL_RIGHT,
        .communication_format = I2S_COMM_FORMAT_I2S,
        .intr_alloc_flags = ESP_INTR_FLAG_LEVEL1,
        .dma_buf_count = 2,
        .dma_buf_len = 128,
    };

    i2s_pin_config_t pin_config;
    pin_config.bck_io_num   = I2S_PIN_NO_CHANGE;  // PDM mode needs no bit clock
    pin_config.ws_io_num    = PIN_CLK;            // PDM clock pin
    pin_config.data_out_num = I2S_PIN_NO_CHANGE;  // RX only, no TX pin
    pin_config.data_in_num  = PIN_DATA;           // PDM data pin

    // BUG FIX: the original summed the esp_err_t results (`err += ...`).
    // Distinct nonzero error codes can accumulate into a misleading value,
    // and later calls ran even after an earlier step failed. Check each
    // call individually and fail fast.
    if (i2s_driver_install(I2S_NUM_0, &i2s_config, 0, NULL) != ESP_OK)
    {
        return false;
    }
    if (i2s_set_pin(I2S_NUM_0, &pin_config) != ESP_OK)
    {
        return false;
    }
    if (i2s_set_clk(I2S_NUM_0, 44100, I2S_BITS_PER_SAMPLE_16BIT, I2S_CHANNEL_MONO) != ESP_OK)
    {
        return false;
    }
    return true;
}
However, I am not sure how to get amplitude data. I want to get noise info and send it over wifi. I do not see this function in Loop. How can I extract the needed info in the Loop function?

Why does SChannel TLS limit a message to 32kb

I am working on using SChannel to build a client/server program. One of the things I would like to do is have file sharing. I found some example code of a client program using Schannel to communicate and I am wondering why the max size of a message is 32kb. Here is the example function that does the receiving
// Drive the SChannel TLS handshake loop: receive server records, feed them to
// InitializeSecurityContextA, send any generated response tokens, and repeat
// until the context is established or an error occurs.
// Returns 1 on a completed handshake (c->ss == SEC_E_OK), 0 otherwise.
// NOTE(review): the 32 KB receive buffer (TLS_MAX_BUFSIZ) covers a single TLS
// record — the protocol caps one record's plaintext at 16 KB, so large
// transfers simply arrive as multiple records; the buffer is not a limit on
// total transfer size.
int tls_handshake(tls_ctx *c, tls_session *s) {
DWORD flags_in, flags_out;
SecBuffer ib[2], ob[1];
SecBufferDesc in, out;
int len;
// send initial hello
if (!tls_hello(c, s)) {
return 0;
}
// Context requirements for the client side of the handshake.
flags_in = ISC_REQ_REPLAY_DETECT |
ISC_REQ_CONFIDENTIALITY |
ISC_RET_EXTENDED_ERROR |
ISC_REQ_ALLOCATE_MEMORY |
ISC_REQ_MANUAL_CRED_VALIDATION;
// Seed the loop: CONTINUE_NEEDED means "call ISC again".
c->ss = SEC_I_CONTINUE_NEEDED;
s->buflen = 0;
// Loop until SEC_E_OK, a hard failure, or a broken connection.
while (c->ss == SEC_I_CONTINUE_NEEDED ||
c->ss == SEC_E_INCOMPLETE_MESSAGE ||
c->ss == SEC_I_INCOMPLETE_CREDENTIALS)
{
// INCOMPLETE_MESSAGE: we have a partial record; read more bytes first.
if (c->ss == SEC_E_INCOMPLETE_MESSAGE)
{
// receive data from server
len = recv(s->sck, &s->buf[s->buflen], s->maxlen - s->buflen, 0);
// socket error?
if (len == SOCKET_ERROR) {
c->ss = SEC_E_INTERNAL_ERROR;
break;
// server disconnected?
} else if (len==0) {
c->ss = SEC_E_INTERNAL_ERROR;
break;
}
// increase buffer position
s->buflen += len;
}
// inspect what we've received
//tls_hex_dump(s->buf, s->buflen);
// input data
ib[0].pvBuffer = s->buf;
ib[0].cbBuffer = s->buflen;
ib[0].BufferType = SECBUFFER_TOKEN;
// empty buffer
// NOTE(review): the second input buffer is conventionally initialized with
// BufferType = SECBUFFER_EMPTY; SECBUFFER_VERSION is the value meant for the
// descriptor's ulVersion field — confirm against the SSPI documentation.
ib[1].pvBuffer = NULL;
ib[1].cbBuffer = 0;
ib[1].BufferType = SECBUFFER_VERSION;
in.cBuffers = 2;
in.pBuffers = ib;
in.ulVersion = SECBUFFER_VERSION;
// output from schannel
// NOTE(review): same remark for the output buffer's BufferType; with
// ISC_REQ_ALLOCATE_MEMORY SChannel allocates pvBuffer for us.
ob[0].pvBuffer = NULL;
ob[0].cbBuffer = 0;
ob[0].BufferType = SECBUFFER_VERSION;
out.cBuffers = 1;
out.pBuffers = ob;
out.ulVersion = SECBUFFER_VERSION;
// Advance the handshake state machine one step.
c->ss = c->sspi->
InitializeSecurityContextA(
&s->cc, &s->ctx, NULL, flags_in, 0,
SECURITY_NATIVE_DREP, &in, 0, NULL,
&out, &flags_out, NULL);
// what have we got so far?
if (c->ss == SEC_E_OK ||
c->ss == SEC_I_CONTINUE_NEEDED ||
(FAILED(c->ss) && (flags_out & ISC_RET_EXTENDED_ERROR)))
{
// response for server?
if (ob[0].cbBuffer != 0 && ob[0].pvBuffer) {
// send response
tls_send(s->sck, ob[0].pvBuffer, ob[0].cbBuffer);
// free response
c->sspi->FreeContextBuffer(ob[0].pvBuffer);
ob[0].pvBuffer = NULL;
}
}
// incomplete message? continue reading
if (c->ss==SEC_E_INCOMPLETE_MESSAGE) continue;
// completed handshake?
if (c->ss==SEC_E_OK) {
s->established = 1;
// If the "extra" buffer contains data, this is encrypted application
// protocol layer stuff and needs to be saved. The application layer
// will decrypt it later with DecryptMessage.
if (ib[1].BufferType == SECBUFFER_EXTRA) {
// NOTE(review): this memmove writes into s->pExtra.pvBuffer without any
// visible allocation or size check here — verify pExtra's buffer is
// allocated and large enough before this point.
memmove(s->pExtra.pvBuffer,
&s->buf[(s->buflen - ib[1].cbBuffer)], ib[1].cbBuffer);
s->pExtra.cbBuffer = ib[1].cbBuffer;
s->pExtra.BufferType = SECBUFFER_TOKEN;
} else {
// no extra data encountered
s->pExtra.pvBuffer = NULL;
s->pExtra.cbBuffer = 0;
s->pExtra.BufferType = SECBUFFER_EMPTY;
}
break;
}
// some other error
if(FAILED(c->ss)) break;
// Copy any leftover data from the "extra" buffer, and go around again.
if(ib[1].BufferType == SECBUFFER_EXTRA) {
memmove(s->buf, &s->buf[(s->buflen - ib[1].cbBuffer)], ib[1].cbBuffer);
s->buflen = ib[1].cbBuffer;
DEBUG_PRINT(" [ we have %i bytes of extra data.\n", s->buflen);
tls_hex_dump(s->buf, s->buflen);
} else {
s->buflen = 0;
}
}
return c->ss==SEC_E_OK ? 1 : 0;
}
The code comes from a Github I found here: https://github.com/odzhan/shells/blob/master/s6/tls.c
Inside one of his header files he defines
#define TLS_MAX_BUFSIZ 32768
I have also read in other places that this is a limit with TLS. Is it possible to increase that limit? What happens if I need to receive more than that, like a large file?

Parse http request --> run task in background --> when task is finished return response

As title says, I'm developing http handler module for nginx in which I want to parse http request, run task in background and when it finishes, I want to return response.
I tried having an http content handler which uses a thread pool to run the task. While parsing the request I incremented r->main->count and decremented it in the task completion handler. Also, in the task completion handler I call ngx_http_send_header and ngx_http_output_filter to return the result. This works for 10-ish requests and then nginx is blocked. It doesn't process http requests anymore (nothing in the logs). In my dev environment, for easier debugging, I'm running nginx with daemon off, master process off and only one worker process.
I also tried attaching my handler in PREACCESS and REWRITE PHASE and then return NGX_DONE when spawning thread, in hope that nginx will call my handler again, but it never does.
Any ideas what I'm doing wrong?
Thank you!
// Phase handler (registered in NGX_HTTP_PREACCESS_PHASE): parses the image
// request, posts a render task to the thread pool, and suspends the request
// by returning NGX_DONE. On a later invocation with ctx->done set, it sends
// the rendered image.
// NOTE(review): nothing here or in image_render_completion() re-activates the
// request after the thread task finishes — nginx does not automatically
// re-run the phase handlers when a thread-task event fires. The completion
// handler needs to wake the request (e.g. by invoking the request's event
// handling / ngx_http_core_run_phases), otherwise the request is parked
// forever and the worker eventually stops serving — which matches the
// observed "blocked after ~10 requests" symptom. TODO confirm against the
// ngx_thread_task examples in the nginx source.
static ngx_int_t ngx_http_image_render_handler(ngx_http_request_t *r)
{
ngx_http_image_render_ctx_t *ctx;
ngx_http_image_render_loc_conf_t *plcf;
plcf = (ngx_http_image_render_loc_conf_t*)ngx_http_get_module_loc_conf(r, ngx_http_image_render_module);
ctx = (ngx_http_image_render_ctx_t*)ngx_http_get_module_ctx(r, ngx_http_image_render_module);
// A context already exists: this is a re-entry for an in-flight request.
if(ctx != NULL)
{
if(ctx->done != 1)
{
// Task still running; keep the request suspended.
ngx_log_error(NGX_LOG_ERR, r->connection->log, 0, "ctx already here! AGAIN");
return NGX_DONE;
}
// Task finished; emit the rendered image.
return send_image(ctx->image_render_ctx);
}
// First entry: allocate a per-request context from the request pool.
ctx = (ngx_http_image_render_ctx_t*)ngx_pcalloc(r->pool, sizeof(ngx_http_image_render_ctx_t));
if (ctx == NULL) {
ngx_log_error(NGX_LOG_ERR, r->connection->log, 0, "NGX_HTTP_INTERNAL_SERVER_ERROR");
return NGX_HTTP_INTERNAL_SERVER_ERROR;
}
ctx->request = r;
ctx->done = 0;
ngx_http_set_ctx(r, ctx, ngx_http_image_render_module);
image_render_ctx_t *image_render_ctx;
ngx_thread_task_t *task;
// Task and its context live in the request pool; freed with the request.
task = ngx_thread_task_alloc(r->pool, sizeof(image_render_ctx_t));
if (task == NULL) {
return NGX_ERROR;
}
image_render_ctx = (image_render_ctx_t*)task->ctx;
ctx->image_render_ctx = image_render_ctx;
if(ngx_http_parse_image_request(r, image_render_ctx) != NGX_OK )
{
ngx_log_error(NGX_LOG_ERR, r->connection->log, 0, "error parsing request");
return NGX_ERROR;
}
image_render_ctx->db_util = plcf->db_util;
task->handler = image_render_func;              // runs in the thread pool
task->event.handler = image_render_completion;  // runs back on the event loop
task->event.data = image_render_ctx;
if (ngx_thread_task_post((ngx_thread_pool_t*)plcf->pool, task) != NGX_OK) {
ngx_log_error(NGX_LOG_ERR, r->connection->log, 0, "error posting to pool");
return NGX_ERROR;
}
// Bump the reference count so the request outlives this NGX_DONE return.
r->main->count++;
ngx_log_error(NGX_LOG_ERR, r->connection->log, 0, "RECEIVED REQUEST!");
return NGX_DONE;
}
// Build a single-buffer output chain holding the rendered PNG and send it as
// the response body with an image/png content type.
// NOTE(review): decrementing r->main->count by hand and then calling the
// output filter bypasses nginx's request accounting — the conventional way to
// release the reference taken before NGX_DONE is ngx_http_finalize_request(r,
// rc); manual count manipulation is a known way to leak or double-close
// requests. TODO confirm and rework.
static int send_image(image_render_ctx_t* trc)
{
ngx_buf_t *b;
ngx_chain_t out;
u_char* image = (u_char*)trc->image_data.data();
size_t image_size = trc->image_data.length();
ngx_http_request_t *r = trc->r;
// NOTE(review): ngx_create_temp_buf can return NULL; b is used unchecked.
b = ngx_create_temp_buf(r->pool, image_size);
out.buf = b;
out.next = NULL;
ngx_memcpy(b->pos, image, image_size);
b->last = b->pos + image_size;
b->memory = 1;    // buffer is read-only memory for the filter chain
b->last_buf = 1;  // this is the final buffer of the response
r->main->count--;
r->headers_out.content_type.len = sizeof("image/png") - 1;
r->headers_out.content_type.data = (u_char *) "image/png";
r->headers_out.status = NGX_HTTP_OK;
r->headers_out.content_length_n = image_size;
// NOTE(review): ngx_http_send_header's return value is ignored here.
ngx_http_send_header(r);
return ngx_http_output_filter(r, &out);;
}
//Attach our handler
static ngx_int_t ngx_http_image_render_init(ngx_conf_t *cf)
{
ngx_http_handler_pt *h;
ngx_http_core_main_conf_t *cmcf;
cmcf = (ngx_http_core_main_conf_t*)ngx_http_conf_get_module_main_conf(cf, ngx_http_core_module);
h = (ngx_http_handler_pt*)ngx_array_push(&cmcf->phases[NGX_HTTP_PREACCESS_PHASE].handlers);
if (h == NULL) {
return NGX_ERROR;
}
*h = (ngx_http_handler_pt)ngx_http_image_render_handler;
return NGX_OK;
}
// Thread-task completion event handler: runs back on the worker's event loop
// after image_render_func finishes in the thread pool.
// NOTE(review): this only flips ctx->done — it never re-activates the
// suspended request (no call back into the request's event handling and no
// ngx_http_finalize_request), so the phase handler is never re-entered to
// call send_image(). This is the most likely cause of the stalled requests;
// confirm against nginx's thread-task usage in its own modules.
static void
image_render_completion(ngx_event_t *ev)
{
image_render_ctx_t *ctx = (image_render_ctx_t*)ev->data;
ngx_log_error(NGX_LOG_ERR, ctx->r->connection->log, 0, "sending back image!!!");
ngx_http_image_render_ctx_t *rctx;
rctx = (ngx_http_image_render_ctx_t*)ngx_http_get_module_ctx(ctx->r, ngx_http_image_render_module);
rctx->done = 1;
}

ReadFile returns 0 bytes with GetLastError() as 0

I am using a generic write and read operations using the serial communication with R-232 connector to an external device.
For specific write operations, the readFile returns the data.
But for one command, it returns 0 bytes and GetLastError() returns 0.
I am using the application on a windows PDA.
Below is the code used:
// JNI serial-command body (function signature is outside this excerpt):
// builds a CR-terminated command from a Java string, opens COMn, configures
// the port, writes the command, and reads the reply until a read times out.
long port = jport;
long baudrate = jbaudrate;
int cmd_length = env->GetStringLength(jcmd);
const jchar* cmd = env->GetStringChars(jcmd, 0);
// Narrow the UTF-16 command to 8-bit chars and append the device's CR
// terminator (no trailing NUL — the write length is passed explicitly).
zCommand = new char[cmd_length+1];
for (int j=0;j<cmd_length;j++)
zCommand[j] = (char)cmd[j];
zCommand[cmd_length]='\r';
env->ReleaseStringChars(jcmd,cmd);
// NOTE(review): comPort[6] only fits "COMn:" plus NUL for single-digit port
// numbers; port >= 10 overflows the buffer.
WCHAR comPort[6];
wsprintf(comPort, L"COM%d:", port);
LPCTSTR pcCommPort = LPCTSTR( comPort );
hCom = CreateFile( pcCommPort,
GENERIC_READ | GENERIC_WRITE,
0, // exclusive-access
NULL, // default security attributes
OPEN_EXISTING,
0, // not overlapped I/O
NULL
);
if (hCom == INVALID_HANDLE_VALUE)
{
throw_executeCommand(env,"java/lang/Exception","INVALID_HANDLE_VALUE");
return env->NewStringUTF("");
}
//Clear all input output buffer
PurgeComm(hCom, PURGE_RXCLEAR | PURGE_TXCLEAR);
//Configure DCB
DCB PortDCB;
// Initialize the DCBlength member.
PortDCB.DCBlength = sizeof (DCB);
// Get the default port setting information.
GetCommState (hCom, &PortDCB);
// Change the DCB structure settings.
PortDCB.BaudRate = baudrate; // Zera specified baud
PortDCB.ByteSize = 8;
// NOTE(review): TWOSTOPBITS is unusual — most devices expect ONESTOPBIT;
// confirm against the device's serial spec.
PortDCB.StopBits = TWOSTOPBITS;
// NOTE(review): DCB.Parity conventionally takes NOPARITY; PARITY_NONE is a
// COMMPROP capability flag — verify this compiles to the intended value.
PortDCB.Parity = PARITY_NONE;
PortDCB.fDtrControl = DTR_CONTROL_ENABLE;
// NOTE(review): fRtsControl is a small enum field expecting ONE
// RTS_CONTROL_* value; OR/XOR-combining several values is almost certainly
// not what was intended.
PortDCB.fRtsControl = RTS_CONTROL_ENABLE | (RTS_CONTROL_TOGGLE ^ RTS_CONTROL_HANDSHAKE );
PortDCB.fAbortOnError = FALSE;
//Set Com Timeouts
COMMTIMEOUTS commTimeouts;
if(!GetCommTimeouts(hCom, &commTimeouts))
{
throw_executeCommand(env,"java/lang/Exception","Exception GetCommTimeouts");
return env->NewStringUTF("");
}
// With these timeouts a ReadFile that sees no data returns TRUE with
// 0 bytes read and GetLastError() == 0 — a timeout, not an error. That is
// exactly the "0 bytes, last error 0" behavior described above.
commTimeouts.ReadIntervalTimeout = 50;
commTimeouts.ReadTotalTimeoutMultiplier = 10;
commTimeouts.ReadTotalTimeoutConstant = 50;
commTimeouts.WriteTotalTimeoutConstant = 50;
commTimeouts.WriteTotalTimeoutMultiplier = 10;
if(!SetCommTimeouts(hCom, &commTimeouts))
{
throw_executeCommand(env,"java/lang/Exception","Exception SetCommTimeouts");
return env->NewStringUTF("");
}
if (!SetCommState (hCom, &PortDCB))
{
throw_executeCommand(env,"java/lang/Exception","Exception SetCommState");
return env->NewStringUTF("");
}
unsigned long numBytesWritten;
unsigned long numBytesRead;
BOOL bWriteResult;
// NOTE(review): BUG — sizeof (zCommand) is the size of a char* (4 or 8),
// NOT the command length, so only the first pointer-size bytes are ever
// transmitted. Commands longer than that are truncated and the device never
// answers. Use cmd_length + 1 here.
bWriteResult = WriteFile(hCom,zCommand, sizeof (zCommand),&numBytesWritten,NULL);
if (!bWriteResult)
{
throw_executeCommand(env,"java/lang/Exception","Exception WriteFile");
return env->NewStringUTF("");
}
//Time to read
string result="";
BOOL bReadResult; //AAV,ARC,AGR1
char sBuffer[128];
// NOTE(review): BUG — datafound is never initialized; if the first read
// times out with 0 bytes, the `if (datafound)` below reads an indeterminate
// value (undefined behavior). Initialize it to FALSE.
BOOL datafound;
numBytesRead = 0;
// Accumulate reply bytes, mapping CR to LF, until a read returns 0 bytes
// (i.e. the inter-byte/total timeout fires).
while(true)
{
bReadResult = ReadFile(hCom, &sBuffer, 128, &numBytesRead, NULL);
if (numBytesRead > 0)
{
for (int i=0;i<(int)numBytesRead;i++)
{
datafound = TRUE;
if (sBuffer[i]=='\r')
{
result += '\n';
}else
{
result += sBuffer[i];
}
}
}
else
{
break;
}
}
if (datafound)
{
if (hCom != INVALID_HANDLE_VALUE)
{
cleanup_executeCommand();
return env->NewStringUTF(result.c_str());
}
}
cleanup_executeCommand();
throw_executeCommand(env,"java/lang/Exception","err");
return env->NewStringUTF("");

libavcodec transcoding - setting pts and dts

I'm trying to transcode video files into a standard format. This is the main loop I have:
// Main transcode loop (excerpt of an enclosing function): demux packets,
// decode video, rescale with sws_scale, re-encode, and mux the output.
for(frame_count = 0; av_read_frame(input_ctx, &in_packet) >= 0; frame_count++) {
if(in_packet.stream_index == video_stream_index) {
decodedPacket = 0;
rc = avcodec_decode_video2(video_in_codec, inputFrame, &decodedPacket, &in_packet);
if(decodedPacket) {
out_frames++;
rc = sws_scale(sws_ctx, inputFrame->data, inputFrame->linesize, 0, video_out_codec->height, outputFrame->data, outputFrame->linesize);
// sws_scale returns the output slice height on success.
if(rc != video_out_codec->height) {
puts("scaling error");
}
// NOTE(review): with the encoder's time_base set to 1/30, the pts of the
// n-th frame should simply be n (one tick per frame). The (1/30)*90
// formula (3 ticks/frame) looks like a mix-up with the 90 kHz MPEG-TS
// timebase and would explain the odd tbr/tbn/tbc values — confirm.
outputFrame->pts = (1.0 / 30.0) * 90.0 * video_out_codec->frame_number;
rc = avcodec_encode_video2(video_out_codec, &out_packet, outputFrame, &encodedPacket);
if(rc != 0) {
puts("encoding error");
}
if(encodedPacket) {
// NOTE(review): the encoder already sets AV_PKT_FLAG_KEY on out_packet;
// testing out_packet.flags & AV_PKT_FLAG_KEY replaces the deprecated
// coded_frame access.
if (video_out_stream->codec->coded_frame->key_frame) { // deprecated, what to use instead
out_packet.flags |= AV_PKT_FLAG_KEY;
}
out_packet.stream_index = 0;
rc = av_interleaved_write_frame(output_ctx, &out_packet);
if(rc != 0) {
puts("frame write error");
}
av_free_packet(&out_packet);
memset(&out_packet, 0, sizeof(AVPacket));
packet_count++;
}
// NOTE(review): in_packet is only freed when a frame was decoded; packets
// for other streams or packets that produce no frame are leaked — move
// this av_free_packet outside the decodedPacket/stream-index checks.
av_free_packet(&in_packet);
}
}
...
When I run this I get strange values for tbr/tbn/tbc reported by ffprobe and the video is a mess.
I've tried adding code as per this answer e.g.
// Rescale packet timestamps from the encoder's time base to the stream's
// time base before muxing.
if (out_packet.pts != AV_NOPTS_VALUE) {
out_packet.pts = av_rescale_q(out_packet.pts, video_out_stream->codec->time_base, video_out_stream->time_base);
}
// BUG FIX: the original tested out_packet.pts in this second condition but
// assigned out_packet.dts, so an unset dts (AV_NOPTS_VALUE) could be
// rescaled into a garbage timestamp — a likely cause of the muxer's
// "delay between first and last packet" complaints.
if (out_packet.dts != AV_NOPTS_VALUE) {
out_packet.dts = av_rescale_q(out_packet.dts, video_out_stream->codec->time_base, video_out_stream->time_base);
}
...but then I get errors such as this is the debug output:
[mp4 # 0x1038aba00] Delay between the first packet and last packet in the muxing queue is 10100000 > 10000000: forcing output
I'm setting the timebase correctly (I think)...
// Create the output video stream and configure its H.264 encoder context.
video_out_stream = avformat_new_stream(output_ctx, videoEncoder);
video_out_stream->id = 0;
// NOTE(review): many muxers overwrite the stream time_base in
// avformat_write_header(); rescale packets to video_out_stream->time_base
// *after* the header is written, using its then-current value.
video_out_stream->time_base.den = 30;
video_out_stream->time_base.num = 1;
video_out_codec = video_out_stream->codec;
// Resets the codec context to the encoder's defaults; all fields below must
// therefore be set after this call (as they are here).
avcodec_get_context_defaults3(video_out_codec, videoEncoder);
video_out_codec->codec_id = AV_CODEC_ID_H264;
// NOTE(review): bit_rate is in bits per second — 2048 is 2 kb/s, which will
// produce unwatchable H.264; presumably ~2 Mb/s (2000000) was intended.
video_out_codec->bit_rate = 2048;
video_out_codec->width = 854;
video_out_codec->height = 480;
video_out_codec->time_base.den = 30;
video_out_codec->time_base.num = 1;
video_out_codec->gop_size = 30;  // one keyframe per second at 30 fps
video_out_codec->pix_fmt = AV_PIX_FMT_YUV420P;
Any thoughts about how I can calculate the correct output frame pts and encoded packet dts/pts? I'm clearly doing something wrong.

Resources