I am getting this error while uploading data to the datastore using the bulk uploader. Data used to upload fine with the previous CSV file; the new CSV file has an extra field that contains a list of strings (e.g. A,B,E,G,E,F). The following is the error I get:
Traceback (most recent call last):
File "/opt/google_appengine_1.6.4/google/appengine/tools/adaptive_thread_pool.py", line 176, in WorkOnItems
status, instruction = item.PerformWork(self.__thread_pool)
File "/opt/google_appengine_1.6.4/google/appengine/tools/bulkloader.py", line 764, in PerformWork
transfer_time = self._TransferItem(thread_pool)
File "/opt/google_appengine_1.6.4/google/appengine/tools/bulkloader.py", line 935, in _TransferItem
self.request_manager.PostEntities(self.content)
File "/opt/google_appengine_1.6.4/google/appengine/tools/bulkloader.py", line 1420, in PostEntities
datastore.Put(entities)
File "/opt/google_appengine_1.6.4/google/appengine/api/datastore.py", line 576, in Put
return PutAsync(entities, **kwargs).get_result()
File "/opt/google_appengine_1.6.4/google/appengine/datastore/datastore_rpc.py", line 786, in get_result
results = self.__rpcs[0].get_result()
File "/opt/google_appengine_1.6.4/google/appengine/api/apiproxy_stub_map.py", line 592, in get_result
return self.__get_result_hook(self)
File "/opt/google_appengine_1.6.4/google/appengine/datastore/datastore_rpc.py", line 1556, in __put_hook
self.check_rpc_success(rpc)
File "/opt/google_appengine_1.6.4/google/appengine/datastore/datastore_rpc.py", line 1191, in check_rpc_success
rpc.check_success()
File "/opt/google_appengine_1.6.4/google/appengine/api/apiproxy_stub_map.py", line 558, in check_success
self.__rpc.CheckSuccess()
File "/opt/google_appengine_1.6.4/google/appengine/api/apiproxy_rpc.py", line 156, in _WaitImpl
self.request, self.response)
File "/opt/google_appengine_1.6.4/google/appengine/ext/remote_api/remote_api_stub.py", line 248, in MakeSyncCall
handler(request, response)
File "/opt/google_appengine_1.6.4/google/appengine/ext/remote_api/remote_api_stub.py", line 397, in _Dynamic_Put
'datastore_v3', 'Put', put_request, put_response)
File "/opt/google_appengine_1.6.4/google/appengine/ext/remote_api/remote_api_stub.py", line 177, in MakeSyncCall
self._MakeRealSyncCall(service, call, request, response)
File "/opt/google_appengine_1.6.4/google/appengine/ext/remote_api/remote_api_stub.py", line 185, in _MakeRealSyncCall
request_pb.set_request(request.Encode())
File "/opt/google_appengine_1.6.4/google/net/proto/ProtocolBuffer.py", line 56, in Encode
self.Output(e)
File "/opt/google_appengine_1.6.4/google/net/proto/ProtocolBuffer.py", line 205, in Output
self.OutputUnchecked(e)
File "/opt/google_appengine_1.6.4/google/appengine/datastore/datastore_pb.py", line 4400, in OutputUnchecked
self.entity_[i].OutputUnchecked(out)
File "/opt/google_appengine_1.6.4/google/appengine/datastore/entity_pb.py", line 2380, in OutputUnchecked
self.property_[i].OutputUnchecked(out)
File "/opt/google_appengine_1.6.4/google/appengine/datastore/entity_pb.py", line 1307, in OutputUnchecked
self.value_.OutputUnchecked(out)
File "/opt/google_appengine_1.6.4/google/appengine/datastore/entity_pb.py", line 945, in OutputUnchecked
self.referencevalue_.OutputUnchecked(out)
File "/opt/google_appengine_1.6.4/google/appengine/datastore/entity_pb.py", line 675, in OutputUnchecked
self.pathelement_[i].OutputUnchecked(out)
File "/opt/google_appengine_1.6.4/google/appengine/datastore/entity_pb.py", line 135, in OutputUnchecked
out.putVarInt64(self.id_)
File "/opt/google_appengine_1.6.4/google/net/proto/ProtocolBuffer.py", line 402, in putVarInt64
raise ProtocolBufferEncodeError, "int64 too big"
Changing the data type of the problematic entries from IntegerProperty to StringProperty might help.
I was having the same problem: I was storing the user_id of a Users entity as an integer, but the datastore stores integers as signed 64-bit values, so when confronted with a bigger number it simply can't hold it. So I am storing it as a string now.
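A minimal sketch of that change, assuming an old-style db model (the User kind and user_id field here are hypothetical placeholders for your own):

from google.appengine.ext import db

class User(db.Model):
    # user_id = db.IntegerProperty()  # fails once a value exceeds 2**63 - 1
    user_id = db.StringProperty()     # strings are not bound by the int64 range

Remember to convert the values to strings in your loader (e.g. str(value)) before putting the entities.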
Related
I am trying to change the sequence of my invoicing so that, instead of resetting it each new year, the count keeps going upwards continuously. For example:
inv/2021/0001 date 1/1/2023 (this one should be 2366)
inv/2021/2365 date 31/12/2022
Researching the subject, I found out I need to go into Technical -> Sequences to get the invoice numbers I want. But my problem is, once I click Sequences I get the following server error:
Error:
Odoo Server Error
Traceback (most recent call last):
File "/odoo/odoo-server/odoo/api.py", line 1039, in get
value = self._data[key][field][record._ids[0]]
KeyError: 254
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/odoo/odoo-server/odoo/fields.py", line 981, in __get__
value = record.env.cache.get(record, self)
File "/odoo/odoo-server/odoo/api.py", line 1041, in get
raise CacheMiss(record, field)
odoo.exceptions.CacheMiss: ('ir.sequence(254,).number_next_actual', None)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/odoo/odoo-server/odoo/http.py", line 656, in _handle_exception
return super(JsonRequest, self)._handle_exception(exception)
File "/odoo/odoo-server/odoo/http.py", line 314, in _handle_exception
raise pycompat.reraise(type(exception), exception, sys.exc_info()[2])
File "/odoo/odoo-server/odoo/tools/pycompat.py", line 87, in reraise
raise value
File "/odoo/odoo-server/odoo/http.py", line 698, in dispatch
result = self._call_function(**self.params)
File "/odoo/odoo-server/odoo/http.py", line 346, in _call_function
return checked_call(self.db, *args, **kwargs)
File "/odoo/odoo-server/odoo/service/model.py", line 97, in wrapper
return f(dbname, *args, **kwargs)
File "/odoo/odoo-server/odoo/http.py", line 339, in checked_call
result = self.endpoint(*a, **kw)
File "/odoo/odoo-server/odoo/http.py", line 941, in __call__
return self.method(*args, **kw)
File "/odoo/odoo-server/odoo/http.py", line 519, in response_wrap
response = f(*args, **kw)
File "/odoo/odoo-server/addons/web/controllers/main.py", line 904, in search_read
return self.do_search_read(model, fields, offset, limit, domain, sort)
File "/odoo/odoo-server/addons/web/controllers/main.py", line 926, in do_search_read
offset=offset or 0, limit=limit or False, order=sort or False)
File "/odoo/odoo-server/odoo/models.py", line 4589, in search_read
result = records.read(fields)
File "/odoo/odoo-server/odoo/models.py", line 2791, in read
vals[name] = convert(record[name], record, use_name_get)
File "/odoo/odoo-server/odoo/models.py", line 5117, in __getitem__
return self._fields[key].__get__(self, type(self))
File "/odoo/odoo-server/odoo/fields.py", line 985, in __get__
self.determine_value(record)
File "/odoo/odoo-server/odoo/fields.py", line 1098, in determine_value
self.compute_value(recs)
File "/odoo/odoo-server/odoo/fields.py", line 1052, in compute_value
self._compute_value(records)
File "/odoo/odoo-server/odoo/fields.py", line 1043, in _compute_value
getattr(records, self.compute)()
File "/odoo/odoo-server/odoo/addons/base/models/ir_sequence.py", line 96, in _get_number_next_actual
seq.number_next_actual = _predict_nextval(self, seq_id)
File "/odoo/odoo-server/odoo/addons/base/models/ir_sequence.py", line 68, in _predict_nextval
self.env.cr.execute(query % {'seq_id': seq_id})
File "/odoo/odoo-server/odoo/sql_db.py", line 148, in wrapper
return f(self, *args, **kwargs)
File "/odoo/odoo-server/odoo/sql_db.py", line 225, in execute
res = self._obj.execute(query, params)
psycopg2.ProgrammingError: relation "ir_sequence_1000015" does not exist
LINE 6: FROM ir_sequence_1000015
I believe it could be a database error, but I am not sure what this is about. Any idea?
Thanks!
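A hedged guess from the psycopg2 error: an ir.sequence record still exists in Odoo and points at the PostgreSQL sequence ir_sequence_1000015, but that sequence is gone from the database, so the view cannot compute number_next_actual. A sketch of a repair, run from odoo shell against the affected database (back it up first; the RESTART value is an example, not taken from your data):

env.cr.execute("CREATE SEQUENCE ir_sequence_1000015")
# Optionally continue numbering where you want it to resume:
# env.cr.execute("ALTER SEQUENCE ir_sequence_1000015 RESTART WITH 2366")
env.cr.commit()

After that, Technical -> Sequences should load again and you can edit the next number from the UI.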
I'd like to use a Python library (pyod, latest) that has a dependency on Numba (>= 0.50) in a UDF. I created an aggregate UDF in Python, and I am not new to the concept.
I get an error immediately after submitting the job, while it is starting up.
Caused by: java.lang.RuntimeException: Error received from SDK harness for instruction 1: Traceback (most recent call last):
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/sdk_worker.py", line 289, in _execute
response = task()
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/sdk_worker.py", line 362, in <lambda>
lambda: self.create_worker().do_instruction(request), request)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/sdk_worker.py", line 606, in do_instruction
return getattr(self, request_type)(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/sdk_worker.py", line 637, in process_bundle
bundle_processor = self.bundle_processor_cache.get(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/sdk_worker.py", line 463, in get
processor = bundle_processor.BundleProcessor(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 868, in __init__
self.ops = self.create_execution_tree(self.process_bundle_descriptor)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 921, in create_execution_tree
return collections.OrderedDict([(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 924, in <listcomp>
get_operation(transform_id))) for transform_id in sorted(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 812, in wrapper
result = cache[args] = func(*args)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 903, in get_operation
transform_consumers = {
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 904, in <dictcomp>
tag: [get_operation(op) for op in pcoll_consumers[pcoll_id]]
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 904, in <listcomp>
tag: [get_operation(op) for op in pcoll_consumers[pcoll_id]]
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 812, in wrapper
result = cache[args] = func(*args)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 908, in get_operation
return transform_factory.create_operation(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 1198, in create_operation
return creator(self, transform_id, transform_proto, payload, consumers)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/beam/beam_operations.py", line 89, in create_group_window_aggregate_function
return _create_user_defined_function_operation(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/beam/beam_operations.py", line 174, in _create_user_defined_function_operation
return beam_operation_cls(
File "pyflink/fn_execution/beam/beam_operations_fast.pyx", line 210, in pyflink.fn_execution.beam.beam_operations_fast.StatefulFunctionOperation.__init__
File "pyflink/fn_execution/beam/beam_operations_fast.pyx", line 129, in pyflink.fn_execution.beam.beam_operations_fast.FunctionOperation.__init__
File "pyflink/fn_execution/beam/beam_operations_fast.pyx", line 214, in pyflink.fn_execution.beam.beam_operations_fast.StatefulFunctionOperation.generate_operation
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/table/operations.py", line 446, in __init__
super(StreamGroupWindowAggregateOperation, self).__init__(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/table/operations.py", line 309, in __init__
super(AbstractStreamGroupAggregateOperation, self).__init__(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/table/operations.py", line 281, in __init__
super(BaseStatefulOperation, self).__init__(serialized_fn)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/table/operations.py", line 80, in __init__
self.func, self.user_defined_funcs = self.generate_func(serialized_fn)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/table/operations.py", line 329, in generate_func
extract_user_defined_aggregate_function(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/utils/operation_utils.py", line 221, in extract_user_defined_aggregate_function
user_defined_agg = load_aggregate_function(user_defined_function_proto.payload)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/utils/operation_utils.py", line 281, in load_aggregate_function
return pickle.loads(payload)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/pickle.py", line 29, in loads
return cloudpickle.loads(payload)
File "/tmp/python-dist-ca64683e-f3c8-4ff9-b2a8-8c95c5d508bd/python-files/blob_p-1eee456524b0a216bf998cb36288df034d60c922-5797c5572fd29f1e17b5dd686b627324/dbscan_udf.py", line 31, in <module>
from pyod.models.ecod import ECOD
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyod/__init__.py", line 4, in <module>
from . import utils
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyod/utils/__init__.py", line 12, in <module>
from .stat_models import pairwise_distances_no_broadcast
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyod/utils/stat_models.py", line 11, in <module>
from numba import njit
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/__init__.py", line 38, in <module>
from numba.core.decorators import (cfunc, generated_jit, jit, njit, stencil,
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/core/decorators.py", line 12, in <module>
from numba.stencils.stencil import stencil
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/stencils/stencil.py", line 11, in <module>
from numba.core import types, typing, utils, ir, config, ir_utils, registry
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/core/registry.py", line 4, in <module>
from numba.core import utils, typing, dispatcher, cpu
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/core/dispatcher.py", line 13, in <module>
from numba.core import (
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/core/compiler.py", line 6, in <module>
from numba.core import (utils, errors, typing, interpreter, bytecode, postproc,
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/core/callconv.py", line 12, in <module>
from numba.core.base import PYOBJECT, GENERIC_POINTER
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/core/base.py", line 24, in <module>
from numba.cpython import builtins
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/cpython/builtins.py", line 524, in <module>
from numba.core.typing.builtins import IndexValue, IndexValueType
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/core/typing/builtins.py", line 22, in <module>
@infer_global(print)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/core/typing/templates.py", line 1278, in register_global
if getattr(mod, val.__name__) is not val:
AttributeError: module 'pyflink.fn_execution.beam.beam_sdk_worker_main' has no attribute 'print'
My library versions (the important ones):
numba==0.55.1
numpy==1.19.5
apache-beam==2.27.0
apache-flink==1.15.1
pyod==1.0.4
Since it is a very strange error raised from inside Beam, I cannot interpret it.
Does anyone have any idea?
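Not a definitive fix, but a workaround sketch derived from the last two frames of the traceback: Numba's @infer_global(print) registration does getattr(mod, 'print') on the module that the current print function claims to come from, and inside the Beam worker that module is pyflink.fn_execution.beam.beam_sdk_worker_main (PyFlink appears to swap the builtin print for a logging shim defined there), which exposes no print attribute. Making the shim reachable under that name at the top of dbscan_udf.py, before pyod (and hence Numba) is imported, lets the check pass:

import sys

# Assumption-driven patch: expose the current print on the worker-main
# module that Numba resolves it to, so getattr(mod, 'print') succeeds.
_worker_main = sys.modules.get('pyflink.fn_execution.beam.beam_sdk_worker_main')
if _worker_main is not None and not hasattr(_worker_main, 'print'):
    _worker_main.print = print

from pyod.models.ecod import ECOD  # should now pull in Numba without the AttributeError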
I got this error when I try to run Odoo via the terminal, after configuring Odoo and Python in Eclipse and creating the database. Here is the full error:
2021-06-08 13:51:17,499 14016 ERROR odoo_13A werkzeug: Error on request:
Traceback (most recent call last):
File "/home/davemax/.local/lib/python3.8/site-packages/werkzeug/serving.py", line 270, in run_wsgi
execute(self.server.app)
File "/home/davemax/.local/lib/python3.8/site-packages/werkzeug/serving.py", line 258, in execute
application_iter = app(environ, start_response)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/service/server.py", line 439, in app
return self.app(e, s)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/service/wsgi_server.py", line 142, in application
return application_unproxied(environ, start_response)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/service/wsgi_server.py", line 117, in application_unproxied
result = odoo.http.root(environ, start_response)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/http.py", line 1287, in __call__
return self.dispatch(environ, start_response)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/http.py", line 1257, in __call__
return self.app(environ, start_wrapped)
File "/home/davemax/.local/lib/python3.8/site-packages/werkzeug/wsgi.py", line 766, in __call__
return self.app(environ, start_response)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/http.py", line 1457, in dispatch
result = ir_http._dispatch()
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/addons/base/models/ir_http.py", line 238, in _dispatch
return cls._handle_exception(e)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/addons/base/models/ir_http.py", line 206, in _handle_exception
return request._handle_exception(exception)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/http.py", line 750, in _handle_exception
return super(HttpRequest, self)._handle_exception(exception)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/http.py", line 310, in _handle_exception
raise pycompat.reraise(type(exception), exception, sys.exc_info()[2])
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/tools/pycompat.py", line 14, in reraise
raise value
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/addons/base/models/ir_http.py", line 234, in _dispatch
result = request.dispatch()
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/http.py", line 809, in dispatch
r = self._call_function(**self.params)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/http.py", line 350, in _call_function
return checked_call(self.db, *args, **kwargs)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/service/model.py", line 94, in wrapper
return f(dbname, *args, **kwargs)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/http.py", line 339, in checked_call
result = self.endpoint(*a, **kw)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/http.py", line 915, in __call__
return self.method(*args, **kw)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/http.py", line 515, in response_wrap
response = f(*args, **kw)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/addons/web/controllers/main.py", line 844, in web_client
context = request.env['ir.http'].webclient_rendering_context()
AttributeError: 'ir.http' object has no attribute 'webclient_rendering_context'
That is the full traceback. Please help me.
Run your server with -d cphhr_test -u all appended, like this:
/home/sadid/venv/bin/python3 /home/sadid/odoo-bin -c /home/sadid/odoo.conf -d cphhr_test -u all
The -u all flag upgrades every installed module in the cphhr_test database. webclient_rendering_context is defined by the web module's ir.http model, so this error usually means the code on disk and the database registry are out of sync; upgrading the modules brings them back in line.
Feature: a window of size 10 minutes that slides by 5 minutes, used to aggregate data and then do something with it; almost 2 GB of data per window, about 1 million items. (A sketch of the window definition follows the job params below.)
Job params:
bin/yarn-session.sh -s 2 -jm 2048 -tm 48768 \
-Dyarn.containers.vcores=4 \
-Dtaskmanager.memory.managed.consumer-weights=DATAPROC:30,PYTHON:70 \
-Dtaskmanager.memory.managed.fraction=0.7 \
-Dtaskmanager.memory.task.off-heap.size=5120m \
-nm $task_name -qu $queue -d
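For reference, the window in question is defined roughly like this (a hedged sketch using the Table API; source_table, rowtime, item_key and my_agg_udf are placeholders, not names from the real job):

from pyflink.table.expressions import col, lit
from pyflink.table.window import Slide

windowed = (
    source_table  # assumed: a table with an event-time attribute named rowtime
    .window(Slide.over(lit(10).minutes)
                 .every(lit(5).minutes)
                 .on(col("rowtime"))
                 .alias("w"))
    .group_by(col("w"), col("item_key"))
    .select(col("item_key"), my_agg_udf(col("value")).alias("agg_value"))
)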
The exception message is below:
Traceback (most recent call last):
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/apache_beam/runners/worker/sdk_worker.py", line 253, in _execute
response = task()
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/apache_beam/runners/worker/sdk_worker.py", line 310, in <lambda>
lambda: self.create_worker().do_instruction(request), request)
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/apache_beam/runners/worker/sdk_worker.py", line 480, in do_instruction
getattr(request, request_type), request.instruction_id)
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/apache_beam/runners/worker/sdk_worker.py", line 515, in process_bundle
bundle_processor.process_bundle(instruction_id))
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/apache_beam/runners/worker/bundle_processor.py", line 978, in process_bundle
element.data)
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/apache_beam/runners/worker/bundle_processor.py", line 218, in process_encoded
self.output(decoded_value)
File "apache_beam/runners/worker/operations.py", line 330, in apache_beam.runners.worker.operations.Operation.output
File "apache_beam/runners/worker/operations.py", line 332, in apache_beam.runners.worker.operations.Operation.output
File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
File "pyflink/fn_execution/beam/beam_operations_fast.pyx", line 71, in pyflink.fn_execution.beam.beam_operations_fast.FunctionOperation.process
File "pyflink/fn_execution/beam/beam_operations_fast.pyx", line 73, in pyflink.fn_execution.beam.beam_operations_fast.FunctionOperation.process
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/pyflink/fn_execution/beam/beam_coder_impl_slow.py", line 627, in decode_from_stream
yield self._decode_one_batch_from_stream(in_stream, in_stream.read_var_int64())
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/pyflink/fn_execution/beam/beam_coder_impl_slow.py", line 638, in _decode_one_batch_from_stream
return arrow_to_pandas(self._timezone, self._field_types, [next(self._batch_reader)])
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/pyflink/fn_execution/beam/beam_coder_impl_slow.py", line 631, in _load_from_stream
reader = pa.ipc.open_stream(stream)
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/pyarrow/ipc.py", line 137, in open_stream
return RecordBatchStreamReader(source)
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/pyarrow/ipc.py", line 61, in __init__
self._open(source)
File "pyarrow/ipc.pxi", line 352, in pyarrow.lib._RecordBatchStreamReader._open
File "pyarrow/error.pxi", line 99, in pyarrow.lib.check_status
OSError: Expected IPC message of type schema but got record batch
Yes, this is indeed a bug; please refer to FLINK-21208.
I tried to make migrations on a Django 1.11.20 project, but I get an error, and I don't understand where it comes from.
There must have been migrations before, because the project works; I just can't add a modification to the project and apply a new migration.
Traceback (most recent call last):
File "manage.py", line 22, in <module>
execute_from_command_line(sys.argv)
File "/usr/local/lib/python3.6/site-packages/django/core/management/__init__.py", line 364, in execute_from_command_line
utility.execute()
File "/usr/local/lib/python3.6/site-packages/django/core/management/__init__.py", line 356, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/usr/local/lib/python3.6/site-packages/django/core/management/base.py", line 283, in run_from_argv
self.execute(*args, **cmd_options)
File "/usr/local/lib/python3.6/site-packages/django/core/management/base.py", line 330, in execute
output = self.handle(*args, **options)
File "/usr/local/lib/python3.6/site-packages/django/core/management/commands/makemigrations.py", line 193, in handle
self.write_migration_files(changes)
File "/usr/local/lib/python3.6/site-packages/django/core/management/commands/makemigrations.py", line 231, in write_migration_files
migration_string = writer.as_string()
File "/usr/local/lib/python3.6/site-packages/django/db/migrations/writer.py", line 163, in as_string
operation_string, operation_imports = OperationWriter(operation).serialize()
File "/usr/local/lib/python3.6/site-packages/django/db/migrations/writer.py", line 120, in serialize
_write(arg_name, arg_value)
File "/usr/local/lib/python3.6/site-packages/django/db/migrations/writer.py", line 72, in _write
arg_string, arg_imports = MigrationWriter.serialize(item)
File "/usr/local/lib/python3.6/site-packages/django/db/migrations/writer.py", line 293, in serialize
return serializer_factory(value).serialize()
File "/usr/local/lib/python3.6/site-packages/django/db/migrations/serializer.py", line 44, in serialize
item_string, item_imports = serializer_factory(item).serialize()
File "/usr/local/lib/python3.6/site-packages/django/db/migrations/serializer.py", line 229, in serialize
return self.serialize_deconstructed(path, args, kwargs)
File "/usr/local/lib/python3.6/site-packages/django/db/migrations/serializer.py", line 101, in serialize_deconstructed
arg_string, arg_imports = serializer_factory(arg).serialize()
File "/usr/local/lib/python3.6/site-packages/django/db/migrations/serializer.py", line 332, in serializer_factory
value = force_text(value)
File "/usr/local/lib/python3.6/site-packages/django/utils/encoding.py", line 76, in force_text
s = six.text_type(s)
File "/usr/local/lib/python3.6/site-packages/django/utils/functional.py", line 119, in __text_cast
return func(*self.__args, **self.__kw)
TypeError: ugettext() got an unexpected keyword argument 'default'
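A hedged guess at the cause, based on the last frames: the migration writer is forcing a lazy translation object to text, and that lazy object was created with a default keyword that ugettext() does not accept. The field below is a hypothetical illustration; grep your code for translation calls passing default=:

from django.db import models
from django.utils.translation import ugettext_lazy as _

# Broken: ugettext()/ugettext_lazy() take no 'default' keyword; the lazy
# object stores the stray kwarg and only blows up when it is evaluated.
# name = models.CharField(verbose_name=_('customer name', default='Customer name'), max_length=64)

# Fixed: pass just the message string
name = models.CharField(verbose_name=_('customer name'), max_length=64)

Removing the default= argument from the translation call should let makemigrations serialize the field.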