Using Numba in Flink Python UDFs - apache-flink

I'd like to use a Python library (pyod, latest) that depends on Numba (>= 0.50) inside a UDF. I created an aggregate UDF in Python, and I am not new to the concept.
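For context, a minimal, hypothetical sketch of the kind of aggregate UDF involved (the real dbscan_udf.py named in the traceback is more involved; class, column, and type choices here are illustrative only):

# Hedged sketch of a general aggregate UDF whose module imports pyod.
# The worker-side failure happens while unpickling the UDF, as soon as
# the top-level pyod import runs (it pulls in numba).
import numpy as np

from pyflink.table import DataTypes
from pyflink.table.udf import AggregateFunction, udaf

from pyod.models.ecod import ECOD  # transitively imports numba


class OutlierScore(AggregateFunction):

    def create_accumulator(self):
        return []  # collect the raw values seen in the window

    def accumulate(self, accumulator, value):
        accumulator.append(float(value))

    def get_value(self, accumulator):
        if len(accumulator) < 2:
            return 0.0
        X = np.array(accumulator).reshape(-1, 1)
        return float(ECOD().fit(X).decision_scores_[-1])


outlier_score = udaf(OutlierScore(),
                     result_type=DataTypes.DOUBLE(),
                     accumulator_type=DataTypes.ARRAY(DataTypes.DOUBLE()),
                     func_type="general")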
I got the following error while the job was starting, immediately after submitting it.
Caused by: java.lang.RuntimeException: Error received from SDK harness for instruction 1: Traceback (most recent call last):
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/sdk_worker.py", line 289, in _execute
response = task()
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/sdk_worker.py", line 362, in <lambda>
lambda: self.create_worker().do_instruction(request), request)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/sdk_worker.py", line 606, in do_instruction
return getattr(self, request_type)(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/sdk_worker.py", line 637, in process_bundle
bundle_processor = self.bundle_processor_cache.get(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/sdk_worker.py", line 463, in get
processor = bundle_processor.BundleProcessor(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 868, in __init__
self.ops = self.create_execution_tree(self.process_bundle_descriptor)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 921, in create_execution_tree
return collections.OrderedDict([(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 924, in <listcomp>
get_operation(transform_id))) for transform_id in sorted(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 812, in wrapper
result = cache[args] = func(*args)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 903, in get_operation
transform_consumers = {
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 904, in <dictcomp>
tag: [get_operation(op) for op in pcoll_consumers[pcoll_id]]
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 904, in <listcomp>
tag: [get_operation(op) for op in pcoll_consumers[pcoll_id]]
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 812, in wrapper
result = cache[args] = func(*args)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 908, in get_operation
return transform_factory.create_operation(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 1198, in create_operation
return creator(self, transform_id, transform_proto, payload, consumers)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/beam/beam_operations.py", line 89, in create_group_window_aggregate_function
return _create_user_defined_function_operation(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/beam/beam_operations.py", line 174, in _create_user_defined_function_operation
return beam_operation_cls(
File "pyflink/fn_execution/beam/beam_operations_fast.pyx", line 210, in pyflink.fn_execution.beam.beam_operations_fast.StatefulFunctionOperation.__init__
File "pyflink/fn_execution/beam/beam_operations_fast.pyx", line 129, in pyflink.fn_execution.beam.beam_operations_fast.FunctionOperation.__init__
File "pyflink/fn_execution/beam/beam_operations_fast.pyx", line 214, in pyflink.fn_execution.beam.beam_operations_fast.StatefulFunctionOperation.generate_operation
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/table/operations.py", line 446, in __init__
super(StreamGroupWindowAggregateOperation, self).__init__(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/table/operations.py", line 309, in __init__
super(AbstractStreamGroupAggregateOperation, self).__init__(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/table/operations.py", line 281, in __init__
super(BaseStatefulOperation, self).__init__(serialized_fn)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/table/operations.py", line 80, in __init__
self.func, self.user_defined_funcs = self.generate_func(serialized_fn)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/table/operations.py", line 329, in generate_func
extract_user_defined_aggregate_function(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/utils/operation_utils.py", line 221, in extract_user_defined_aggregate_function
user_defined_agg = load_aggregate_function(user_defined_function_proto.payload)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/utils/operation_utils.py", line 281, in load_aggregate_function
return pickle.loads(payload)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/pickle.py", line 29, in loads
return cloudpickle.loads(payload)
File "/tmp/python-dist-ca64683e-f3c8-4ff9-b2a8-8c95c5d508bd/python-files/blob_p-1eee456524b0a216bf998cb36288df034d60c922-5797c5572fd29f1e17b5dd686b627324/dbscan_udf.py", line 31, in <module>
from pyod.models.ecod import ECOD
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyod/__init__.py", line 4, in <module>
from . import utils
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyod/utils/__init__.py", line 12, in <module>
from .stat_models import pairwise_distances_no_broadcast
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyod/utils/stat_models.py", line 11, in <module>
from numba import njit
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/__init__.py", line 38, in <module>
from numba.core.decorators import (cfunc, generated_jit, jit, njit, stencil,
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/core/decorators.py", line 12, in <module>
from numba.stencils.stencil import stencil
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/stencils/stencil.py", line 11, in <module>
from numba.core import types, typing, utils, ir, config, ir_utils, registry
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/core/registry.py", line 4, in <module>
from numba.core import utils, typing, dispatcher, cpu
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/core/dispatcher.py", line 13, in <module>
from numba.core import (
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/core/compiler.py", line 6, in <module>
from numba.core import (utils, errors, typing, interpreter, bytecode, postproc,
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/core/callconv.py", line 12, in <module>
from numba.core.base import PYOBJECT, GENERIC_POINTER
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/core/base.py", line 24, in <module>
from numba.cpython import builtins
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/cpython/builtins.py", line 524, in <module>
from numba.core.typing.builtins import IndexValue, IndexValueType
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/core/typing/builtins.py", line 22, in <module>
@infer_global(print)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/core/typing/templates.py", line 1278, in register_global
if getattr(mod, val.__name__) is not val:
AttributeError: module 'pyflink.fn_execution.beam.beam_sdk_worker_main' has no attribute 'print'
My library versions (the important ones):
numba==0.55.1
numpy==1.19.5
apache-beam==2.27.0
apache-flink==1.15.1
pyod==1.0.4
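For reference, one common way to pin and ship versions like these to the PyFlink workers (a hedged sketch, not necessarily what this job used; paths are illustrative):

# Hypothetical: make pinned requirements available on the TaskManagers.
# table_env is an existing (Stream)TableEnvironment.
table_env.set_python_requirements(
    requirements_file_path="requirements.txt",
    requirements_cache_dir="cached_dir")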
Since it is a very strange error raised from inside Beam, I cannot interpret it.
Does anyone have any idea?

Related

How to solve the error AttributeError: 'ir.http' object has no attribute 'webclient_rendering_context'?

I got this error when I try to run Odoo via the terminal, after configuring Odoo and Python in Eclipse and creating the database. Here is the full error reference.
2021-06-08 13:51:17,499 14016 ERROR odoo_13A werkzeug: Error on request:
Traceback (most recent call last):
File "/home/davemax/.local/lib/python3.8/site-packages/werkzeug/serving.py", line 270, in run_wsgi
execute(self.server.app)
File "/home/davemax/.local/lib/python3.8/site-packages/werkzeug/serving.py", line 258, in execute
application_iter = app(environ, start_response)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/service/server.py", line 439, in app
return self.app(e, s)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/service/wsgi_server.py", line 142, in application
return application_unproxied(environ, start_response)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/service/wsgi_server.py", line 117, in application_unproxied
result = odoo.http.root(environ, start_response)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/http.py", line 1287, in __call__
return self.dispatch(environ, start_response)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/http.py", line 1257, in __call__
return self.app(environ, start_wrapped)
File "/home/davemax/.local/lib/python3.8/site-packages/werkzeug/wsgi.py", line 766, in __call__
return self.app(environ, start_response)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/http.py", line 1457, in dispatch
result = ir_http._dispatch()
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/addons/base/models/ir_http.py", line 238, in _dispatch
return cls._handle_exception(e)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/addons/base/models/ir_http.py", line 206, in _handle_exception
return request._handle_exception(exception)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/http.py", line 750, in _handle_exception
return super(HttpRequest, self)._handle_exception(exception)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/http.py", line 310, in _handle_exception
raise pycompat.reraise(type(exception), exception, sys.exc_info()[2])
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/tools/pycompat.py", line 14, in reraise
raise value
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/addons/base/models/ir_http.py", line 234, in _dispatch
result = request.dispatch()
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/http.py", line 809, in dispatch
r = self._call_function(**self.params)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/http.py", line 350, in _call_function
return checked_call(self.db, *args, **kwargs)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/service/model.py", line 94, in wrapper
return f(dbname, *args, **kwargs)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/http.py", line 339, in checked_call
result = self.endpoint(*a, **kw)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/http.py", line 915, in __call__
return self.method(*args, **kw)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/odoo/http.py", line 515, in response_wrap
response = f(*args, **kw)
File "/home/davemax/odoo/workspace/odoo_13/odoo_13/addons/web/controllers/main.py", line 844, in web_client
context = request.env['ir.http'].webclient_rendering_context()
AttributeError: 'ir.http' object has no attribute 'webclient_rendering_context'
This is the full traceback. Please help me.
Run your server with -d cphhr_test -u all, like this:
/home/sadid/venv/bin/python3 /home/sadid/odoo-bin -c /home/sadid/odoo.conf -d cphhr_test -u all

PyFlink Expected IPC message of type schema but got record batch

Feature: sliding windows of size 10 minutes that slide by 5 minutes for data aggregation, then further processing; almost 2 GB of data per window, about 1 million data items.
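A hypothetical sketch of a window of that shape in the PyFlink Table API (the table t, its event-time attribute, and the grouping key are illustrative, not taken from the job):

from pyflink.table.expressions import col, lit
from pyflink.table.window import Slide

# assuming `t` is a Table with an event-time attribute named "rowtime"
result = (
    t.window(Slide.over(lit(10).minutes)
                  .every(lit(5).minutes)
                  .on(col("rowtime"))
                  .alias("w"))
     .group_by(col("w"), col("key"))
     .select(col("key"), col("w").end, col("value").count)
)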
Job params:
bin/yarn-session.sh -s 2 -jm 2048 -tm 48768 \
-Dyarn.containers.vcores=4 \
-Dtaskmanager.memory.managed.consumer-weights=DATAPROC:30,PYTHON:70 \
-Dtaskmanager.memory.managed.fraction=0.7 \
-Dtaskmanager.memory.task.off-heap.size=5120m \
-nm $task_name -qu $queue -d
The exception message is as follows:
Traceback (most recent call last):
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/apache_beam/runners/worker/sdk_worker.py", line 253, in _execute
response = task()
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/apache_beam/runners/worker/sdk_worker.py", line 310, in <lambda>
lambda: self.create_worker().do_instruction(request), request)
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/apache_beam/runners/worker/sdk_worker.py", line 480, in do_instruction
getattr(request, request_type), request.instruction_id)
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/apache_beam/runners/worker/sdk_worker.py", line 515, in process_bundle
bundle_processor.process_bundle(instruction_id))
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/apache_beam/runners/worker/bundle_processor.py", line 978, in process_bundle
element.data)
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/apache_beam/runners/worker/bundle_processor.py", line 218, in process_encoded
self.output(decoded_value)
File "apache_beam/runners/worker/operations.py", line 330, in apache_beam.runners.worker.operations.Operation.output
File "apache_beam/runners/worker/operations.py", line 332, in apache_beam.runners.worker.operations.Operation.output
File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
File "pyflink/fn_execution/beam/beam_operations_fast.pyx", line 71, in pyflink.fn_execution.beam.beam_operations_fast.FunctionOperation.process
File "pyflink/fn_execution/beam/beam_operations_fast.pyx", line 73, in pyflink.fn_execution.beam.beam_operations_fast.FunctionOperation.process
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/pyflink/fn_execution/beam/beam_coder_impl_slow.py", line 627, in decode_from_stream
yield self._decode_one_batch_from_stream(in_stream, in_stream.read_var_int64())
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/pyflink/fn_execution/beam/beam_coder_impl_slow.py", line 638, in _decode_one_batch_from_stream
return arrow_to_pandas(self._timezone, self._field_types, [next(self._batch_reader)])
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/pyflink/fn_execution/beam/beam_coder_impl_slow.py", line 631, in _load_from_stream
reader = pa.ipc.open_stream(stream)
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/pyarrow/ipc.py", line 137, in open_stream
return RecordBatchStreamReader(source)
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/pyarrow/ipc.py", line 61, in __init__
self._open(source)
File "pyarrow/ipc.pxi", line 352, in pyarrow.lib._RecordBatchStreamReader._open
File "pyarrow/error.pxi", line 99, in pyarrow.lib.check_status
OSError: Expected IPC message of type schema but got record batch
Yes, this is indeed a bug; please refer to FLINK-21208.

Migration fails on Django 1.11.20 with ugettext() got an unexpected keyword argument 'default'

I tried to make a migration on a Django project, version 1.11.20, but I get an error and I don't understand where it comes from.
There must have been migrations before, because the project works; I just can't add modifications to the project and apply a new migration.
Traceback (most recent call last):
File "manage.py", line 22, in <module>
execute_from_command_line(sys.argv)
File "/usr/local/lib/python3.6/site-packages/django/core/management/__init__.py", line 364, in execute_from_command_line
utility.execute()
File "/usr/local/lib/python3.6/site-packages/django/core/management/__init__.py", line 356, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/usr/local/lib/python3.6/site-packages/django/core/management/base.py", line 283, in run_from_argv
self.execute(*args, **cmd_options)
File "/usr/local/lib/python3.6/site-packages/django/core/management/base.py", line 330, in execute
output = self.handle(*args, **options)
File "/usr/local/lib/python3.6/site-packages/django/core/management/commands/makemigrations.py", line 193, in handle
self.write_migration_files(changes)
File "/usr/local/lib/python3.6/site-packages/django/core/management/commands/makemigrations.py", line 231, in write_migration_files
migration_string = writer.as_string()
File "/usr/local/lib/python3.6/site-packages/django/db/migrations/writer.py", line 163, in as_string
operation_string, operation_imports = OperationWriter(operation).serialize()
File "/usr/local/lib/python3.6/site-packages/django/db/migrations/writer.py", line 120, in serialize
_write(arg_name, arg_value)
File "/usr/local/lib/python3.6/site-packages/django/db/migrations/writer.py", line 72, in _write
arg_string, arg_imports = MigrationWriter.serialize(item)
File "/usr/local/lib/python3.6/site-packages/django/db/migrations/writer.py", line 293, in serialize
return serializer_factory(value).serialize()
File "/usr/local/lib/python3.6/site-packages/django/db/migrations/serializer.py", line 44, in serialize
item_string, item_imports = serializer_factory(item).serialize()
File "/usr/local/lib/python3.6/site-packages/django/db/migrations/serializer.py", line 229, in serialize
return self.serialize_deconstructed(path, args, kwargs)
File "/usr/local/lib/python3.6/site-packages/django/db/migrations/serializer.py", line 101, in serialize_deconstructed
arg_string, arg_imports = serializer_factory(arg).serialize()
File "/usr/local/lib/python3.6/site-packages/django/db/migrations/serializer.py", line 332, in serializer_factory
value = force_text(value)
File "/usr/local/lib/python3.6/site-packages/django/utils/encoding.py", line 76, in force_text
s = six.text_type(s)
File "/usr/local/lib/python3.6/site-packages/django/utils/functional.py", line 119, in __text_cast
return func(*self.__args, **self.__kw)
TypeError: ugettext() got an unexpected keyword argument 'default'
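Judging from the last frames (force_text evaluating a lazy translation via __text_cast), a minimal, hypothetical reproduction of this failure mode would look like the following; the default= keyword is stored by the lazy proxy and only surfaces when the migration writer forces the value to text:

# Hypothetical repro: ugettext() takes no `default` kwarg, but the lazy
# proxy stores it and only passes it through on evaluation.
from django.utils.translation import ugettext_lazy

label = ugettext_lazy("My label", default="My label")  # no error yet
str(label)  # TypeError: ugettext() got an unexpected keyword argument 'default'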

Wagtail TypeError: 'parent' is an invalid keyword argument for this function

I got the following error, and the problem is that I do not know what causes it; I cannot post the whole code. I use the method described here: Inline Panels and Model Clusters. If it is possible to find out what exactly leads to this error, please tell me and I'll post the code.
Just in case, here is a link to the model: gist
And here is my wagtail_hooks.py: gist2
$ py manage.py check
Traceback (most recent call last):
File "manage.py", line 12, in <module>
execute_from_command_line(sys.argv)
File "C:\Users\xakep\GitHub\treichle_cup\env\lib\site-packages\django\core\management\__init__.py", line 364, in execute_from_command_line
utility.execute()
File "C:\Users\xakep\GitHub\treichle_cup\env\lib\site-packages\django\core\management\__init__.py", line 356, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "C:\Users\xakep\GitHub\treichle_cup\env\lib\site-packages\django\core\management\base.py", line 283, in run_from_argv
self.execute(*args, **cmd_options)
File "C:\Users\xakep\GitHub\treichle_cup\env\lib\site-packages\django\core\management\base.py", line 330, in execute
output = self.handle(*args, **options)
File "C:\Users\xakep\GitHub\treichle_cup\env\lib\site-packages\django\core\management\commands\check.py", line 68, in handle
fail_level=getattr(checks, options['fail_level']),
File "C:\Users\xakep\GitHub\treichle_cup\env\lib\site-packages\django\core\management\base.py", line 359, in check
include_deployment_checks=include_deployment_checks,
File "C:\Users\xakep\GitHub\treichle_cup\env\lib\site-packages\django\core\management\base.py", line 346, in _run_checks
return checks.run_checks(**kwargs)
File "C:\Users\xakep\GitHub\treichle_cup\env\lib\site-packages\django\core\checks\registry.py", line 81, in run_checks
new_errors = check(app_configs=app_configs)
File "C:\Users\xakep\GitHub\treichle_cup\env\lib\site-packages\django\core\checks\urls.py", line 16, in check_url_config
return check_resolver(resolver)
File "C:\Users\xakep\GitHub\treichle_cup\env\lib\site-packages\django\core\checks\urls.py", line 26, in check_resolver
return check_method()
File "C:\Users\xakep\GitHub\treichle_cup\env\lib\site-packages\django\urls\resolvers.py", line 254, in check
for pattern in self.url_patterns:
File "C:\Users\xakep\GitHub\treichle_cup\env\lib\site-packages\django\utils\functional.py", line 35, in __get__
res = instance.__dict__[self.name] = self.func(instance)
File "C:\Users\xakep\GitHub\treichle_cup\env\lib\site-packages\django\urls\resolvers.py", line 405, in url_patterns
patterns = getattr(self.urlconf_module, "urlpatterns", self.urlconf_module)
File "C:\Users\xakep\GitHub\treichle_cup\env\lib\site-packages\django\utils\functional.py", line 35, in __get__
res = instance.__dict__[self.name] = self.func(instance)
File "C:\Users\xakep\GitHub\treichle_cup\env\lib\site-packages\django\urls\resolvers.py", line 398, in urlconf_module
return import_module(self.urlconf_name)
File "C:\Users\xakep\GitHub\treichle_cup\env\lib\importlib\__init__.py", line 126, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 978, in _gcd_import
File "<frozen importlib._bootstrap>", line 961, in _find_and_load
File "<frozen importlib._bootstrap>", line 950, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 655, in _load_unlocked
File "<frozen importlib._bootstrap_external>", line 678, in exec_module
File "<frozen importlib._bootstrap>", line 205, in _call_with_frames_removed
File "C:\Users\xakep\GitHub\treichle_cup\treichle_cup\urls.py", line 7, in <module>
from wagtail.wagtailadmin import urls as wagtailadmin_urls
File "C:\Users\xakep\GitHub\treichle_cup\env\lib\site-packages\wagtail\wagtailadmin\urls\__init__.py", line 14, in <module>
from wagtail.wagtailadmin.api import urls as api_urls
File "C:\Users\xakep\GitHub\treichle_cup\env\lib\site-packages\wagtail\wagtailadmin\api\urls.py", line 13, in <module>
for fn in hooks.get_hooks('construct_admin_api'):
File "C:\Users\xakep\GitHub\treichle_cup\env\lib\site-packages\wagtail\wagtailcore\hooks.py", line 49, in get_hooks
search_for_hooks()
File "C:\Users\xakep\GitHub\treichle_cup\env\lib\site-packages\wagtail\wagtailcore\hooks.py", line 43, in search_for_hooks
list(get_app_submodules('wagtail_hooks'))
File "C:\Users\xakep\GitHub\treichle_cup\env\lib\site-packages\wagtail\utils\apps.py", line 25, in get_app_submodules
yield name, import_module('%s.%s' % (name, submodule_name))
File "C:\Users\xakep\GitHub\treichle_cup\env\lib\importlib\__init__.py", line 126, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "C:\Users\xakep\GitHub\treichle_cup\team_rooster\wagtail_hooks.py", line 47, in <module>
modeladmin_register(TournamentModelAdmin)
File "C:\Users\xakep\GitHub\treichle_cup\env\lib\site-packages\wagtail\contrib\modeladmin\options.py", line 594, in modeladmin_register
instance = modeladmin_class()
File "C:\Users\xakep\GitHub\treichle_cup\env\lib\site-packages\wagtail\contrib\modeladmin\options.py", line 522, in __init__
self.modeladmin_instances.append(ModelAdminClass(parent=self))
File "C:\Users\xakep\GitHub\treichle_cup\env\lib\site-packages\django\db\models\base.py", line 573, in __init__
raise TypeError("'%s' is an invalid keyword argument for this function" % list(kwargs)[0])
TypeError: 'parent' is an invalid keyword argument for this function
UPDATE
I found out that it is related to wagtail_hooks.py: I just removed my TournamentModelAdmin class, and when I run check again I don't get any errors.
As @lb-ben-johnston said, my mistake was that I added the model directly. So I changed my code from this:
class TournamentModelAdmin(ModelAdminGroup):
    menu_label = 'Tournament'
    menu_icon = 'folder-open-inverse'
    menu_order = 500
    items = (GoupstageTournamentModel,
             FinalphaseTournamentModel, TournamentPage)

modeladmin_register(TournamentModelAdmin)
To:
class TournamentModelAdmin(ModelAdmin):
    model = TournamentPage
    menu_label = 'Tournament'
    menu_icon = 'fa-trophy'
    list_display = ('starts_at', 'ends_at')
    list_per_page = 5
    search_fields = ('title', 'starts_at')

class GroupstageModelAdmin(ModelAdmin):
    model = GroupstageTournamentModel
    menu_label = 'Gruppenphase'
    menu_icon = 'fa-user-times'
    list_display = ('number', 'starts_at',
                    'team_1_dress', 'team_1', 'team_1_total_score', 'team_1_total_points',
                    'team_2_total_points', 'team_2_total_score', 'team_2', 'team_2_dress')
    list_per_page = 10
    list_filter = ('number', 'starts_at')
    search_fields = ('number', 'starts_at')

class FinalstageModelAdmin(ModelAdmin):
    model = FinalphaseTournamentModel
    menu_label = 'Finalphase'
    menu_icon = 'fa-user-plus'
    search_fields = ('number', 'starts_at')

class TreichleCupModelAdmin(ModelAdminGroup):
    menu_label = 'Treichle-Cup'
    menu_icon = 'folder-open-inverse'
    menu_order = 200
    items = (
        TournamentModelAdmin,
        GroupstageModelAdmin,
        FinalstageModelAdmin,
    )

modeladmin_register(TreichleCupModelAdmin)
And it works well! Thanks @lb-ben-johnston

Google App Engine: Bulkuploader: int64 too big error

I am getting this error while uploading data to the datastore using the bulkuploader. Data used to upload fine with the previous CSV file; the new CSV file has an extra field that contains a list of strings (e.g. A,B,E,G,E,F). The following is the error that I get.
Traceback (most recent call last):
File "/opt/google_appengine_1.6.4/google/appengine/tools/adaptive_thread_pool.py", line 176, in WorkOnItems
status, instruction = item.PerformWork(self.__thread_pool)
File "/opt/google_appengine_1.6.4/google/appengine/tools/bulkloader.py", line 764, in PerformWork
transfer_time = self._TransferItem(thread_pool)
File "/opt/google_appengine_1.6.4/google/appengine/tools/bulkloader.py", line 935, in _TransferItem
self.request_manager.PostEntities(self.content)
File "/opt/google_appengine_1.6.4/google/appengine/tools/bulkloader.py", line 1420, in PostEntities
datastore.Put(entities)
File "/opt/google_appengine_1.6.4/google/appengine/api/datastore.py", line 576, in Put
return PutAsync(entities, **kwargs).get_result()
File "/opt/google_appengine_1.6.4/google/appengine/datastore/datastore_rpc.py", line 786, in get_result
results = self.__rpcs[0].get_result()
File "/opt/google_appengine_1.6.4/google/appengine/api/apiproxy_stub_map.py", line 592, in get_result
return self.__get_result_hook(self)
File "/opt/google_appengine_1.6.4/google/appengine/datastore/datastore_rpc.py", line 1556, in __put_hook
self.check_rpc_success(rpc)
File "/opt/google_appengine_1.6.4/google/appengine/datastore/datastore_rpc.py", line 1191, in check_rpc_success
rpc.check_success()
File "/opt/google_appengine_1.6.4/google/appengine/api/apiproxy_stub_map.py", line 558, in check_success
self.__rpc.CheckSuccess()
File "/opt/google_appengine_1.6.4/google/appengine/api/apiproxy_rpc.py", line 156, in _WaitImpl
self.request, self.response)
File "/opt/google_appengine_1.6.4/google/appengine/ext/remote_api/remote_api_stub.py", line 248, in MakeSyncCall
handler(request, response)
File "/opt/google_appengine_1.6.4/google/appengine/ext/remote_api/remote_api_stub.py", line 397, in _Dynamic_Put
'datastore_v3', 'Put', put_request, put_response)
File "/opt/google_appengine_1.6.4/google/appengine/ext/remote_api/remote_api_stub.py", line 177, in MakeSyncCall
self._MakeRealSyncCall(service, call, request, response)
File "/opt/google_appengine_1.6.4/google/appengine/ext/remote_api/remote_api_stub.py", line 185, in _MakeRealSyncCall
request_pb.set_request(request.Encode())
File "/opt/google_appengine_1.6.4/google/net/proto/ProtocolBuffer.py", line 56, in Encode
self.Output(e)
File "/opt/google_appengine_1.6.4/google/net/proto/ProtocolBuffer.py", line 205, in Output
self.OutputUnchecked(e)
File "/opt/google_appengine_1.6.4/google/appengine/datastore/datastore_pb.py", line 4400, in OutputUnchecked
self.entity_[i].OutputUnchecked(out)
File "/opt/google_appengine_1.6.4/google/appengine/datastore/entity_pb.py", line 2380, in OutputUnchecked
self.property_[i].OutputUnchecked(out)
File "/opt/google_appengine_1.6.4/google/appengine/datastore/entity_pb.py", line 1307, in OutputUnchecked
self.value_.OutputUnchecked(out)
File "/opt/google_appengine_1.6.4/google/appengine/datastore/entity_pb.py", line 945, in OutputUnchecked
self.referencevalue_.OutputUnchecked(out)
File "/opt/google_appengine_1.6.4/google/appengine/datastore/entity_pb.py", line 675, in OutputUnchecked
self.pathelement_[i].OutputUnchecked(out)
File "/opt/google_appengine_1.6.4/google/appengine/datastore/entity_pb.py", line 135, in OutputUnchecked
out.putVarInt64(self.id_)
File "/opt/google_appengine_1.6.4/google/net/proto/ProtocolBuffer.py", line 402, in putVarInt64
raise ProtocolBufferEncodeError, "int64 too big"
Changing the data type of the problematic entries from IntegerProperty to StringProperty might help.
I was having the same problem, as I was storing user_id for the Users entity as an integer; when confronted with a bigger number, it simply couldn't hold it. So I am storing it as a string now.
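A minimal sketch of that change with the old GAE db API (model and field names are illustrative):

from google.appengine.ext import db

class UserRecord(db.Model):
    # user_id = db.IntegerProperty()  # breaks once the value exceeds int64
    user_id = db.StringProperty()     # strings have no 64-bit limit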
