Snowflake Python connection KeyError: 'snowflake-connector-python' - snowflake-cloud-data-platform

I have installed the snowflake-sqlalchemy package and successfully developed a script that uploads a CSV file to Snowflake.
But when I upload the script to AWS Glue and supply the wheel file, I encounter KeyError: 'snowflake-connector-python'.
Any ideas?
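For reference, the load step in the script is essentially the standard snowflake-sqlalchemy pattern; a minimal sketch with placeholder names and credentials (not the exact script) looks like this:
import pandas as pd
from sqlalchemy import create_engine
from snowflake.sqlalchemy import URL

# Placeholder account and credentials; the real values come from the job configuration.
engine = create_engine(URL(
    account='my_account',
    user='my_user',
    password='my_password',
    database='my_database',
    schema='my_schema',
    warehouse='my_warehouse',
))

df = pd.read_csv('my_file.csv')
df.to_sql('my_table', con=engine, if_exists='append', index=False)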
Here is the full error message:
Traceback (most recent call last):
File "/tmp/runscript.py", line 123, in <module>
runpy.run_path(temp_file_path, run_name='__main__')
File "/usr/local/lib/python3.6/runpy.py", line 263, in run_path
pkg_name=pkg_name, script_name=fname)
File "/usr/local/lib/python3.6/runpy.py", line 96, in _run_module_code
mod_name, mod_spec, pkg_name, script_name)
File "/usr/local/lib/python3.6/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/tmp/glue-python-scripts-e14jn3mj/etl_job_pchome_supply_chain.py", line 250, in <module>
File "/tmp/glue-python-scripts-e14jn3mj/etl_job_pchome_supply_chain.py", line 39, in main
File "/tmp/glue-python-scripts-e14jn3mj/etl_job_pchome_supply_chain.py", line 233, in load_data
File "/glue/lib/installation/sqlalchemy/engine/__init__.py", line 479, in create_engine
return strategy.create(*args, **kwargs)
File "/glue/lib/installation/sqlalchemy/engine/strategies.py", line 61, in create
entrypoint = u._get_entrypoint()
File "/glue/lib/installation/sqlalchemy/engine/url.py", line 172, in _get_entrypoint
cls = registry.load(name)
File "/glue/lib/installation/sqlalchemy/util/langhelpers.py", line 222, in load
return self.impls[name]()
File "/glue/lib/installation/sqlalchemy/util/langhelpers.py", line 245, in load
mod = compat.import_(modulepath)
File "/glue/lib/installation/snowflake/sqlalchemy/__init__.py", line 8, in <module>
from . import snowdialect
File "/glue/lib/installation/snowflake/sqlalchemy/snowdialect.py", line 27, in <module>
from snowflake.connector.constants import UTF8
File "/glue/lib/installation/snowflake/connector/__init__.py", line 17, in <module>
from .connection import SnowflakeConnection
File "/glue/lib/installation/snowflake/connector/connection.py", line 43, in <module>
from .cursor import LOG_MAX_QUERY_LENGTH, SnowflakeCursor
File "/glue/lib/installation/snowflake/connector/cursor.py", line 47, in <module>
from .arrow_result import ArrowResult
File "src/snowflake/connector/arrow_result.pyx", line 16, in init snowflake.connector.arrow_result
File "/glue/lib/installation/snowflake/connector/options.py", line 36, in <module>
_pandas_extras = pkg_resources.working_set.by_key['snowflake-connector-python']._dep_map['pandas']
KeyError: 'snowflake-connector-python'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/tmp/runscript.py", line 142, in <module>
raise e_type(e_value).with_traceback(new_stack)
File "/tmp/glue-python-scripts-e14jn3mj/etl_job_pchome_supply_chain.py", line 250, in <module>
File "/tmp/glue-python-scripts-e14jn3mj/etl_job_pchome_supply_chain.py", line 39, in main
File "/tmp/glue-python-scripts-e14jn3mj/etl_job_pchome_supply_chain.py", line 233, in load_data
File "/glue/lib/installation/sqlalchemy/engine/__init__.py", line 479, in create_engine
return strategy.create(*args, **kwargs)
File "/glue/lib/installation/sqlalchemy/engine/strategies.py", line 61, in create
entrypoint = u._get_entrypoint()
File "/glue/lib/installation/sqlalchemy/engine/url.py", line 172, in _get_entrypoint
cls = registry.load(name)
File "/glue/lib/installation/sqlalchemy/util/langhelpers.py", line 222, in load
return self.impls[name]()
File "/glue/lib/installation/sqlalchemy/util/langhelpers.py", line 245, in load
mod = compat.import_(modulepath)
File "/glue/lib/installation/snowflake/sqlalchemy/__init__.py", line 8, in <module>
from . import snowdialect
File "/glue/lib/installation/snowflake/sqlalchemy/snowdialect.py", line 27, in <module>
from snowflake.connector.constants import UTF8
File "/glue/lib/installation/snowflake/connector/__init__.py", line 17, in <module>
from .connection import SnowflakeConnection
File "/glue/lib/installation/snowflake/connector/connection.py", line 43, in <module>
from .cursor import LOG_MAX_QUERY_LENGTH, SnowflakeCursor
File "/glue/lib/installation/snowflake/connector/cursor.py", line 47, in <module>
from .arrow_result import ArrowResult
File "src/snowflake/connector/arrow_result.pyx", line 16, in init snowflake.connector.arrow_result
File "/glue/lib/installation/snowflake/connector/options.py", line 36, in <module>
_pandas_extras = pkg_resources.working_set.by_key['snowflake-connector-python']._dep_map['pandas']
KeyError: KeyError('snowflake-connector-python',)
And this is my requirement.txt
snowflake-connector-python
snowflake-sqlalchemy
sqlalchemy
pyarrow
pandas
asn1crypto==1.4.0
awscli==1.18.140
awswrangler==1.9.3
azure-common==1.1.25
azure-core==1.8.1
azure-storage-blob==12.5.0
boto3==1.14.63
botocore==1.17.63
certifi==2020.6.20
cffi==1.14.2
chardet==3.0.4
colorama==0.4.3 ; python_version != '3.4'
cryptography==2.9.2
docutils==0.15.2
fsspec==0.8.2
idna==2.9
isodate==0.6.0
jmespath==0.10.0
msrest==0.6.19
numpy==1.19.2
oauthlib==3.1.0
oscrypto==1.2.1
packaging==20.4 ; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'
psycopg2-binary==2.8.6
pyasn1==0.4.8
pycparser==2.20
pycryptodomex==3.9.8
pyjwt==1.7.1
pymysql==0.9.0
pyopenssl==19.1.0
pyparsing==2.4.7
python-dateutil==2.8.1
pytz==2020.1
pyyaml==5.3.1 ; python_version != '3.4'
requests-oauthlib==1.3.0
requests==2.23.0
rsa==4.5 ; python_version != '3.4'
s3fs==0.4.2
s3transfer==0.3.3
six==1.15.0
sqlalchemy-redshift==0.8.1
urllib3==1.25.10
xlrd==1.2.0
From the log file, here are the packages that were installed:
Successfully installed asn1crypto-1.4.0 awscli-1.18.140 awswrangler-1.9.3 azure-common-1.1.25 azure-core-1.8.1 azure-storage-blob-12.5.0 boto3-1.14.63 botocore-1.17.63 certifi-2020.6.20 cffi-1.14.2 chardet-3.0.4 colorama-0.4.3 cryptography-2.9.2 dependencies-0.1.0 docutils-0.15.2 fsspec-0.8.2 idna-2.9 isodate-0.6.0 jmespath-0.10.0 msrest-0.6.19 numpy-1.19.2 oauthlib-3.1.0 oscrypto-1.2.1 packaging-20.4 pandas-1.1.2 psycopg2-binary-2.8.6 pyarrow-1.0.1 pyasn1-0.4.8 pycparser-2.20 pycryptodomex-3.9.8 pyjwt-1.7.1 pymysql-0.9.0 pyopenssl-19.1.0 pyparsing-2.4.7 python-dateutil-2.8.1 pytz-2020.1 pyyaml-5.3.1 requests-2.23.0 requests-oauthlib-1.3.0 rsa-4.5 s3fs-0.4.2 s3transfer-0.3.3 six-1.15.0 snowflake-connector-python-2.3.2 snowflake-sqlalchemy-1.2.3 sqlalchemy-1.3.19 sqlalchemy-redshift-0.8.1 urllib3-1.25.10 xlrd-1.2.0

I recommend you do a "clean" build of all the main libraries, including the pyarrow library.
e.g.
pip install --upgrade --force-reinstall pandas
pip install --upgrade --force-reinstall pyarrow
pip install --upgrade --force-reinstall snowflake-connector-python
pip install --upgrade --force-reinstall sqlalchemy
pip install --upgrade --force-reinstall snowflake-sqlalchemy
There have been issues here and there with old or missing versions of pyarrow, and I believe even the order of installation can be a problem. A fresh pip install, or ensuring you have the latest versions of these libraries, is a good way to go.
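One more thing that may be worth trying: the line that blows up in options.py is looking up the connector's own "pandas" extra in its installed metadata, so declaring that extra explicitly in requirement.txt might help, and it lets pip resolve a pyarrow that the connector itself declares support for rather than a separately pinned one. A hedged example, pinned to the versions from your install log:
snowflake-connector-python[pandas]==2.3.2
snowflake-sqlalchemy==1.2.3
sqlalchemy==1.3.19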

Related

Using Numba in Flink Python UDFs

I'd like to use a Python library (pyod, latest) in a UDF; the library depends on Numba (>= 0.50). I created an aggregate UDF in Python, and I am not new to the concept.
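The UDF is shaped roughly like this (a simplified stand-in, not my exact code; the point is that pyod pulls in Numba at import time on the worker):
from pyflink.table import DataTypes
from pyflink.table.udf import udaf

@udaf(result_type=DataTypes.BIGINT(), func_type="pandas")
def outlier_count(values):
    # pyod (and therefore numba) is imported lazily on the worker side
    import numpy as np
    from pyod.models.ecod import ECOD

    clf = ECOD()
    clf.fit(values.to_numpy().reshape(-1, 1))
    return int(clf.labels_.sum())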
I get an error immediately after submitting the job, while it is starting up.
Caused by: java.lang.RuntimeException: Error received from SDK harness for instruction 1: Traceback (most recent call last):
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/sdk_worker.py", line 289, in _execute
response = task()
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/sdk_worker.py", line 362, in <lambda>
lambda: self.create_worker().do_instruction(request), request)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/sdk_worker.py", line 606, in do_instruction
return getattr(self, request_type)(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/sdk_worker.py", line 637, in process_bundle
bundle_processor = self.bundle_processor_cache.get(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/sdk_worker.py", line 463, in get
processor = bundle_processor.BundleProcessor(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 868, in __init__
self.ops = self.create_execution_tree(self.process_bundle_descriptor)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 921, in create_execution_tree
return collections.OrderedDict([(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 924, in <listcomp>
get_operation(transform_id))) for transform_id in sorted(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 812, in wrapper
result = cache[args] = func(*args)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 903, in get_operation
transform_consumers = {
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 904, in <dictcomp>
tag: [get_operation(op) for op in pcoll_consumers[pcoll_id]]
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 904, in <listcomp>
tag: [get_operation(op) for op in pcoll_consumers[pcoll_id]]
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 812, in wrapper
result = cache[args] = func(*args)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 908, in get_operation
return transform_factory.create_operation(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/apache_beam/runners/worker/bundle_processor.py", line 1198, in create_operation
return creator(self, transform_id, transform_proto, payload, consumers)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/beam/beam_operations.py", line 89, in create_group_window_aggregate_function
return _create_user_defined_function_operation(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/beam/beam_operations.py", line 174, in _create_user_defined_function_operation
return beam_operation_cls(
File "pyflink/fn_execution/beam/beam_operations_fast.pyx", line 210, in pyflink.fn_execution.beam.beam_operations_fast.StatefulFunctionOperation.__init__
File "pyflink/fn_execution/beam/beam_operations_fast.pyx", line 129, in pyflink.fn_execution.beam.beam_operations_fast.FunctionOperation.__init__
File "pyflink/fn_execution/beam/beam_operations_fast.pyx", line 214, in pyflink.fn_execution.beam.beam_operations_fast.StatefulFunctionOperation.generate_operation
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/table/operations.py", line 446, in __init__
super(StreamGroupWindowAggregateOperation, self).__init__(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/table/operations.py", line 309, in __init__
super(AbstractStreamGroupAggregateOperation, self).__init__(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/table/operations.py", line 281, in __init__
super(BaseStatefulOperation, self).__init__(serialized_fn)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/table/operations.py", line 80, in __init__
self.func, self.user_defined_funcs = self.generate_func(serialized_fn)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/table/operations.py", line 329, in generate_func
extract_user_defined_aggregate_function(
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/utils/operation_utils.py", line 221, in extract_user_defined_aggregate_function
user_defined_agg = load_aggregate_function(user_defined_function_proto.payload)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/utils/operation_utils.py", line 281, in load_aggregate_function
return pickle.loads(payload)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyflink/fn_execution/pickle.py", line 29, in loads
return cloudpickle.loads(payload)
File "/tmp/python-dist-ca64683e-f3c8-4ff9-b2a8-8c95c5d508bd/python-files/blob_p-1eee456524b0a216bf998cb36288df034d60c922-5797c5572fd29f1e17b5dd686b627324/dbscan_udf.py", line 31, in <module>
from pyod.models.ecod import ECOD
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyod/__init__.py", line 4, in <module>
from . import utils
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyod/utils/__init__.py", line 12, in <module>
from .stat_models import pairwise_distances_no_broadcast
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/pyod/utils/stat_models.py", line 11, in <module>
from numba import njit
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/__init__.py", line 38, in <module>
from numba.core.decorators import (cfunc, generated_jit, jit, njit, stencil,
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/core/decorators.py", line 12, in <module>
from numba.stencils.stencil import stencil
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/stencils/stencil.py", line 11, in <module>
from numba.core import types, typing, utils, ir, config, ir_utils, registry
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/core/registry.py", line 4, in <module>
from numba.core import utils, typing, dispatcher, cpu
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/core/dispatcher.py", line 13, in <module>
from numba.core import (
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/core/compiler.py", line 6, in <module>
from numba.core import (utils, errors, typing, interpreter, bytecode, postproc,
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/core/callconv.py", line 12, in <module>
from numba.core.base import PYOBJECT, GENERIC_POINTER
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/core/base.py", line 24, in <module>
from numba.cpython import builtins
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/cpython/builtins.py", line 524, in <module>
from numba.core.typing.builtins import IndexValue, IndexValueType
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/core/typing/builtins.py", line 22, in <module>
@infer_global(print)
File "/home/ubuntu/miniconda3/envs/py38/lib/python3.8/site-packages/numba/core/typing/templates.py", line 1278, in register_global
if getattr(mod, val.__name__) is not val:
AttributeError: module 'pyflink.fn_execution.beam.beam_sdk_worker_main' has no attribute 'print'
My library versions (the important ones):
numba==0.55.1
numpy==1.19.5
apache-beam==2.27.0
apache-flink==1.15.1
pyod==1.0.4
Since it is a very strange error coming from Beam, I cannot interpret it.
Does anyone have any idea?

I can't create a new table in sqlite3 from Django

When I added new fields in models.py, their columns were not created in sqlite3. What did I do wrong this time? :)
I did run:
python manage.py makemigrations
python manage.py migrate
It happened when I didn't enter the requested default value after the message:
Please enter the default value now as valid Python.
The datetime and django.utils.timezone modules are available, so you can do e.g. timezone.now.
I just entered 1 and pressed Enter. After that it broke.
If you need more information, please let me know.
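For reference, the fields I am adding look roughly like this (a sketch reconstructed from the migration output further down; the field types are my guess, not the exact models.py):
from django.db import models

class Meetup(models.Model):
    # existing fields omitted
    participant = models.ManyToManyField('Participant', null=True)  # 'Participant' is a placeholder name
    dates = models.DateField()
    organizer_emails = models.EmailField()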
Environment:
Request Method: POST
Request URL: http://127.0.0.1:8000/admin/meetups/meetup/add/
Django Version: 3.2.8
Python Version: 3.10.0
Installed Applications:
['django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'meetups']
Installed Middleware:
['django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware']
Traceback (most recent call last):
File "C:\install\Projects_1\env\lib\site-packages\django\db\backends\utils.py", line 84, in _execute
return self.cursor.execute(sql, params)
File "C:\install\Projects_1\env\lib\site-packages\django\db\backends\sqlite3\base.py", line 423, in execute
return Database.Cursor.execute(self, query, params)
The above exception (table meetups_meetup has no column named enormus) was the direct cause of the following exception:
File "C:\install\Projects_1\env\lib\site-packages\django\core\handlers\exception.py", line 47, in inner
response = get_response(request)
File "C:\install\Projects_1\env\lib\site-packages\django\core\handlers\base.py", line 181, in _get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "C:\install\Projects_1\env\lib\site-packages\django\contrib\admin\options.py", line 616, in wrapper
return self.admin_site.admin_view(view)(*args, **kwargs)
File "C:\install\Projects_1\env\lib\site-packages\django\utils\decorators.py", line 130, in _wrapped_view
response = view_func(request, *args, **kwargs)
File "C:\install\Projects_1\env\lib\site-packages\django\views\decorators\cache.py", line 44, in _wrapped_view_func
response = view_func(request, *args, **kwargs)
File "C:\install\Projects_1\env\lib\site-packages\django\contrib\admin\sites.py", line 232, in inner
return view(request, *args, **kwargs)
File "C:\install\Projects_1\env\lib\site-packages\django\contrib\admin\options.py", line 1657, in add_view
return self.changeform_view(request, None, form_url, extra_context)
File "C:\install\Projects_1\env\lib\site-packages\django\utils\decorators.py", line 43, in _wrapper
return bound_method(*args, **kwargs)
File "C:\install\Projects_1\env\lib\site-packages\django\utils\decorators.py", line 130, in _wrapped_view
response = view_func(request, *args, **kwargs)
File "C:\install\Projects_1\env\lib\site-packages\django\contrib\admin\options.py", line 1540, in changeform_view
return self._changeform_view(request, object_id, form_url, extra_context)
File "C:\install\Projects_1\env\lib\site-packages\django\contrib\admin\options.py", line 1586, in _changeform_view
self.save_model(request, new_object, form, not add)
File "C:\install\Projects_1\env\lib\site-packages\django\contrib\admin\options.py", line 1099, in save_model
obj.save()
File "C:\install\Projects_1\env\lib\site-packages\django\db\models\base.py", line 726, in save
self.save_base(using=using, force_insert=force_insert,
File "C:\install\Projects_1\env\lib\site-packages\django\db\models\base.py", line 763, in save_base
updated = self._save_table(
File "C:\install\Projects_1\env\lib\site-packages\django\db\models\base.py", line 868, in _save_table
results = self._do_insert(cls._base_manager, using, fields, returning_fields, raw)
File "C:\install\Projects_1\env\lib\site-packages\django\db\models\base.py", line 906, in _do_insert
return manager._insert(
File "C:\install\Projects_1\env\lib\site-packages\django\db\models\manager.py", line 85, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
File "C:\install\Projects_1\env\lib\site-packages\django\db\models\query.py", line 1270, in _insert
return query.get_compiler(using=using).execute_sql(returning_fields)
File "C:\install\Projects_1\env\lib\site-packages\django\db\models\sql\compiler.py", line 1416, in execute_sql
cursor.execute(sql, params)
File "C:\install\Projects_1\env\lib\site-packages\django\db\backends\utils.py", line 98, in execute
return super().execute(sql, params)
File "C:\install\Projects_1\env\lib\site-packages\django\db\backends\utils.py", line 66, in execute
return self._execute_with_wrappers(sql, params, many=False, executor=self._execute)
File "C:\install\Projects_1\env\lib\site-packages\django\db\backends\utils.py", line 75, in _execute_with_wrappers
return executor(sql, params, many, context)
File "C:\install\Projects_1\env\lib\site-packages\django\db\backends\utils.py", line 79, in _execute
with self.db.wrap_database_errors:
File "C:\install\Projects_1\env\lib\site-packages\django\db\utils.py", line 90, in __exit__
raise dj_exc_value.with_traceback(traceback) from exc_value
File "C:\install\Projects_1\env\lib\site-packages\django\db\backends\utils.py", line 84, in _execute
return self.cursor.execute(sql, params)
File "C:\install\Projects_1\env\lib\site-packages\django\db\backends\sqlite3\base.py", line 423, in execute
return Database.Cursor.execute(self, query, params)
Exception Type: OperationalError at /admin/meetups/meetup/add/
Exception Value: table meetups_meetup has no column named enormus
Errors when I run makemigrations and migrate:
(env) PS C:\install\Projects_1> python manage.py makemigrations
System check identified some issues:
WARNINGS:
meetups.Meetup.participant: (fields.W340) null has no effect on ManyToManyField.
You are trying to add a non-nullable field 'dates' to meetup without a default; we can't do that (the database needs something to populate existing rows).
Please select a fix:
1) Provide a one-off default now (will be set on all existing rows with a null value for this column)
2) Quit, and let me add a default in models.py
Select an option: 1
Please enter the default value now, as valid Python
The datetime and django.utils.timezone modules are available, so you can do e.g. timezone.now
Type 'exit' to exit this prompt
>>> '2021-10-10'
You are trying to add a non-nullable field 'organizer_emails' to meetup without a default; we can't do that (the database needs something to populate existing rows).
Please select a fix:
1) Provide a one-off default now (will be set on all existing rows with a null value for this column)
2) Quit, and let me add a default in models.py
Select an option: 1
The datetime and django.utils.timezone modules are available, so you can do e.g. timezone.now
Type 'exit' to exit this prompt
>>> test#.test.com
Invalid input: invalid syntax (<string>, line 1)
>>> 'test#.test.com'
Migrations for 'meetups':
meetups\migrations\0011_auto_20211026_2116.py
- Remove field enormus from meetup
- Add field dates to meetup
- Add field organizer_emails to meetup
(env) PS C:\install\Projects_1> python manage.py migrate
System check identified some issues:
WARNINGS:
meetups.Meetup.participant: (fields.W340) null has no effect on ManyToManyField.
Operations to perform:
Apply all migrations: admin, auth, contenttypes, meetups, sessions
Running migrations:
Applying meetups.0005_auto_20211026_1234...Traceback (most recent call last):
File "C:\install\Projects_1\manage.py", line 22, in <module>
main()
File "C:\install\Projects_1\manage.py", line 18, in main
execute_from_command_line(sys.argv)
File "C:\install\Projects_1\env\lib\site-packages\django\core\management\__init__.py", line 419, in execute_from_command_line
utility.execute()
File "C:\install\Projects_1\env\lib\site-packages\django\core\management\__init__.py", line 413, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "C:\install\Projects_1\env\lib\site-packages\django\core\management\base.py", line 354, in run_from_argv
self.execute(*args, **cmd_options)
File "C:\install\Projects_1\env\lib\site-packages\django\core\management\base.py", line 398, in execute
output = self.handle(*args, **options)
File "C:\install\Projects_1\env\lib\site-packages\django\core\management\base.py", line 89, in wrapped
res = handle_func(*args, **kwargs)
File "C:\install\Projects_1\env\lib\site-packages\django\core\management\commands\migrate.py", line 244, in handle
post_migrate_state = executor.migrate(
File "C:\install\Projects_1\env\lib\site-packages\django\db\migrations\executor.py", line 117, in migrate
state = self._migrate_all_forwards(state, plan, full_plan, fake=fake, fake_initial=fake_initial)
File "C:\install\Projects_1\env\lib\site-packages\django\db\migrations\executor.py", line 147, in _migrate_all_forwards
state = self.apply_migration(state, migration, fake=fake, fake_initial=fake_initial)
File "C:\install\Projects_1\env\lib\site-packages\django\db\migrations\executor.py", line 227, in apply_migration
state = migration.apply(state, schema_editor)
File "C:\install\Projects_1\env\lib\site-packages\django\db\migrations\migration.py", line 126, in apply
operation.database_forwards(self.app_label, schema_editor, old_state, project_state)
File "C:\install\Projects_1\env\lib\site-packages\django\db\migrations\operations\fields.py", line 104, in database_forwards
schema_editor.add_field(
File "C:\install\Projects_1\env\lib\site-packages\django\db\backends\sqlite3\schema.py", line 330, in add_field
self._remake_table(model, create_field=field)
File "C:\install\Projects_1\env\lib\site-packages\django\db\backends\sqlite3\schema.py", line 191, in _remake_table
self.effective_default(create_field)
File "C:\install\Projects_1\env\lib\site-packages\django\db\backends\base\schema.py", line 324, in effective_default
return field.get_db_prep_save(self._effective_default(field), self.connection)
File "C:\install\Projects_1\env\lib\site-packages\django\db\models\fields\__init__.py", line 842, in get_db_prep_save
return self.get_db_prep_value(value, connection=connection, prepared=False)
File "C:\install\Projects_1\env\lib\site-packages\django\db\models\fields\__init__.py", line 1271, in get_db_prep_value
value = self.get_prep_value(value)
File "C:\install\Projects_1\env\lib\site-packages\django\db\models\fields\__init__.py", line 1266, in get_prep_value
return self.to_python(value)
File "C:\install\Projects_1\env\lib\site-packages\django\db\models\fields\__init__.py", line 1228, in to_python
parsed = parse_date(value)
File "C:\install\Projects_1\env\lib\site-packages\django\utils\dateparse.py", line 75, in parse_date
match = date_re.match(value)
TypeError: expected string or bytes-like object

PyFlink Expected IPC message of type schema but got record batch

Feature: windows of size 10 minutes that slide by 5 minutes for data aggregation, then do something; almost 2 GB of data per window, about 1 million data items. A sketch of the window definition follows the job params below.
Job params:
bin/yarn-session.sh -s 2 -jm 2048 -tm 48768 \
-Dyarn.containers.vcores=4 \
-Dtaskmanager.memory.managed.consumer-weights=DATAPROC:30,PYTHON:70 \
-Dtaskmanager.memory.managed.fraction=0.7 \
-Dtaskmanager.memory.task.off-heap.size=5120m \
-nm $task_name -qu $queue -d
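The aggregation itself is the standard sliding-window pattern in the Table API, roughly like this (a simplified sketch; the table, columns, and UDAF are placeholders matching my description, not the exact job):
from pyflink.table.expressions import col, lit
from pyflink.table.window import Slide

# 10-minute windows sliding every 5 minutes over the event-time column "rowtime".
# t_env is the table environment created elsewhere in the job; my_udaf is the
# registered aggregate function that does the per-window work.
result = (
    t_env.from_path("source_table")
         .window(Slide.over(lit(10).minutes).every(lit(5).minutes).on(col("rowtime")).alias("w"))
         .group_by(col("w"), col("item_key"))
         .select(col("item_key"), my_udaf(col("value")).alias("agg_value"))
)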
The exception message is as follows:
Traceback (most recent call last):
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/apache_beam/runners/worker/sdk_worker.py", line 253, in _execute
response = task()
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/apache_beam/runners/worker/sdk_worker.py", line 310, in <lambda>
lambda: self.create_worker().do_instruction(request), request)
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/apache_beam/runners/worker/sdk_worker.py", line 480, in do_instruction
getattr(request, request_type), request.instruction_id)
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/apache_beam/runners/worker/sdk_worker.py", line 515, in process_bundle
bundle_processor.process_bundle(instruction_id))
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/apache_beam/runners/worker/bundle_processor.py", line 978, in process_bundle
element.data)
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/apache_beam/runners/worker/bundle_processor.py", line 218, in process_encoded
self.output(decoded_value)
File "apache_beam/runners/worker/operations.py", line 330, in apache_beam.runners.worker.operations.Operation.output
File "apache_beam/runners/worker/operations.py", line 332, in apache_beam.runners.worker.operations.Operation.output
File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
File "pyflink/fn_execution/beam/beam_operations_fast.pyx", line 71, in pyflink.fn_execution.beam.beam_operations_fast.FunctionOperation.process
File "pyflink/fn_execution/beam/beam_operations_fast.pyx", line 73, in pyflink.fn_execution.beam.beam_operations_fast.FunctionOperation.process
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/pyflink/fn_execution/beam/beam_coder_impl_slow.py", line 627, in decode_from_stream
yield self._decode_one_batch_from_stream(in_stream, in_stream.read_var_int64())
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/pyflink/fn_execution/beam/beam_coder_impl_slow.py", line 638, in _decode_one_batch_from_stream
return arrow_to_pandas(self._timezone, self._field_types, [next(self._batch_reader)])
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/pyflink/fn_execution/beam/beam_coder_impl_slow.py", line 631, in _load_from_stream
reader = pa.ipc.open_stream(stream)
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/pyarrow/ipc.py", line 137, in open_stream
return RecordBatchStreamReader(source)
File "/data1/hadoopdata/nodemanager/local/usercache/prod_intl_discount_car/appcache/application_1571902879759_12031/python-dist-2659d300-efda-4c34-863d-d5a3a8aa369f/python-archives/venv.zip/venv/lib/python3.7/site-packages/pyarrow/ipc.py", line 61, in __init__
self._open(source)
File "pyarrow/ipc.pxi", line 352, in pyarrow.lib._RecordBatchStreamReader._open
File "pyarrow/error.pxi", line 99, in pyarrow.lib.check_status
OSError: Expected IPC message of type schema but got record batch
Yes, this is indeed a bug; please refer to FLINK-21208.

Can't apply flask db migrate when using pyodbc with SQL Server. Error: Neither DSN nor SERVER keyword supplied (0). Connecting via pyodbc.connect works

I am trying to use Flask SQLAlchemy to connect to a SQL database hosted on Azure. Everything works when I use pyodbc.connect directly, like so:
import pyodbc
# I got the ODBC connection string directly from Azure's connection strings for the db.
odbc_string = 'Driver={ODBC Driver 17 for SQL Server};Server=tcp:sql-server-for-capstone.database.windows.net,1433;Database=MastersCapstone;Uid={my username};Pwd={my super secure password here};Encrypt=yes;TrustServerCertificate=no;Connection Timeout=30;'
conn = pyodbc.connect(odbc_string)
# Works!
But when I try to connect using Flask SQLAlchemy, I get:
sqlalchemy.exc.OperationalError: (pyodbc.OperationalError) ('08001', '[08001] [Microsoft][ODBC Driver 17 for SQL Server]Neither DSN nor SERVER keyword supplied (0) (SQLDriverConnect)')
The config I am using to connect via SQLAlchemy looks like this:
import urllib.parse

odbc_string = 'Driver={ODBC Driver 17 for SQL Server};Server=tcp:sql-server-for-capstone.database.windows.net,1433;Database=MastersCapstone;Uid={my username};Pwd={my super secure password here};Encrypt=yes;TrustServerCertificate=no;Connection Timeout=30;'
params = urllib.parse.quote_plus(odbc_string)
sqlalchemy_db_uri = f"mssql+pyodbc:///?odbc_connect={params}"

class Config:
    # Database
    SQLALCHEMY_DATABASE_URI = sqlalchemy_db_uri
And then I initialize the app like so in the directory's __init__.py file:
from flask import Flask
from flask_migrate import Migrate
from flask_sqlalchemy import SQLAlchemy
from src.python.server.config import Config
db = SQLAlchemy()
migrate = Migrate(compare_type=True)
def create_app():
    app = Flask(__name__)
    app.config.from_object(Config)
    db.init_app(app)
    migrate.init_app(app, db)
    with app.app_context():
        from src.python.server import routes, models
    return app
Finally, I get the error when I run the following in the terminal:
flask db migrate
Please note that everything worked fine when I was using a PostgreSQL database. Does anyone have any idea why this might be happening? I'm pretty stuck!
Additional info:
Output from running odbcinst -j in the terminal:
unixODBC 2.3.7
DRIVERS............: /usr/local/etc/odbcinst.ini
SYSTEM DATA SOURCES: /usr/local/etc/odbc.ini
FILE DATA SOURCES..: /usr/local/etc/ODBCDataSources
USER DATA SOURCES..: /Users/jordan/.odbc.ini
SQLULEN Size.......: 8
SQLLEN Size........: 8
SQLSETPOSIROW Size.: 8
Entire stack trace:
Traceback (most recent call last):
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/engine/base.py", line 2285, in _wrap_pool_connect
return fn()
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/pool/base.py", line 303, in unique_connection
return _ConnectionFairy._checkout(self)
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/pool/base.py", line 773, in _checkout
fairy = _ConnectionRecord.checkout(pool)
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/pool/base.py", line 492, in checkout
rec = pool._do_get()
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/pool/impl.py", line 238, in _do_get
return self._create_connection()
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/pool/base.py", line 308, in _create_connection
return _ConnectionRecord(self)
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/pool/base.py", line 437, in __init__
self.__connect(first_connect_check=True)
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/pool/base.py", line 657, in __connect
pool.logger.debug("Error on connect(): %s", e)
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/util/langhelpers.py", line 68, in __exit__
compat.raise_(
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/util/compat.py", line 178, in raise_
raise exception
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/pool/base.py", line 652, in __connect
connection = pool._invoke_creator(self)
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/engine/strategies.py", line 114, in connect
return dialect.connect(*cargs, **cparams)
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/engine/default.py", line 490, in connect
return self.dbapi.connect(*cargs, **cparams)
pyodbc.OperationalError: ('08001', '[08001] [Microsoft][ODBC Driver 17 for SQL Server]Neither DSN nor SERVER keyword supplied (0) (SQLDriverConnect)')
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/Users/jordan/Coding/survey-app/venv/bin/flask", line 8, in <module>
sys.exit(main())
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/flask/cli.py", line 967, in main
cli.main(args=sys.argv[1:], prog_name="python -m flask" if as_module else None)
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/flask/cli.py", line 586, in main
return super(FlaskGroup, self).main(*args, **kwargs)
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/click/decorators.py", line 21, in new_func
return f(get_current_context(), *args, **kwargs)
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/flask/cli.py", line 426, in decorator
return __ctx.invoke(f, *args, **kwargs)
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/flask_migrate/cli.py", line 89, in migrate
_migrate(directory, message, sql, head, splice, branch_label, version_path,
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/flask_migrate/__init__.py", line 96, in wrapped
f(*args, **kwargs)
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/flask_migrate/__init__.py", line 210, in migrate
command.revision(config, message, autogenerate=True, sql=sql,
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/alembic/command.py", line 214, in revision
script_directory.run_env()
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/alembic/script/base.py", line 489, in run_env
util.load_python_file(self.dir, "env.py")
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/alembic/util/pyfiles.py", line 98, in load_python_file
module = load_module_py(module_id, path)
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/alembic/util/compat.py", line 184, in load_module_py
spec.loader.exec_module(module)
File "<frozen importlib._bootstrap_external>", line 783, in exec_module
File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
File "migrations/env.py", line 96, in <module>
run_migrations_online()
File "migrations/env.py", line 81, in run_migrations_online
with connectable.connect() as connection:
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/engine/base.py", line 2218, in connect
return self._connection_cls(self, **kwargs)
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/engine/base.py", line 103, in __init__
else engine.raw_connection()
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/engine/base.py", line 2317, in raw_connection
return self._wrap_pool_connect(
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/engine/base.py", line 2288, in _wrap_pool_connect
Connection._handle_dbapi_exception_noconnection(
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/engine/base.py", line 1554, in _handle_dbapi_exception_noconnection
util.raise_(
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/util/compat.py", line 178, in raise_
raise exception
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/engine/base.py", line 2285, in _wrap_pool_connect
return fn()
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/pool/base.py", line 303, in unique_connection
return _ConnectionFairy._checkout(self)
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/pool/base.py", line 773, in _checkout
fairy = _ConnectionRecord.checkout(pool)
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/pool/base.py", line 492, in checkout
rec = pool._do_get()
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/pool/impl.py", line 238, in _do_get
return self._create_connection()
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/pool/base.py", line 308, in _create_connection
return _ConnectionRecord(self)
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/pool/base.py", line 437, in __init__
self.__connect(first_connect_check=True)
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/pool/base.py", line 657, in __connect
pool.logger.debug("Error on connect(): %s", e)
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/util/langhelpers.py", line 68, in __exit__
compat.raise_(
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/util/compat.py", line 178, in raise_
raise exception
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/pool/base.py", line 652, in __connect
connection = pool._invoke_creator(self)
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/engine/strategies.py", line 114, in connect
return dialect.connect(*cargs, **cparams)
File "/Users/jordan/Coding/survey-app/venv/lib/python3.8/site-packages/sqlalchemy/engine/default.py", line 490, in connect
return self.dbapi.connect(*cargs, **cparams)
sqlalchemy.exc.OperationalError: (pyodbc.OperationalError) ('08001', '[08001] [Microsoft][ODBC Driver 17 for SQL Server]Neither DSN nor SERVER keyword supplied (0) (SQLDriverConnect)')
(Background on this error at: http://sqlalche.me/e/e3q8)
I was able to solve this with a workaround.
I edited the migrations/env.py file:
import os
import urllib.parse

DB_CREDENTIALS = os.environ.get("ENV_DB_CREDENTIALS")
SQLALCHEMY_DATABASE_URI = (
    "mssql+pyodbc:///?odbc_connect=%s" % urllib.parse.quote_plus(DB_CREDENTIALS)
)
# Double the percent signs: Alembic's config is ConfigParser-based, and the
# %xx escapes produced by quote_plus would otherwise be treated as interpolation.
config.set_main_option(
    "sqlalchemy.url",
    SQLALCHEMY_DATABASE_URI.replace("%", "%%"),
)
I also read somewhere that Trusted_Connection=Yes causes issues for some reason; I'm not sure why.
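If you are on SQLAlchemy 1.4 or newer, you can also skip the manual quote_plus step and build the URL object directly, something like this (a sketch, not tested against this exact setup):
import os
from sqlalchemy.engine import URL

# The raw ODBC connection string, same as above.
odbc_string = os.environ["ENV_DB_CREDENTIALS"]
sqlalchemy_url = URL.create("mssql+pyodbc", query={"odbc_connect": odbc_string})

# sqlalchemy_url can be passed to create_engine() directly, or rendered with
# str(sqlalchemy_url) wherever a plain string URI is required.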

Migrations fail on Django 1.11.20 with ugettext() got an unexpected keyword argument 'default'

I tried to make a migration on a Django project, version 1.11.20.
But I get an error, and I don't understand where it comes from.
There must have been migrations before, because the project works; I just can't add a modification to the project and apply a new migration.
Traceback (most recent call last):
File "manage.py", line 22, in <module>
execute_from_command_line(sys.argv)
File "/usr/local/lib/python3.6/site-packages/django/core/management/__init__.py", line 364, in execute_from_command_line
utility.execute()
File "/usr/local/lib/python3.6/site-packages/django/core/management/__init__.py", line 356, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/usr/local/lib/python3.6/site-packages/django/core/management/base.py", line 283, in run_from_argv
self.execute(*args, **cmd_options)
File "/usr/local/lib/python3.6/site-packages/django/core/management/base.py", line 330, in execute
output = self.handle(*args, **options)
File "/usr/local/lib/python3.6/site-packages/django/core/management/commands/makemigrations.py", line 193, in handle
self.write_migration_files(changes)
File "/usr/local/lib/python3.6/site-packages/django/core/management/commands/makemigrations.py", line 231, in write_migration_files
migration_string = writer.as_string()
File "/usr/local/lib/python3.6/site-packages/django/db/migrations/writer.py", line 163, in as_string
operation_string, operation_imports = OperationWriter(operation).serialize()
File "/usr/local/lib/python3.6/site-packages/django/db/migrations/writer.py", line 120, in serialize
_write(arg_name, arg_value)
File "/usr/local/lib/python3.6/site-packages/django/db/migrations/writer.py", line 72, in _write
arg_string, arg_imports = MigrationWriter.serialize(item)
File "/usr/local/lib/python3.6/site-packages/django/db/migrations/writer.py", line 293, in serialize
return serializer_factory(value).serialize()
File "/usr/local/lib/python3.6/site-packages/django/db/migrations/serializer.py", line 44, in serialize
item_string, item_imports = serializer_factory(item).serialize()
File "/usr/local/lib/python3.6/site-packages/django/db/migrations/serializer.py", line 229, in serialize
return self.serialize_deconstructed(path, args, kwargs)
File "/usr/local/lib/python3.6/site-packages/django/db/migrations/serializer.py", line 101, in serialize_deconstructed
arg_string, arg_imports = serializer_factory(arg).serialize()
File "/usr/local/lib/python3.6/site-packages/django/db/migrations/serializer.py", line 332, in serializer_factory
value = force_text(value)
File "/usr/local/lib/python3.6/site-packages/django/utils/encoding.py", line 76, in force_text
s = six.text_type(s)
File "/usr/local/lib/python3.6/site-packages/django/utils/functional.py", line 119, in __text_cast
return func(*self.__args, **self.__kw)
TypeError: ugettext() got an unexpected keyword argument 'default'
