Embed pymysql within the collection and use default test container

This change eliminates the need to install the connector on each
controlled node, as `pymysql` version 1.1.1 is now included. As a
result, we can safely assume its availability, thus simplifying the
testing process.

Also, I managed to remove the need for pre-built test containers. We
now use the default test containers from ansible-test.
Laurent Indermuehle 2024-06-07 14:05:40 +02:00
commit 04af62c400
49 changed files with 4392 additions and 979 deletions
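For context, a minimal sketch (editorial, not part of the diff) of what embedding the connector means for module code: the driver import in plugins/module_utils/mysql.py collapses to a single import of the vendored package, so modules no longer probe for a system-wide PyMySQL or MySQLdb. The connection parameters below are placeholders.

```python
# Sketch only; the real import line appears in the plugins/module_utils/mysql.py hunk below.
from ansible_collections.community.mysql.plugins.module_utils import pymysql as mysql_driver

# The vendored pymysql is a regular DB-API 2.0 driver; host/user/password are placeholder values.
conn = mysql_driver.connect(host="127.0.0.1", user="root", password="secret", database="mysql")
with conn.cursor() as cursor:
    cursor.execute("SELECT VERSION()")
    print(cursor.fetchone())
conn.close()
```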


@@ -36,7 +36,7 @@ jobs:
pull-request-change-detection: true
integration:
name: "Integration (Python: ${{ matrix.python }}, Ansible: ${{ matrix.ansible }}, DB: ${{ matrix.db_engine_name }} ${{ matrix.db_engine_version }}, connector: ${{ matrix.connector_name }} ${{ matrix.connector_version }})"
name: "Integration (Python: ${{ matrix.python }}, Ansible: ${{ matrix.ansible }}, DB: ${{ matrix.db_engine_name }} ${{ matrix.db_engine_version }})"
runs-on: ubuntu-22.04
strategy:
fail-fast: false
@@ -59,28 +59,8 @@ jobs:
- '3.8'
- '3.9'
- '3.10'
connector_name:
- pymysql
- mysqlclient
connector_version:
- 0.7.11
- 0.9.3
- 1.0.2
- 2.0.1
- 2.0.3
- 2.1.1
include:
- python: '3.9' # RHEL9 uses 3.9 by default
connector_version: '0.10.1' # From RHEL package python3-PyMySQL
connector_name: pymysql
- python: '3.11'
connector_version: '1.0.2' # From RHEL package python3.11-PyMySQL
connector_name: pymysql
- python: '3.12'
connector_version: '1.1.0' # From RHEL package python3.12-PyMySQL
connector_name: pymysql
- '3.11'
- '3.12'
exclude:
- db_engine_name: mysql
db_engine_version: 10.4.27
@@ -97,48 +77,6 @@ jobs:
- db_engine_name: mariadb
db_engine_version: 8.0.31
- connector_name: pymysql
connector_version: 2.0.1
- connector_name: pymysql
connector_version: 2.0.3
- connector_name: pymysql
connector_version: 2.1.1
- connector_name: mysqlclient
connector_version: 0.7.11
- connector_name: mysqlclient
connector_version: 0.10.1
- connector_name: mysqlclient
connector_version: 0.9.3
- connector_name: mysqlclient
connector_version: 1.0.2
- connector_name: mysqlclient
connector_version: 1.1.0
- db_engine_name: mariadb
connector_version: 0.7.11
- db_engine_version: 5.7.40
python: '3.9'
- db_engine_version: 5.7.40
python: '3.10'
- db_engine_version: 5.7.40
ansible: stable-2.15
- db_engine_version: 5.7.40
ansible: stable-2.16
- db_engine_version: 5.7.40
ansible: devel
- db_engine_version: 8.0.31
python: '3.8'
@@ -154,39 +92,6 @@ jobs:
- db_engine_version: 10.6.11
python: '3.9'
- python: '3.8'
connector_version: 1.0.2
- python: '3.8'
connector_version: 2.0.3
- python: '3.8'
connector_version: 2.1.1
- python: '3.9'
connector_version: 0.7.11
- python: '3.9'
connector_version: 1.0.2
- python: '3.9'
connector_version: 2.0.1
- python: '3.9'
connector_version: 2.1.1
- python: '3.10'
connector_version: 0.7.11
- python: '3.10'
connector_version: 0.9.3
- python: '3.10'
connector_version: 2.0.1
- python: '3.10'
connector_version: 2.0.3
- python: '3.8'
ansible: stable-2.16
@@ -270,37 +175,6 @@ jobs:
${{ job.services.db_primary.id }}
| grep healthy && [[ "$SECONDS" -lt 120 ]]; do sleep 1; done
- name: Compute docker_image - Set python_version_flat
run: >
echo "python_version_flat=$(echo ${{ matrix.python }}
| tr -d '.')" >> $GITHUB_ENV
- name: Compute docker_image - Set connector_version_flat
run: >
echo "connector_version_flat=$(echo ${{ matrix.connector_version }}
|tr -d .)" >> $GITHUB_ENV
- name: Compute docker_image - Set db_engine_version_flat
run: >
echo "db_engine_version_flat=$(echo ${{ matrix.db_engine_version }}
| awk -F '.' '{print $1 $2}')" >> $GITHUB_ENV
- name: Compute docker_image - Set db_client
run: >
if [[ ${{ env.db_engine_version_flat }} == 57 ]]; then
echo "db_client=my57" >> $GITHUB_ENV;
else
echo "db_client=$(echo ${{ matrix.db_engine_name }})" >> $GITHUB_ENV;
fi
- name: Set docker_image
run: |-
echo "docker_image=ghcr.io/ansible-collections/community.mysql\
/test-container-${{ env.db_client }}\
-py${{ env.python_version_flat }}\
-${{ matrix.connector_name }}${{ env.connector_version_flat }}\
:latest" >> $GITHUB_ENV
- name: >-
Perform integration testing against
Ansible version ${{ matrix.ansible }}
@@ -318,14 +192,6 @@ jobs:
echo -n "${{ matrix.db_engine_version }}"
> tests/integration/db_engine_version;
echo Setting Connector name to "${{ matrix.connector_name }}"...;
echo -n "${{ matrix.connector_name }}"
> tests/integration/connector_name;
echo Setting Connector name to "${{ matrix.connector_version }}"...;
echo -n "${{ matrix.connector_version }}"
> tests/integration/connector_version;
echo Setting Python version to "${{ matrix.python }}"...;
echo -n "${{ matrix.python }}"
> tests/integration/python;
@@ -333,7 +199,6 @@ jobs:
echo Setting Ansible version to "${{ matrix.ansible }}"...;
echo -n "${{ matrix.ansible }}"
> tests/integration/ansible
docker-image: ${{ env.docker_image }}
target-python-version: ${{ matrix.python }}
testing-type: integration


@@ -1,242 +0,0 @@
---
name: Build Docker Image for ansible-test
on: # yamllint disable-line rule:truthy
workflow_call:
inputs:
registry:
required: true
type: string
image_name:
required: true
type: string
context:
required: true
type: string
jobs:
build:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
strategy:
fail-fast: false
matrix:
include:
- from: ubuntu2004
db_client: mariadb
python_minor: '8'
connector_name: pymysql
connector_major: '0'
connector_minor: '9'
connector_release: '3'
- from: ubuntu2004
db_client: mariadb
python_minor: '8'
connector_name: mysqlclient
connector_major: '2'
connector_minor: '0'
connector_release: '1'
- from: ubuntu2204
db_client: mariadb
python_minor: '9' # RHEL9 uses 3.9 by default
connector_name: pymysql
# Same ver. as RHEL package python3-PyMySQL
connector_major: '0'
connector_minor: '10'
connector_release: '1'
- from: ubuntu2004
db_client: mariadb
python_minor: '9'
connector_name: mysqlclient
connector_major: '2'
connector_minor: '0'
connector_release: '3'
- from: ubuntu2204
db_client: mariadb
python_minor: '10'
connector_name: pymysql
connector_major: '1'
connector_minor: '0'
connector_release: '2'
- from: ubuntu2204
db_client: mariadb
python_minor: '10'
connector_name: mysqlclient
connector_major: '2'
connector_minor: '1'
connector_release: '1'
- from: ubuntu2204
db_client: mariadb
python_minor: '11' # RHEL9 uses 3.9 by default
connector_name: pymysql
# Same ver. as RHEL package python3.11-PyMySQL
connector_major: '1'
connector_minor: '0'
connector_release: '2'
- from: ubuntu2204
db_client: mariadb
python_minor: '12' # RHEL9 uses 3.9 by default
connector_name: pymysql
# Same ver. as RHEL package python3.12-PyMySQL
connector_major: '1'
connector_minor: '1'
connector_release: '0'
- from: ubuntu2004
db_client: mysql
python_minor: '8'
connector_name: pymysql
connector_major: '0'
connector_minor: '9'
connector_release: '3'
- from: ubuntu2004
db_client: mysql
python_minor: '8'
connector_name: mysqlclient
connector_major: '2'
connector_minor: '0'
connector_release: '1'
- from: ubuntu2004
db_client: mysql
python_minor: '9'
connector_name: pymysql
connector_major: '0'
connector_minor: '10'
connector_release: '1'
- from: ubuntu2004
db_client: mysql
python_minor: '9'
connector_name: mysqlclient
connector_major: '2'
connector_minor: '0'
connector_release: '3'
- from: ubuntu2204
db_client: mysql
python_minor: '10'
connector_name: pymysql
connector_major: '1'
connector_minor: '0'
connector_release: '2'
- from: ubuntu2204
db_client: mysql
python_minor: '10'
connector_name: mysqlclient
connector_major: '2'
connector_minor: '1'
connector_release: '1'
- from: ubuntu2204
db_client: mysql
python_minor: '11' # RHEL9 uses 3.9 by default
connector_name: pymysql
# Same ver. as RHEL package python3.11-PyMySQL
connector_major: '1'
connector_minor: '0'
connector_release: '2'
- from: ubuntu2204
db_client: mysql
python_minor: '12' # RHEL9 uses 3.9 by default
connector_name: pymysql
# Same ver. as RHEL package python3.12-PyMySQL
connector_major: '1'
connector_minor: '1'
connector_release: '0'
env:
connector_version:
"${{ matrix.connector_major }}.\
${{ matrix.connector_minor }}.\
${{ matrix.connector_release }}"
steps:
# Requirement to use 'context' in docker/build-push-action@v3
- name: Checkout repository
uses: actions/checkout@v3
# https://github.com/docker/login-action
- name: Log into registry ghcr.io
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
# https://github.com/docker/metadata-action
- name: Extract Docker metadata (tags, labels)
id: meta
uses: docker/metadata-action@v4
with:
images:
" ghcr.io\
/${{ github.repository }}\
/test-container-${{ matrix.db_client }}-\
py3${{ matrix.python_minor }}-\
${{ matrix.connector_name }}${{ matrix.connector_major }}\
${{ matrix.connector_minor }}${{ matrix.connector_release }}"
tags: latest
# Setting up Docker Buildx with docker-container driver is required
# at the moment to be able to use a subdirectory with Git context
#
# https://github.com/docker/setup-buildx-action
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
# https://github.com/docker/build-push-action
- name: Build and push Docker image with Buildx
id: build-and-push
uses: docker/build-push-action@v3
with:
context: |
FROM quay.io/ansible/${{ matrix.from }}-test-container:main
RUN apt-get update -y && \
DEBIAN_FRONTEND=noninteractive apt-get upgrade -y \
--no-install-recommends && \
DEBIAN_FRONTEND=noninteractive apt-get install -y \
--no-install-recommends \
python3${{ matrix.python_minor }} \
python3${{ matrix.python_minor }}-dev \
iproute2 \
build-essential \
if [[ "${{ matrix.db_client }}" == "mysql" ]]; then
RUN DEBIAN_FRONTEND=noninteractive apt-get install -y \
--no-install-recommends default-libmysqlclient-dev \
mysql-client
else
RUN DEBIAN_FRONTEND=noninteractive apt-get install -y \
--no-install-recommends mariadb-client
fi
RUN python3${{ matrix.python_minor }} -m pip install \
--disable-pip-version-check \
--no-cache-dir \
cffi \
${{ matrix.connector_name }}==$connector_version
ENV container=docker
CMD ["/sbin/init"]
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=gha
cache-to: type=gha,mode=max


@@ -15,9 +15,6 @@ endif
db_ver_tuple := $(subst ., , $(db_engine_version))
db_engine_version_flat := $(word 1, $(db_ver_tuple))$(word 2, $(db_ver_tuple))
con_ver_tuple := $(subst ., , $(connector_version))
connector_version_flat := $(word 1, $(con_ver_tuple))$(word 2, $(con_ver_tuple))$(word 3, $(con_ver_tuple))
py_ver_tuple := $(subst ., , $(python))
python_version_flat := $(word 1, $(py_ver_tuple))$(word 2, $(py_ver_tuple))
@@ -32,8 +29,6 @@ endif
test-integration:
@echo -n $(db_engine_name) > tests/integration/db_engine_name
@echo -n $(db_engine_version) > tests/integration/db_engine_version
@echo -n $(connector_name) > tests/integration/connector_name
@echo -n $(connector_version) > tests/integration/connector_version
@echo -n $(python) > tests/integration/python
@echo -n $(ansible) > tests/integration/ansible
@@ -94,16 +89,13 @@ test-integration:
https://github.com/ansible/ansible/archive/$(ansible).tar.gz; \
set -x; \
ansible-test integration $(target) -v --color --coverage --diff \
--docker ghcr.io/ansible-collections/community.mysql/test-container\
-$(db_client)-py$(python_version_flat)-$(connector_name)$(connector_version_flat):latest \
--docker \
--docker-network podman $(_continue_on_errors) $(_keep_containers_alive) --python $(python); \
set +x
# End of venv
rm tests/integration/db_engine_name
rm tests/integration/db_engine_version
rm tests/integration/connector_name
rm tests/integration/connector_version
rm tests/integration/python
rm tests/integration/ansible
ifndef keep_containers_alive


@@ -126,13 +126,10 @@ For MariaDB, only Long Term releases are tested.
- mysqlclient 2.0.3 (only collection version >= 3.5.2)
- mysqlclient 2.1.1 (only collection version >= 3.5.2)
## External requirements
Starting with 3.9.1, pymysql is included in the collection, so it is no longer necessary to install the connector on the controller or on the controlled nodes. The included version is:
The MySQL modules rely on a MySQL connector. The list of supported drivers is below:
- [PyMySQL](https://github.com/PyMySQL/PyMySQL)
- [mysqlclient](https://github.com/PyMySQL/mysqlclient)
- Support for other Python MySQL connectors may be added in a future release.
- community.mysql 3.9.1: pymysql 1.1.1
## Using this collection


@@ -26,14 +26,6 @@ For now, the makefile only supports Podman.
- Minimum 2GB of RAM
### Custom ansible-test containers
Our integration tests use custom containers for ansible-test. Those images have their definition files stored in the [test-containers](test-containers/) directory. We build and publish the images on ghcr.io under the ansible-collections namespace, e.g.:
`ghcr.io/ansible-collections/community.mysql/test-container-mariadb106-py310-mysqlclient211:latest`.
Available images are listed [here](https://github.com/orgs/ansible-collections/packages).
### Makefile options
The Makefile accepts the following options
@@ -72,24 +64,6 @@ The Makefile accept the following options
- "10.6.11" <- mariadb
- Description: The tag of the container to use for the service containers that will host a primary database and two replicas. Do not use a short version like `mysql:8`, because our tests expect a full version in order to filter tests precisely, for instance: `when: db_version is version('8.0.22', '>')`. You can use any tag available on [hub.docker.com/_/mysql](https://hub.docker.com/_/mysql) and [hub.docker.com/_/mariadb](https://hub.docker.com/_/mariadb), but GitHub Actions will only use the versions listed above.
- `connector_name`
- Mandatory: true
- Choices:
- "pymysql"
- "mysqlclient"
- Description: The python package of the connector to use. In addition to selecting the test container, this value is also used for tests filtering: `when: connector_name == 'pymysql'`.
- `connector_version`
- Mandatory: true
- Choices:
- "0.7.11" <- pymysql (Only for MySQL 5.7)
- "0.9.3" <- pymysql
- "1.0.2" <- pymysql
- "2.0.1" <- mysqlclient
- "2.0.3" <- mysqlclient
- "2.1.1" <- mysqlclient
- Description: The version of the python package of the connector to use. This value is used to filter tests meant for other connectors.
- `python`
- Mandatory: true
- Choices:
@@ -124,17 +98,17 @@ tests will overwrite the 3 databases containers so no need to kill them in advance
```sh
# Run all targets
make ansible="stable-2.12" db_engine_name="mysql" db_engine_version="5.7.40" python="3.8" connector_name="pymysql" connector_version="0.7.11"
make ansible="stable-2.12" db_engine_name="mysql" db_engine_version="5.7.40" python="3.8"
# A single target
make ansible="stable-2.14" db_engine_name="mysql" db_engine_version="5.7.40" python="3.8" connector_name="pymysql" connector_version="0.7.11" target="test_mysql_info"
make ansible="stable-2.14" db_engine_name="mysql" db_engine_version="5.7.40" python="3.8" target="test_mysql_info"
# Keep databases and ansible tests containers alives
# A single target and continue on errors
make ansible="stable-2.14" db_engine_name="mysql" db_engine_version="8.0.31" python="3.9" connector_name="mysqlclient" connector_version="2.0.3" target="test_mysql_query" keep_containers_alive=1 continue_on_errors=1
make ansible="stable-2.14" db_engine_name="mysql" db_engine_version="8.0.31" python="3.9" target="test_mysql_query" keep_containers_alive=1 continue_on_errors=1
# If your system has an unsupported version of Python:
make local_python_version="3.8" ansible="stable-2.14" db_engine_name="mariadb" db_engine_version="10.6.11" python="3.9" connector_name="pymysql" connector_version="0.9.3"
make local_python_version="3.8" ansible="stable-2.14" db_engine_name="mariadb" db_engine_version="10.6.11" python="3.9"
```
@@ -149,18 +123,10 @@ python run_all_tests.py
```
### Add a new Python, Connector or Database version
### Add a new Python or Database version
You can look into [.github/workflows/ansible-test-plugins.yml](https://github.com/ansible-collections/community.mysql/tree/main/.github/workflows)
You can look into [.github/workflows/ansible-test-plugins.yml](https://github.com/ansible-collections/community.mysql/tree/main/.github/workflows) to see how those containers are built using [build-docker-image.yml](https://github.com/ansible-collections/community.mysql/blob/main/.github/workflows/build-docker-image.yml) and all [docker-image-xxx.yml](https://github.com/ansible-collections/community.mysql/blob/main/.github/workflows/docker-image-mariadb103-py38-mysqlclient201.yml) files.
1. Add a workflow in [.github/workflows/](.github/workflows)
1. Add a new folder in [test-containers](test-containers) containing a new Dockerfile. Your container must contain 3 things:
- Python
- A connector: The python package to connect to the database (pymysql, mysqlclient, ...)
- A mysql client to prepare databases before our tests start. This client must provide both the `mysql` and `mysqldump` commands.
1. Add your version in the matrix of *.github/workflows/ansible-test-plugins.yml*. You can use [run_all_tests.py](run_all_tests.py) to help you see what the matrix will be. Simply comment out the line `os.system(make_cmd)` before running the script. You can also add `print(len(matrix))` to display how many tests there will be on GitHub Actions.
1. Ask the lead maintainer to mark your new image(s) as `public` under [https://github.com/orgs/ansible-collections/packages](https://github.com/orgs/ansible-collections/packages)
After pushing your commit to the remote, the container will be built and published on ghcr.io. Have a look at the "Actions" tab to see if it worked. In case of the error `failed to copy: io: read/write on closed pipe`, re-run the workflow; this unfortunately happens a lot.
To see the Docker image produced, go to the package page in the ansible-collections namespace [https://github.com/orgs/ansible-collections/packages](https://github.com/orgs/ansible-collections/packages). This page shows a "Published x days ago" note that is updated infrequently. To see the last time a container was updated, click on its title and look on the right-hand side below the heading "Last published".


@@ -74,19 +74,7 @@ options:
- This option has no effect on MySQLdb.
type: bool
version_added: '1.1.0'
requirements:
- mysqlclient (Python 3.5+) or
- PyMySQL (Python 2.7 and Python 3.x) or
- MySQLdb (Python 2.x)
notes:
- Requires the PyMySQL (Python 2.7 and Python 3.X) or MySQL-python (Python 2.X) package installed on the remote host.
The Python package may be installed with apt-get install python-pymysql (Ubuntu; see M(ansible.builtin.apt)) or
yum install python2-PyMySQL (RHEL/CentOS/Fedora; see M(ansible.builtin.yum)). You can also use dnf install python2-PyMySQL
for newer versions of Fedora; see M(ansible.builtin.dnf).
- Be sure you have mysqlclient, PyMySQL, or MySQLdb library installed on the target machine
for the Python interpreter Ansible discovers. For example if ansible discovers and uses Python 3, you need to install
the Python 3 version of PyMySQL or mysqlclient. If ansible discovers and uses Python 2, you need to install the Python 2
version of either PyMySQL or MySQL-python.
- If you have trouble, it may help to force Ansible to use the Python interpreter you need by specifying
C(ansible_python_interpreter). For more information, see
U(https://docs.ansible.com/ansible/latest/reference_appendices/interpreter_discovery.html).
@@ -99,9 +87,6 @@ notes:
and later uses the unix_socket authentication plugin by default that
without using I(login_unix_socket=/var/run/mysqld/mysqld.sock) (the default path)
causes the error ``Host '127.0.0.1' is not allowed to connect to this MariaDB server``.
- Alternatively, you can use the mysqlclient library instead of MySQL-python (MySQLdb)
which supports both Python 2.X and Python >=3.5.
See U(https://pypi.org/project/mysqlclient/) how to install it.
- "If credentials from the config file (for example, C(/root/.my.cnf)) are not needed to connect to a database server, but
the file exists and does not contain a C([client]) section, before any other valid directives, it will be read and this
will cause the connection to fail, to prevent this set it to an empty string, (for example C(config_file: ''))."


@@ -17,64 +17,11 @@ import os
from ansible.module_utils.six.moves import configparser
from ansible.module_utils._text import to_native
try:
import pymysql as mysql_driver
_mysql_cursor_param = 'cursor'
except ImportError:
try:
# mysqlclient is called MySQLdb
import MySQLdb as mysql_driver
import MySQLdb.cursors
_mysql_cursor_param = 'cursorclass'
except ImportError:
mysql_driver = None
mysql_driver_fail_msg = ('A MySQL module is required: for Python 2.7 either PyMySQL, or '
'MySQL-python, or for Python 3.X mysqlclient or PyMySQL. '
'Consider setting ansible_python_interpreter to use '
'the intended Python version.')
from ansible_collections.community.mysql.plugins.module_utils import pymysql as mysql_driver
from ansible_collections.community.mysql.plugins.module_utils.database import mysql_quote_identifier
def get_connector_name(connector):
""" (class) -> str
Return the name of the connector (pymysql or mysqlclient (MySQLdb))
or 'Unknown' if not pymysql or MySQLdb. When adding a
connector here, also modify get_connector_version.
"""
if connector is None or not hasattr(connector, '__name__'):
return 'Unknown'
return connector.__name__
def get_connector_version(connector):
""" (class) -> str
Return the version of pymysql or mysqlclient (MySQLdb).
Return 'Unknown' if the connector name is unknown.
"""
if connector is None:
return 'Unknown'
connector_name = get_connector_name(connector)
if connector_name == 'pymysql':
# pymysql has two methods:
# - __version__ that returns the string: 0.7.11.None
# - VERSION that returns the tuple (0, 7, 11, None)
v = connector.VERSION[:3]
return '.'.join(map(str, v))
elif connector_name == 'MySQLdb':
# version_info returns the tuple (2, 1, 1, 'final', 0)
v = connector.version_info[:3]
return '.'.join(map(str, v))
else:
return 'Unknown'
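A quick illustration (editorial, not part of the diff) of what the two helpers above return, assuming they are in scope together with an importable pymysql; the standalone package name is used here only for brevity.

```python
import pymysql  # standalone package name, used here only to exercise the helpers above

print(get_connector_name(pymysql))      # -> "pymysql"
print(get_connector_version(pymysql))   # VERSION == (1, 1, 1, "final", 1) -> "1.1.1"
print(get_connector_version(None))      # -> "Unknown"
```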
def parse_from_mysql_config_file(cnf):
# Default values of comment_prefix is '#' and ';'.
# '!' added to prevent a parsing error
@@ -134,38 +81,10 @@ def mysql_connect(module, login_user=None, login_password=None, config_file='',
if connect_timeout is not None:
config['connect_timeout'] = connect_timeout
if check_hostname is not None:
if get_connector_name(mysql_driver) == 'pymysql':
version_tuple = (n for n in mysql_driver.__version__.split('.') if n != 'None')
if reduce(lambda x, y: int(x) * 100 + int(y), version_tuple) >= 711:
config['ssl']['check_hostname'] = check_hostname
else:
module.fail_json(msg='To use check_hostname, pymysql >= 0.7.11 is required on the target host')
if get_connector_name(mysql_driver) == 'pymysql':
# In case of PyMySQL driver:
if mysql_driver.version_info[0] < 1:
# for PyMySQL < 1.0.0, use 'db' instead of 'database' and 'passwd' instead of 'password'
if 'database' in config:
config['db'] = config['database']
del config['database']
if 'password' in config:
config['passwd'] = config['password']
del config['password']
db_connection = mysql_driver.connect(autocommit=autocommit, **config)
else:
# In case of MySQLdb driver
if mysql_driver.version_info[0] < 2 or (mysql_driver.version_info[0] == 2 and mysql_driver.version_info[1] < 1):
# for MySQLdb < 2.1.0, use 'db' instead of 'database' and 'passwd' instead of 'password'
if 'database' in config:
config['db'] = config['database']
del config['database']
if 'password' in config:
config['passwd'] = config['password']
del config['password']
db_connection = mysql_driver.connect(**config)
if autocommit:
db_connection.autocommit(True)
config['ssl']['check_hostname'] = check_hostname
db_connection = mysql_driver.connect(autocommit=autocommit, **config)
# Monkey patch the Connection class to close the connection when garbage collected
def _conn_patch(conn_self):
conn_self.close()
@@ -173,7 +92,7 @@ def mysql_connect(module, login_user=None, login_password=None, config_file='',
# Patched
if cursor_class == 'DictCursor':
return db_connection.cursor(**{_mysql_cursor_param: mysql_driver.cursors.DictCursor}), db_connection
return db_connection.cursor(**{'cursor': mysql_driver.cursors.DictCursor}), db_connection
else:
return db_connection.cursor(), db_connection


@@ -0,0 +1,183 @@
"""
PyMySQL: A pure-Python MySQL client library.
Copyright (c) 2010-2016 PyMySQL contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import sys
from .constants import FIELD_TYPE
from .err import (
Warning,
Error,
InterfaceError,
DataError,
DatabaseError,
OperationalError,
IntegrityError,
InternalError,
NotSupportedError,
ProgrammingError,
MySQLError,
)
from .times import (
Date,
Time,
Timestamp,
DateFromTicks,
TimeFromTicks,
TimestampFromTicks,
)
# PyMySQL version.
# Used by setuptools and connection_attrs
VERSION = (1, 1, 1, "final", 1)
VERSION_STRING = "1.1.1"
### for mysqlclient compatibility
### Django checks mysqlclient version.
version_info = (1, 4, 6, "final", 1)
__version__ = "1.4.6"
def get_client_info(): # for MySQLdb compatibility
return __version__
def install_as_MySQLdb():
"""
After this function is called, any application that imports MySQLdb
will unwittingly actually use pymysql.
"""
sys.modules["MySQLdb"] = sys.modules["pymysql"]
# end of mysqlclient compatibility code
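An editorial sketch (not part of the diff) of what the compatibility shim above enables, shown with the standalone `pymysql` package name; the vendored copy lives under the collection's module_utils path instead.

```python
import pymysql

pymysql.install_as_MySQLdb()
import MySQLdb                     # now resolves to the pymysql module itself

assert MySQLdb is pymysql
print(MySQLdb.get_client_info())   # "1.4.6", the mysqlclient-compatible version string
```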
threadsafety = 1
apilevel = "2.0"
paramstyle = "pyformat"
from . import connections # noqa: E402
class DBAPISet(frozenset):
def __ne__(self, other):
if isinstance(other, set):
return frozenset.__ne__(self, other)
else:
return other not in self
def __eq__(self, other):
if isinstance(other, frozenset):
return frozenset.__eq__(self, other)
else:
return other in self
def __hash__(self):
return frozenset.__hash__(self)
STRING = DBAPISet([FIELD_TYPE.ENUM, FIELD_TYPE.STRING, FIELD_TYPE.VAR_STRING])
BINARY = DBAPISet(
[
FIELD_TYPE.BLOB,
FIELD_TYPE.LONG_BLOB,
FIELD_TYPE.MEDIUM_BLOB,
FIELD_TYPE.TINY_BLOB,
]
)
NUMBER = DBAPISet(
[
FIELD_TYPE.DECIMAL,
FIELD_TYPE.DOUBLE,
FIELD_TYPE.FLOAT,
FIELD_TYPE.INT24,
FIELD_TYPE.LONG,
FIELD_TYPE.LONGLONG,
FIELD_TYPE.TINY,
FIELD_TYPE.YEAR,
]
)
DATE = DBAPISet([FIELD_TYPE.DATE, FIELD_TYPE.NEWDATE])
TIME = DBAPISet([FIELD_TYPE.TIME])
TIMESTAMP = DBAPISet([FIELD_TYPE.TIMESTAMP, FIELD_TYPE.DATETIME])
DATETIME = TIMESTAMP
ROWID = DBAPISet()
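An editorial sketch (not part of the diff) of how the DB-API type objects above behave: a single field-type code compares equal to the whole set, which is what PEP 249 callers expect.

```python
# Assumes the module-level names defined above (FIELD_TYPE, STRING, BINARY, NUMBER).
assert STRING == FIELD_TYPE.VAR_STRING   # reflected __eq__ falls back to a membership test
assert BINARY == FIELD_TYPE.BLOB
assert NUMBER != FIELD_TYPE.DATE         # __ne__ on a non-set operand means "not a member"
```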
def Binary(x):
"""Return x as a binary type."""
return bytes(x)
def thread_safe():
return True # match MySQLdb.thread_safe()
Connect = connect = Connection = connections.Connection
NULL = "NULL"
__all__ = [
"BINARY",
"Binary",
"Connect",
"Connection",
"DATE",
"Date",
"Time",
"Timestamp",
"DateFromTicks",
"TimeFromTicks",
"TimestampFromTicks",
"DataError",
"DatabaseError",
"Error",
"FIELD_TYPE",
"IntegrityError",
"InterfaceError",
"InternalError",
"MySQLError",
"NULL",
"NUMBER",
"NotSupportedError",
"DBAPISet",
"OperationalError",
"ProgrammingError",
"ROWID",
"STRING",
"TIME",
"TIMESTAMP",
"Warning",
"apilevel",
"connect",
"connections",
"constants",
"converters",
"cursors",
"get_client_info",
"paramstyle",
"threadsafety",
"version_info",
"install_as_MySQLdb",
"__version__",
]


@@ -0,0 +1,268 @@
"""
Implements auth methods
"""
from .err import OperationalError
try:
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization, hashes
from cryptography.hazmat.primitives.asymmetric import padding
_have_cryptography = True
except ImportError:
_have_cryptography = False
from functools import partial
import hashlib
DEBUG = False
SCRAMBLE_LENGTH = 20
sha1_new = partial(hashlib.new, "sha1")
# mysql_native_password
# https://dev.mysql.com/doc/internals/en/secure-password-authentication.html#packet-Authentication::Native41
def scramble_native_password(password, message):
"""Scramble used for mysql_native_password"""
if not password:
return b""
stage1 = sha1_new(password).digest()
stage2 = sha1_new(stage1).digest()
s = sha1_new()
s.update(message[:SCRAMBLE_LENGTH])
s.update(stage2)
result = s.digest()
return _my_crypt(result, stage1)
def _my_crypt(message1, message2):
result = bytearray(message1)
for i in range(len(result)):
result[i] ^= message2[i]
return bytes(result)
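An editorial sketch (not part of the diff) of the helper above: the token is SHA1(password) XOR SHA1(nonce + SHA1(SHA1(password))) and is always 20 bytes; the nonce and password values are placeholders.

```python
nonce = b"01234567890123456789"                      # placeholder 20-byte server scramble
token = scramble_native_password(b"secret", nonce)   # placeholder password
assert len(token) == 20
assert scramble_native_password(b"", nonce) == b""   # empty password short-circuits
```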
# MariaDB's client_ed25519-plugin
# https://mariadb.com/kb/en/library/connection/#client_ed25519-plugin
_nacl_bindings = False
def _init_nacl():
global _nacl_bindings
try:
from nacl import bindings
_nacl_bindings = bindings
except ImportError:
raise RuntimeError(
"'pynacl' package is required for ed25519_password auth method"
)
def _scalar_clamp(s32):
ba = bytearray(s32)
ba0 = bytes(bytearray([ba[0] & 248]))
ba31 = bytes(bytearray([(ba[31] & 127) | 64]))
return ba0 + bytes(s32[1:31]) + ba31
def ed25519_password(password, scramble):
"""Sign a random scramble with elliptic curve Ed25519.
Secret and public key are derived from password.
"""
# variable names based on rfc8032 section-5.1.6
#
if not _nacl_bindings:
_init_nacl()
# h = SHA512(password)
h = hashlib.sha512(password).digest()
# s = prune(first_half(h))
s = _scalar_clamp(h[:32])
# r = SHA512(second_half(h) || M)
r = hashlib.sha512(h[32:] + scramble).digest()
# R = encoded point [r]B
r = _nacl_bindings.crypto_core_ed25519_scalar_reduce(r)
R = _nacl_bindings.crypto_scalarmult_ed25519_base_noclamp(r)
# A = encoded point [s]B
A = _nacl_bindings.crypto_scalarmult_ed25519_base_noclamp(s)
# k = SHA512(R || A || M)
k = hashlib.sha512(R + A + scramble).digest()
# S = (k * s + r) mod L
k = _nacl_bindings.crypto_core_ed25519_scalar_reduce(k)
ks = _nacl_bindings.crypto_core_ed25519_scalar_mul(k, s)
S = _nacl_bindings.crypto_core_ed25519_scalar_add(ks, r)
# signature = R || S
return R + S
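An editorial sketch (not part of the diff) of the signing helper above; it needs the optional `pynacl` package and the inputs below are placeholders.

```python
nonce = b"01234567890123456789"                 # placeholder scramble sent by the server
signature = ed25519_password(b"secret", nonce)  # placeholder password
assert len(signature) == 64                     # 32-byte R concatenated with 32-byte S
```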
# sha256_password
def _roundtrip(conn, send_data):
conn.write_packet(send_data)
pkt = conn._read_packet()
pkt.check_error()
return pkt
def _xor_password(password, salt):
# Trailing NUL character will be added in Auth Switch Request.
# See https://github.com/mysql/mysql-server/blob/7d10c82196c8e45554f27c00681474a9fb86d137/sql/auth/sha2_password.cc#L939-L945
salt = salt[:SCRAMBLE_LENGTH]
password_bytes = bytearray(password)
# salt = bytearray(salt) # for PY2 compat.
salt_len = len(salt)
for i in range(len(password_bytes)):
password_bytes[i] ^= salt[i % salt_len]
return bytes(password_bytes)
def sha2_rsa_encrypt(password, salt, public_key):
"""Encrypt password with salt and public_key.
Used for sha256_password and caching_sha2_password.
"""
if not _have_cryptography:
raise RuntimeError(
"'cryptography' package is required for sha256_password or"
+ " caching_sha2_password auth methods"
)
message = _xor_password(password + b"\0", salt)
rsa_key = serialization.load_pem_public_key(public_key, default_backend())
return rsa_key.encrypt(
message,
padding.OAEP(
mgf=padding.MGF1(algorithm=hashes.SHA1()),
algorithm=hashes.SHA1(),
label=None,
),
)
def sha256_password_auth(conn, pkt):
if conn._secure:
if DEBUG:
print("sha256: Sending plain password")
data = conn.password + b"\0"
return _roundtrip(conn, data)
if pkt.is_auth_switch_request():
conn.salt = pkt.read_all()
if not conn.server_public_key and conn.password:
# Request server public key
if DEBUG:
print("sha256: Requesting server public key")
pkt = _roundtrip(conn, b"\1")
if pkt.is_extra_auth_data():
conn.server_public_key = pkt._data[1:]
if DEBUG:
print("Received public key:\n", conn.server_public_key.decode("ascii"))
if conn.password:
if not conn.server_public_key:
raise OperationalError("Couldn't receive server's public key")
data = sha2_rsa_encrypt(conn.password, conn.salt, conn.server_public_key)
else:
data = b""
return _roundtrip(conn, data)
def scramble_caching_sha2(password, nonce):
# (bytes, bytes) -> bytes
"""Scramble algorithm used in cached_sha2_password fast path.
XOR(SHA256(password), SHA256(SHA256(SHA256(password)), nonce))
"""
if not password:
return b""
p1 = hashlib.sha256(password).digest()
p2 = hashlib.sha256(p1).digest()
p3 = hashlib.sha256(p2 + nonce).digest()
res = bytearray(p1)
for i in range(len(p3)):
res[i] ^= p3[i]
return bytes(res)
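An editorial sketch (not part of the diff) of the fast-path scramble above, i.e. XOR(SHA256(password), SHA256(SHA256(SHA256(password)) + nonce)); inputs are placeholders.

```python
nonce = b"01234567890123456789"                    # placeholder server nonce
token = scramble_caching_sha2(b"secret", nonce)    # placeholder password
assert len(token) == 32
assert scramble_caching_sha2(b"", nonce) == b""    # empty password short-circuits
```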
def caching_sha2_password_auth(conn, pkt):
# No password fast path
if not conn.password:
return _roundtrip(conn, b"")
if pkt.is_auth_switch_request():
# Try from fast auth
if DEBUG:
print("caching sha2: Trying fast path")
conn.salt = pkt.read_all()
scrambled = scramble_caching_sha2(conn.password, conn.salt)
pkt = _roundtrip(conn, scrambled)
# else: fast auth is tried in initial handshake
if not pkt.is_extra_auth_data():
raise OperationalError(
"caching sha2: Unknown packet for fast auth: %s" % pkt._data[:1]
)
# magic numbers:
# 2 - request public key
# 3 - fast auth succeeded
# 4 - need full auth
pkt.advance(1)
n = pkt.read_uint8()
if n == 3:
if DEBUG:
print("caching sha2: succeeded by fast path.")
pkt = conn._read_packet()
pkt.check_error() # pkt must be OK packet
return pkt
if n != 4:
raise OperationalError("caching sha2: Unknown result for fast auth: %s" % n)
if DEBUG:
print("caching sha2: Trying full auth...")
if conn._secure:
if DEBUG:
print("caching sha2: Sending plain password via secure connection")
return _roundtrip(conn, conn.password + b"\0")
if not conn.server_public_key:
pkt = _roundtrip(conn, b"\x02") # Request public key
if not pkt.is_extra_auth_data():
raise OperationalError(
"caching sha2: Unknown packet for public key: %s" % pkt._data[:1]
)
conn.server_public_key = pkt._data[1:]
if DEBUG:
print(conn.server_public_key.decode("ascii"))
data = sha2_rsa_encrypt(conn.password, conn.salt, conn.server_public_key)
pkt = _roundtrip(conn, data)


@@ -0,0 +1,216 @@
# Internal use only. Do not use directly.
MBLENGTH = {8: 1, 33: 3, 88: 2, 91: 2}
class Charset:
def __init__(self, id, name, collation, is_default=False):
self.id, self.name, self.collation = id, name, collation
self.is_default = is_default
def __repr__(self):
return (
f"Charset(id={self.id}, name={self.name!r}, collation={self.collation!r})"
)
@property
def encoding(self):
name = self.name
if name in ("utf8mb4", "utf8mb3"):
return "utf8"
if name == "latin1":
return "cp1252"
if name == "koi8r":
return "koi8_r"
if name == "koi8u":
return "koi8_u"
return name
@property
def is_binary(self):
return self.id == 63
class Charsets:
def __init__(self):
self._by_id = {}
self._by_name = {}
def add(self, c):
self._by_id[c.id] = c
if c.is_default:
self._by_name[c.name] = c
def by_id(self, id):
return self._by_id[id]
def by_name(self, name):
if name == "utf8":
name = "utf8mb4"
return self._by_name.get(name.lower())
_charsets = Charsets()
charset_by_name = _charsets.by_name
charset_by_id = _charsets.by_id
"""
TODO: update this script.
Generated with:
mysql -N -s -e "select id, character_set_name, collation_name, is_default
from information_schema.collations order by id;" | python -c "import sys
for l in sys.stdin.readlines():
id, name, collation, is_default = l.split(chr(9))
if is_default.strip() == "Yes":
print('_charsets.add(Charset(%s, \'%s\', \'%s\', True))' \
% (id, name, collation))
else:
print('_charsets.add(Charset(%s, \'%s\', \'%s\'))' \
% (id, name, collation, bool(is_default.strip()))
"""
_charsets.add(Charset(1, "big5", "big5_chinese_ci", True))
_charsets.add(Charset(2, "latin2", "latin2_czech_cs"))
_charsets.add(Charset(3, "dec8", "dec8_swedish_ci", True))
_charsets.add(Charset(4, "cp850", "cp850_general_ci", True))
_charsets.add(Charset(5, "latin1", "latin1_german1_ci"))
_charsets.add(Charset(6, "hp8", "hp8_english_ci", True))
_charsets.add(Charset(7, "koi8r", "koi8r_general_ci", True))
_charsets.add(Charset(8, "latin1", "latin1_swedish_ci", True))
_charsets.add(Charset(9, "latin2", "latin2_general_ci", True))
_charsets.add(Charset(10, "swe7", "swe7_swedish_ci", True))
_charsets.add(Charset(11, "ascii", "ascii_general_ci", True))
_charsets.add(Charset(12, "ujis", "ujis_japanese_ci", True))
_charsets.add(Charset(13, "sjis", "sjis_japanese_ci", True))
_charsets.add(Charset(14, "cp1251", "cp1251_bulgarian_ci"))
_charsets.add(Charset(15, "latin1", "latin1_danish_ci"))
_charsets.add(Charset(16, "hebrew", "hebrew_general_ci", True))
_charsets.add(Charset(18, "tis620", "tis620_thai_ci", True))
_charsets.add(Charset(19, "euckr", "euckr_korean_ci", True))
_charsets.add(Charset(20, "latin7", "latin7_estonian_cs"))
_charsets.add(Charset(21, "latin2", "latin2_hungarian_ci"))
_charsets.add(Charset(22, "koi8u", "koi8u_general_ci", True))
_charsets.add(Charset(23, "cp1251", "cp1251_ukrainian_ci"))
_charsets.add(Charset(24, "gb2312", "gb2312_chinese_ci", True))
_charsets.add(Charset(25, "greek", "greek_general_ci", True))
_charsets.add(Charset(26, "cp1250", "cp1250_general_ci", True))
_charsets.add(Charset(27, "latin2", "latin2_croatian_ci"))
_charsets.add(Charset(28, "gbk", "gbk_chinese_ci", True))
_charsets.add(Charset(29, "cp1257", "cp1257_lithuanian_ci"))
_charsets.add(Charset(30, "latin5", "latin5_turkish_ci", True))
_charsets.add(Charset(31, "latin1", "latin1_german2_ci"))
_charsets.add(Charset(32, "armscii8", "armscii8_general_ci", True))
_charsets.add(Charset(33, "utf8mb3", "utf8mb3_general_ci", True))
_charsets.add(Charset(34, "cp1250", "cp1250_czech_cs"))
_charsets.add(Charset(36, "cp866", "cp866_general_ci", True))
_charsets.add(Charset(37, "keybcs2", "keybcs2_general_ci", True))
_charsets.add(Charset(38, "macce", "macce_general_ci", True))
_charsets.add(Charset(39, "macroman", "macroman_general_ci", True))
_charsets.add(Charset(40, "cp852", "cp852_general_ci", True))
_charsets.add(Charset(41, "latin7", "latin7_general_ci", True))
_charsets.add(Charset(42, "latin7", "latin7_general_cs"))
_charsets.add(Charset(43, "macce", "macce_bin"))
_charsets.add(Charset(44, "cp1250", "cp1250_croatian_ci"))
_charsets.add(Charset(45, "utf8mb4", "utf8mb4_general_ci", True))
_charsets.add(Charset(46, "utf8mb4", "utf8mb4_bin"))
_charsets.add(Charset(47, "latin1", "latin1_bin"))
_charsets.add(Charset(48, "latin1", "latin1_general_ci"))
_charsets.add(Charset(49, "latin1", "latin1_general_cs"))
_charsets.add(Charset(50, "cp1251", "cp1251_bin"))
_charsets.add(Charset(51, "cp1251", "cp1251_general_ci", True))
_charsets.add(Charset(52, "cp1251", "cp1251_general_cs"))
_charsets.add(Charset(53, "macroman", "macroman_bin"))
_charsets.add(Charset(57, "cp1256", "cp1256_general_ci", True))
_charsets.add(Charset(58, "cp1257", "cp1257_bin"))
_charsets.add(Charset(59, "cp1257", "cp1257_general_ci", True))
_charsets.add(Charset(63, "binary", "binary", True))
_charsets.add(Charset(64, "armscii8", "armscii8_bin"))
_charsets.add(Charset(65, "ascii", "ascii_bin"))
_charsets.add(Charset(66, "cp1250", "cp1250_bin"))
_charsets.add(Charset(67, "cp1256", "cp1256_bin"))
_charsets.add(Charset(68, "cp866", "cp866_bin"))
_charsets.add(Charset(69, "dec8", "dec8_bin"))
_charsets.add(Charset(70, "greek", "greek_bin"))
_charsets.add(Charset(71, "hebrew", "hebrew_bin"))
_charsets.add(Charset(72, "hp8", "hp8_bin"))
_charsets.add(Charset(73, "keybcs2", "keybcs2_bin"))
_charsets.add(Charset(74, "koi8r", "koi8r_bin"))
_charsets.add(Charset(75, "koi8u", "koi8u_bin"))
_charsets.add(Charset(76, "utf8mb3", "utf8mb3_tolower_ci"))
_charsets.add(Charset(77, "latin2", "latin2_bin"))
_charsets.add(Charset(78, "latin5", "latin5_bin"))
_charsets.add(Charset(79, "latin7", "latin7_bin"))
_charsets.add(Charset(80, "cp850", "cp850_bin"))
_charsets.add(Charset(81, "cp852", "cp852_bin"))
_charsets.add(Charset(82, "swe7", "swe7_bin"))
_charsets.add(Charset(83, "utf8mb3", "utf8mb3_bin"))
_charsets.add(Charset(84, "big5", "big5_bin"))
_charsets.add(Charset(85, "euckr", "euckr_bin"))
_charsets.add(Charset(86, "gb2312", "gb2312_bin"))
_charsets.add(Charset(87, "gbk", "gbk_bin"))
_charsets.add(Charset(88, "sjis", "sjis_bin"))
_charsets.add(Charset(89, "tis620", "tis620_bin"))
_charsets.add(Charset(91, "ujis", "ujis_bin"))
_charsets.add(Charset(92, "geostd8", "geostd8_general_ci", True))
_charsets.add(Charset(93, "geostd8", "geostd8_bin"))
_charsets.add(Charset(94, "latin1", "latin1_spanish_ci"))
_charsets.add(Charset(95, "cp932", "cp932_japanese_ci", True))
_charsets.add(Charset(96, "cp932", "cp932_bin"))
_charsets.add(Charset(97, "eucjpms", "eucjpms_japanese_ci", True))
_charsets.add(Charset(98, "eucjpms", "eucjpms_bin"))
_charsets.add(Charset(99, "cp1250", "cp1250_polish_ci"))
_charsets.add(Charset(192, "utf8mb3", "utf8mb3_unicode_ci"))
_charsets.add(Charset(193, "utf8mb3", "utf8mb3_icelandic_ci"))
_charsets.add(Charset(194, "utf8mb3", "utf8mb3_latvian_ci"))
_charsets.add(Charset(195, "utf8mb3", "utf8mb3_romanian_ci"))
_charsets.add(Charset(196, "utf8mb3", "utf8mb3_slovenian_ci"))
_charsets.add(Charset(197, "utf8mb3", "utf8mb3_polish_ci"))
_charsets.add(Charset(198, "utf8mb3", "utf8mb3_estonian_ci"))
_charsets.add(Charset(199, "utf8mb3", "utf8mb3_spanish_ci"))
_charsets.add(Charset(200, "utf8mb3", "utf8mb3_swedish_ci"))
_charsets.add(Charset(201, "utf8mb3", "utf8mb3_turkish_ci"))
_charsets.add(Charset(202, "utf8mb3", "utf8mb3_czech_ci"))
_charsets.add(Charset(203, "utf8mb3", "utf8mb3_danish_ci"))
_charsets.add(Charset(204, "utf8mb3", "utf8mb3_lithuanian_ci"))
_charsets.add(Charset(205, "utf8mb3", "utf8mb3_slovak_ci"))
_charsets.add(Charset(206, "utf8mb3", "utf8mb3_spanish2_ci"))
_charsets.add(Charset(207, "utf8mb3", "utf8mb3_roman_ci"))
_charsets.add(Charset(208, "utf8mb3", "utf8mb3_persian_ci"))
_charsets.add(Charset(209, "utf8mb3", "utf8mb3_esperanto_ci"))
_charsets.add(Charset(210, "utf8mb3", "utf8mb3_hungarian_ci"))
_charsets.add(Charset(211, "utf8mb3", "utf8mb3_sinhala_ci"))
_charsets.add(Charset(212, "utf8mb3", "utf8mb3_german2_ci"))
_charsets.add(Charset(213, "utf8mb3", "utf8mb3_croatian_ci"))
_charsets.add(Charset(214, "utf8mb3", "utf8mb3_unicode_520_ci"))
_charsets.add(Charset(215, "utf8mb3", "utf8mb3_vietnamese_ci"))
_charsets.add(Charset(223, "utf8mb3", "utf8mb3_general_mysql500_ci"))
_charsets.add(Charset(224, "utf8mb4", "utf8mb4_unicode_ci"))
_charsets.add(Charset(225, "utf8mb4", "utf8mb4_icelandic_ci"))
_charsets.add(Charset(226, "utf8mb4", "utf8mb4_latvian_ci"))
_charsets.add(Charset(227, "utf8mb4", "utf8mb4_romanian_ci"))
_charsets.add(Charset(228, "utf8mb4", "utf8mb4_slovenian_ci"))
_charsets.add(Charset(229, "utf8mb4", "utf8mb4_polish_ci"))
_charsets.add(Charset(230, "utf8mb4", "utf8mb4_estonian_ci"))
_charsets.add(Charset(231, "utf8mb4", "utf8mb4_spanish_ci"))
_charsets.add(Charset(232, "utf8mb4", "utf8mb4_swedish_ci"))
_charsets.add(Charset(233, "utf8mb4", "utf8mb4_turkish_ci"))
_charsets.add(Charset(234, "utf8mb4", "utf8mb4_czech_ci"))
_charsets.add(Charset(235, "utf8mb4", "utf8mb4_danish_ci"))
_charsets.add(Charset(236, "utf8mb4", "utf8mb4_lithuanian_ci"))
_charsets.add(Charset(237, "utf8mb4", "utf8mb4_slovak_ci"))
_charsets.add(Charset(238, "utf8mb4", "utf8mb4_spanish2_ci"))
_charsets.add(Charset(239, "utf8mb4", "utf8mb4_roman_ci"))
_charsets.add(Charset(240, "utf8mb4", "utf8mb4_persian_ci"))
_charsets.add(Charset(241, "utf8mb4", "utf8mb4_esperanto_ci"))
_charsets.add(Charset(242, "utf8mb4", "utf8mb4_hungarian_ci"))
_charsets.add(Charset(243, "utf8mb4", "utf8mb4_sinhala_ci"))
_charsets.add(Charset(244, "utf8mb4", "utf8mb4_german2_ci"))
_charsets.add(Charset(245, "utf8mb4", "utf8mb4_croatian_ci"))
_charsets.add(Charset(246, "utf8mb4", "utf8mb4_unicode_520_ci"))
_charsets.add(Charset(247, "utf8mb4", "utf8mb4_vietnamese_ci"))
_charsets.add(Charset(248, "gb18030", "gb18030_chinese_ci", True))
_charsets.add(Charset(249, "gb18030", "gb18030_bin"))
_charsets.add(Charset(250, "gb18030", "gb18030_unicode_520_ci"))
_charsets.add(Charset(255, "utf8mb4", "utf8mb4_0900_ai_ci"))
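An editorial sketch (not part of the diff) of how the registry defined at the top of this file resolves lookups once the table above has been loaded:

```python
assert charset_by_id(45).name == "utf8mb4"                        # id 45 is the default utf8mb4 entry
assert charset_by_name("utf8").collation == "utf8mb4_general_ci"  # "utf8" is aliased to utf8mb4
assert charset_by_name("latin1").encoding == "cp1252"             # encoding maps to the Python codec name
```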

File diff suppressed because it is too large


@@ -0,0 +1,38 @@
# https://dev.mysql.com/doc/internals/en/capability-flags.html#packet-Protocol::CapabilityFlags
LONG_PASSWORD = 1
FOUND_ROWS = 1 << 1
LONG_FLAG = 1 << 2
CONNECT_WITH_DB = 1 << 3
NO_SCHEMA = 1 << 4
COMPRESS = 1 << 5
ODBC = 1 << 6
LOCAL_FILES = 1 << 7
IGNORE_SPACE = 1 << 8
PROTOCOL_41 = 1 << 9
INTERACTIVE = 1 << 10
SSL = 1 << 11
IGNORE_SIGPIPE = 1 << 12
TRANSACTIONS = 1 << 13
SECURE_CONNECTION = 1 << 15
MULTI_STATEMENTS = 1 << 16
MULTI_RESULTS = 1 << 17
PS_MULTI_RESULTS = 1 << 18
PLUGIN_AUTH = 1 << 19
CONNECT_ATTRS = 1 << 20
PLUGIN_AUTH_LENENC_CLIENT_DATA = 1 << 21
CAPABILITIES = (
LONG_PASSWORD
| LONG_FLAG
| PROTOCOL_41
| TRANSACTIONS
| SECURE_CONNECTION
| MULTI_RESULTS
| PLUGIN_AUTH
| PLUGIN_AUTH_LENENC_CLIENT_DATA
| CONNECT_ATTRS
)
# Not done yet
HANDLE_EXPIRED_PASSWORDS = 1 << 22
SESSION_TRACK = 1 << 23
DEPRECATE_EOF = 1 << 24
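An editorial sketch (not part of the diff) of how these flags are used: the client ORs the capabilities it supports into the handshake, and support is checked with a bitwise AND; adding CONNECT_WITH_DB below is only an example.

```python
client_flag = CAPABILITIES | CONNECT_WITH_DB   # e.g. when a default database name is supplied
assert client_flag & PLUGIN_AUTH               # plugin authentication will be negotiated
assert not (client_flag & SSL)                 # the TLS capability is added separately when configured
```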


@@ -0,0 +1,32 @@
COM_SLEEP = 0x00
COM_QUIT = 0x01
COM_INIT_DB = 0x02
COM_QUERY = 0x03
COM_FIELD_LIST = 0x04
COM_CREATE_DB = 0x05
COM_DROP_DB = 0x06
COM_REFRESH = 0x07
COM_SHUTDOWN = 0x08
COM_STATISTICS = 0x09
COM_PROCESS_INFO = 0x0A
COM_CONNECT = 0x0B
COM_PROCESS_KILL = 0x0C
COM_DEBUG = 0x0D
COM_PING = 0x0E
COM_TIME = 0x0F
COM_DELAYED_INSERT = 0x10
COM_CHANGE_USER = 0x11
COM_BINLOG_DUMP = 0x12
COM_TABLE_DUMP = 0x13
COM_CONNECT_OUT = 0x14
COM_REGISTER_SLAVE = 0x15
COM_STMT_PREPARE = 0x16
COM_STMT_EXECUTE = 0x17
COM_STMT_SEND_LONG_DATA = 0x18
COM_STMT_CLOSE = 0x19
COM_STMT_RESET = 0x1A
COM_SET_OPTION = 0x1B
COM_STMT_FETCH = 0x1C
COM_DAEMON = 0x1D
COM_BINLOG_DUMP_GTID = 0x1E
COM_END = 0x1F


@@ -0,0 +1,79 @@
# flake8: noqa
# errmsg.h
CR_ERROR_FIRST = 2000
CR_UNKNOWN_ERROR = 2000
CR_SOCKET_CREATE_ERROR = 2001
CR_CONNECTION_ERROR = 2002
CR_CONN_HOST_ERROR = 2003
CR_IPSOCK_ERROR = 2004
CR_UNKNOWN_HOST = 2005
CR_SERVER_GONE_ERROR = 2006
CR_VERSION_ERROR = 2007
CR_OUT_OF_MEMORY = 2008
CR_WRONG_HOST_INFO = 2009
CR_LOCALHOST_CONNECTION = 2010
CR_TCP_CONNECTION = 2011
CR_SERVER_HANDSHAKE_ERR = 2012
CR_SERVER_LOST = 2013
CR_COMMANDS_OUT_OF_SYNC = 2014
CR_NAMEDPIPE_CONNECTION = 2015
CR_NAMEDPIPEWAIT_ERROR = 2016
CR_NAMEDPIPEOPEN_ERROR = 2017
CR_NAMEDPIPESETSTATE_ERROR = 2018
CR_CANT_READ_CHARSET = 2019
CR_NET_PACKET_TOO_LARGE = 2020
CR_EMBEDDED_CONNECTION = 2021
CR_PROBE_SLAVE_STATUS = 2022
CR_PROBE_SLAVE_HOSTS = 2023
CR_PROBE_SLAVE_CONNECT = 2024
CR_PROBE_MASTER_CONNECT = 2025
CR_SSL_CONNECTION_ERROR = 2026
CR_MALFORMED_PACKET = 2027
CR_WRONG_LICENSE = 2028
CR_NULL_POINTER = 2029
CR_NO_PREPARE_STMT = 2030
CR_PARAMS_NOT_BOUND = 2031
CR_DATA_TRUNCATED = 2032
CR_NO_PARAMETERS_EXISTS = 2033
CR_INVALID_PARAMETER_NO = 2034
CR_INVALID_BUFFER_USE = 2035
CR_UNSUPPORTED_PARAM_TYPE = 2036
CR_SHARED_MEMORY_CONNECTION = 2037
CR_SHARED_MEMORY_CONNECT_REQUEST_ERROR = 2038
CR_SHARED_MEMORY_CONNECT_ANSWER_ERROR = 2039
CR_SHARED_MEMORY_CONNECT_FILE_MAP_ERROR = 2040
CR_SHARED_MEMORY_CONNECT_MAP_ERROR = 2041
CR_SHARED_MEMORY_FILE_MAP_ERROR = 2042
CR_SHARED_MEMORY_MAP_ERROR = 2043
CR_SHARED_MEMORY_EVENT_ERROR = 2044
CR_SHARED_MEMORY_CONNECT_ABANDONED_ERROR = 2045
CR_SHARED_MEMORY_CONNECT_SET_ERROR = 2046
CR_CONN_UNKNOW_PROTOCOL = 2047
CR_INVALID_CONN_HANDLE = 2048
CR_SECURE_AUTH = 2049
CR_FETCH_CANCELED = 2050
CR_NO_DATA = 2051
CR_NO_STMT_METADATA = 2052
CR_NO_RESULT_SET = 2053
CR_NOT_IMPLEMENTED = 2054
CR_SERVER_LOST_EXTENDED = 2055
CR_STMT_CLOSED = 2056
CR_NEW_STMT_METADATA = 2057
CR_ALREADY_CONNECTED = 2058
CR_AUTH_PLUGIN_CANNOT_LOAD = 2059
CR_DUPLICATE_CONNECTION_ATTR = 2060
CR_AUTH_PLUGIN_ERR = 2061
CR_INSECURE_API_ERR = 2062
CR_FILE_NAME_TOO_LONG = 2063
CR_SSL_FIPS_MODE_ERR = 2064
CR_DEPRECATED_COMPRESSION_NOT_SUPPORTED = 2065
CR_COMPRESSION_WRONGLY_CONFIGURED = 2066
CR_KERBEROS_USER_NOT_FOUND = 2067
CR_LOAD_DATA_LOCAL_INFILE_REJECTED = 2068
CR_LOAD_DATA_LOCAL_INFILE_REALPATH_FAIL = 2069
CR_DNS_SRV_LOOKUP_FAILED = 2070
CR_MANDATORY_TRACKER_NOT_FOUND = 2071
CR_INVALID_FACTOR_NO = 2072
CR_ERROR_LAST = 2072


@@ -0,0 +1,477 @@
ERROR_FIRST = 1000
HASHCHK = 1000
NISAMCHK = 1001
NO = 1002
YES = 1003
CANT_CREATE_FILE = 1004
CANT_CREATE_TABLE = 1005
CANT_CREATE_DB = 1006
DB_CREATE_EXISTS = 1007
DB_DROP_EXISTS = 1008
DB_DROP_DELETE = 1009
DB_DROP_RMDIR = 1010
CANT_DELETE_FILE = 1011
CANT_FIND_SYSTEM_REC = 1012
CANT_GET_STAT = 1013
CANT_GET_WD = 1014
CANT_LOCK = 1015
CANT_OPEN_FILE = 1016
FILE_NOT_FOUND = 1017
CANT_READ_DIR = 1018
CANT_SET_WD = 1019
CHECKREAD = 1020
DISK_FULL = 1021
DUP_KEY = 1022
ERROR_ON_CLOSE = 1023
ERROR_ON_READ = 1024
ERROR_ON_RENAME = 1025
ERROR_ON_WRITE = 1026
FILE_USED = 1027
FILSORT_ABORT = 1028
FORM_NOT_FOUND = 1029
GET_ERRNO = 1030
ILLEGAL_HA = 1031
KEY_NOT_FOUND = 1032
NOT_FORM_FILE = 1033
NOT_KEYFILE = 1034
OLD_KEYFILE = 1035
OPEN_AS_READONLY = 1036
OUTOFMEMORY = 1037
OUT_OF_SORTMEMORY = 1038
UNEXPECTED_EOF = 1039
CON_COUNT_ERROR = 1040
OUT_OF_RESOURCES = 1041
BAD_HOST_ERROR = 1042
HANDSHAKE_ERROR = 1043
DBACCESS_DENIED_ERROR = 1044
ACCESS_DENIED_ERROR = 1045
NO_DB_ERROR = 1046
UNKNOWN_COM_ERROR = 1047
BAD_NULL_ERROR = 1048
BAD_DB_ERROR = 1049
TABLE_EXISTS_ERROR = 1050
BAD_TABLE_ERROR = 1051
NON_UNIQ_ERROR = 1052
SERVER_SHUTDOWN = 1053
BAD_FIELD_ERROR = 1054
WRONG_FIELD_WITH_GROUP = 1055
WRONG_GROUP_FIELD = 1056
WRONG_SUM_SELECT = 1057
WRONG_VALUE_COUNT = 1058
TOO_LONG_IDENT = 1059
DUP_FIELDNAME = 1060
DUP_KEYNAME = 1061
DUP_ENTRY = 1062
WRONG_FIELD_SPEC = 1063
PARSE_ERROR = 1064
EMPTY_QUERY = 1065
NONUNIQ_TABLE = 1066
INVALID_DEFAULT = 1067
MULTIPLE_PRI_KEY = 1068
TOO_MANY_KEYS = 1069
TOO_MANY_KEY_PARTS = 1070
TOO_LONG_KEY = 1071
KEY_COLUMN_DOES_NOT_EXITS = 1072
BLOB_USED_AS_KEY = 1073
TOO_BIG_FIELDLENGTH = 1074
WRONG_AUTO_KEY = 1075
READY = 1076
NORMAL_SHUTDOWN = 1077
GOT_SIGNAL = 1078
SHUTDOWN_COMPLETE = 1079
FORCING_CLOSE = 1080
IPSOCK_ERROR = 1081
NO_SUCH_INDEX = 1082
WRONG_FIELD_TERMINATORS = 1083
BLOBS_AND_NO_TERMINATED = 1084
TEXTFILE_NOT_READABLE = 1085
FILE_EXISTS_ERROR = 1086
LOAD_INFO = 1087
ALTER_INFO = 1088
WRONG_SUB_KEY = 1089
CANT_REMOVE_ALL_FIELDS = 1090
CANT_DROP_FIELD_OR_KEY = 1091
INSERT_INFO = 1092
UPDATE_TABLE_USED = 1093
NO_SUCH_THREAD = 1094
KILL_DENIED_ERROR = 1095
NO_TABLES_USED = 1096
TOO_BIG_SET = 1097
NO_UNIQUE_LOGFILE = 1098
TABLE_NOT_LOCKED_FOR_WRITE = 1099
TABLE_NOT_LOCKED = 1100
BLOB_CANT_HAVE_DEFAULT = 1101
WRONG_DB_NAME = 1102
WRONG_TABLE_NAME = 1103
TOO_BIG_SELECT = 1104
UNKNOWN_ERROR = 1105
UNKNOWN_PROCEDURE = 1106
WRONG_PARAMCOUNT_TO_PROCEDURE = 1107
WRONG_PARAMETERS_TO_PROCEDURE = 1108
UNKNOWN_TABLE = 1109
FIELD_SPECIFIED_TWICE = 1110
INVALID_GROUP_FUNC_USE = 1111
UNSUPPORTED_EXTENSION = 1112
TABLE_MUST_HAVE_COLUMNS = 1113
RECORD_FILE_FULL = 1114
UNKNOWN_CHARACTER_SET = 1115
TOO_MANY_TABLES = 1116
TOO_MANY_FIELDS = 1117
TOO_BIG_ROWSIZE = 1118
STACK_OVERRUN = 1119
WRONG_OUTER_JOIN = 1120
NULL_COLUMN_IN_INDEX = 1121
CANT_FIND_UDF = 1122
CANT_INITIALIZE_UDF = 1123
UDF_NO_PATHS = 1124
UDF_EXISTS = 1125
CANT_OPEN_LIBRARY = 1126
CANT_FIND_DL_ENTRY = 1127
FUNCTION_NOT_DEFINED = 1128
HOST_IS_BLOCKED = 1129
HOST_NOT_PRIVILEGED = 1130
PASSWORD_ANONYMOUS_USER = 1131
PASSWORD_NOT_ALLOWED = 1132
PASSWORD_NO_MATCH = 1133
UPDATE_INFO = 1134
CANT_CREATE_THREAD = 1135
WRONG_VALUE_COUNT_ON_ROW = 1136
CANT_REOPEN_TABLE = 1137
INVALID_USE_OF_NULL = 1138
REGEXP_ERROR = 1139
MIX_OF_GROUP_FUNC_AND_FIELDS = 1140
NONEXISTING_GRANT = 1141
TABLEACCESS_DENIED_ERROR = 1142
COLUMNACCESS_DENIED_ERROR = 1143
ILLEGAL_GRANT_FOR_TABLE = 1144
GRANT_WRONG_HOST_OR_USER = 1145
NO_SUCH_TABLE = 1146
NONEXISTING_TABLE_GRANT = 1147
NOT_ALLOWED_COMMAND = 1148
SYNTAX_ERROR = 1149
DELAYED_CANT_CHANGE_LOCK = 1150
TOO_MANY_DELAYED_THREADS = 1151
ABORTING_CONNECTION = 1152
NET_PACKET_TOO_LARGE = 1153
NET_READ_ERROR_FROM_PIPE = 1154
NET_FCNTL_ERROR = 1155
NET_PACKETS_OUT_OF_ORDER = 1156
NET_UNCOMPRESS_ERROR = 1157
NET_READ_ERROR = 1158
NET_READ_INTERRUPTED = 1159
NET_ERROR_ON_WRITE = 1160
NET_WRITE_INTERRUPTED = 1161
TOO_LONG_STRING = 1162
TABLE_CANT_HANDLE_BLOB = 1163
TABLE_CANT_HANDLE_AUTO_INCREMENT = 1164
DELAYED_INSERT_TABLE_LOCKED = 1165
WRONG_COLUMN_NAME = 1166
WRONG_KEY_COLUMN = 1167
WRONG_MRG_TABLE = 1168
DUP_UNIQUE = 1169
BLOB_KEY_WITHOUT_LENGTH = 1170
PRIMARY_CANT_HAVE_NULL = 1171
TOO_MANY_ROWS = 1172
REQUIRES_PRIMARY_KEY = 1173
NO_RAID_COMPILED = 1174
UPDATE_WITHOUT_KEY_IN_SAFE_MODE = 1175
KEY_DOES_NOT_EXITS = 1176
CHECK_NO_SUCH_TABLE = 1177
CHECK_NOT_IMPLEMENTED = 1178
CANT_DO_THIS_DURING_AN_TRANSACTION = 1179
ERROR_DURING_COMMIT = 1180
ERROR_DURING_ROLLBACK = 1181
ERROR_DURING_FLUSH_LOGS = 1182
ERROR_DURING_CHECKPOINT = 1183
NEW_ABORTING_CONNECTION = 1184
DUMP_NOT_IMPLEMENTED = 1185
FLUSH_MASTER_BINLOG_CLOSED = 1186
INDEX_REBUILD = 1187
MASTER = 1188
MASTER_NET_READ = 1189
MASTER_NET_WRITE = 1190
FT_MATCHING_KEY_NOT_FOUND = 1191
LOCK_OR_ACTIVE_TRANSACTION = 1192
UNKNOWN_SYSTEM_VARIABLE = 1193
CRASHED_ON_USAGE = 1194
CRASHED_ON_REPAIR = 1195
WARNING_NOT_COMPLETE_ROLLBACK = 1196
TRANS_CACHE_FULL = 1197
SLAVE_MUST_STOP = 1198
SLAVE_NOT_RUNNING = 1199
BAD_SLAVE = 1200
MASTER_INFO = 1201
SLAVE_THREAD = 1202
TOO_MANY_USER_CONNECTIONS = 1203
SET_CONSTANTS_ONLY = 1204
LOCK_WAIT_TIMEOUT = 1205
LOCK_TABLE_FULL = 1206
READ_ONLY_TRANSACTION = 1207
DROP_DB_WITH_READ_LOCK = 1208
CREATE_DB_WITH_READ_LOCK = 1209
WRONG_ARGUMENTS = 1210
NO_PERMISSION_TO_CREATE_USER = 1211
UNION_TABLES_IN_DIFFERENT_DIR = 1212
LOCK_DEADLOCK = 1213
TABLE_CANT_HANDLE_FT = 1214
CANNOT_ADD_FOREIGN = 1215
NO_REFERENCED_ROW = 1216
ROW_IS_REFERENCED = 1217
CONNECT_TO_MASTER = 1218
QUERY_ON_MASTER = 1219
ERROR_WHEN_EXECUTING_COMMAND = 1220
WRONG_USAGE = 1221
WRONG_NUMBER_OF_COLUMNS_IN_SELECT = 1222
CANT_UPDATE_WITH_READLOCK = 1223
MIXING_NOT_ALLOWED = 1224
DUP_ARGUMENT = 1225
USER_LIMIT_REACHED = 1226
SPECIFIC_ACCESS_DENIED_ERROR = 1227
LOCAL_VARIABLE = 1228
GLOBAL_VARIABLE = 1229
NO_DEFAULT = 1230
WRONG_VALUE_FOR_VAR = 1231
WRONG_TYPE_FOR_VAR = 1232
VAR_CANT_BE_READ = 1233
CANT_USE_OPTION_HERE = 1234
NOT_SUPPORTED_YET = 1235
MASTER_FATAL_ERROR_READING_BINLOG = 1236
SLAVE_IGNORED_TABLE = 1237
INCORRECT_GLOBAL_LOCAL_VAR = 1238
WRONG_FK_DEF = 1239
KEY_REF_DO_NOT_MATCH_TABLE_REF = 1240
OPERAND_COLUMNS = 1241
SUBQUERY_NO_1_ROW = 1242
UNKNOWN_STMT_HANDLER = 1243
CORRUPT_HELP_DB = 1244
CYCLIC_REFERENCE = 1245
AUTO_CONVERT = 1246
ILLEGAL_REFERENCE = 1247
DERIVED_MUST_HAVE_ALIAS = 1248
SELECT_REDUCED = 1249
TABLENAME_NOT_ALLOWED_HERE = 1250
NOT_SUPPORTED_AUTH_MODE = 1251
SPATIAL_CANT_HAVE_NULL = 1252
COLLATION_CHARSET_MISMATCH = 1253
SLAVE_WAS_RUNNING = 1254
SLAVE_WAS_NOT_RUNNING = 1255
TOO_BIG_FOR_UNCOMPRESS = 1256
ZLIB_Z_MEM_ERROR = 1257
ZLIB_Z_BUF_ERROR = 1258
ZLIB_Z_DATA_ERROR = 1259
CUT_VALUE_GROUP_CONCAT = 1260
WARN_TOO_FEW_RECORDS = 1261
WARN_TOO_MANY_RECORDS = 1262
WARN_NULL_TO_NOTNULL = 1263
WARN_DATA_OUT_OF_RANGE = 1264
WARN_DATA_TRUNCATED = 1265
WARN_USING_OTHER_HANDLER = 1266
CANT_AGGREGATE_2COLLATIONS = 1267
DROP_USER = 1268
REVOKE_GRANTS = 1269
CANT_AGGREGATE_3COLLATIONS = 1270
CANT_AGGREGATE_NCOLLATIONS = 1271
VARIABLE_IS_NOT_STRUCT = 1272
UNKNOWN_COLLATION = 1273
SLAVE_IGNORED_SSL_PARAMS = 1274
SERVER_IS_IN_SECURE_AUTH_MODE = 1275
WARN_FIELD_RESOLVED = 1276
BAD_SLAVE_UNTIL_COND = 1277
MISSING_SKIP_SLAVE = 1278
UNTIL_COND_IGNORED = 1279
WRONG_NAME_FOR_INDEX = 1280
WRONG_NAME_FOR_CATALOG = 1281
WARN_QC_RESIZE = 1282
BAD_FT_COLUMN = 1283
UNKNOWN_KEY_CACHE = 1284
WARN_HOSTNAME_WONT_WORK = 1285
UNKNOWN_STORAGE_ENGINE = 1286
WARN_DEPRECATED_SYNTAX = 1287
NON_UPDATABLE_TABLE = 1288
FEATURE_DISABLED = 1289
OPTION_PREVENTS_STATEMENT = 1290
DUPLICATED_VALUE_IN_TYPE = 1291
TRUNCATED_WRONG_VALUE = 1292
TOO_MUCH_AUTO_TIMESTAMP_COLS = 1293
INVALID_ON_UPDATE = 1294
UNSUPPORTED_PS = 1295
GET_ERRMSG = 1296
GET_TEMPORARY_ERRMSG = 1297
UNKNOWN_TIME_ZONE = 1298
WARN_INVALID_TIMESTAMP = 1299
INVALID_CHARACTER_STRING = 1300
WARN_ALLOWED_PACKET_OVERFLOWED = 1301
CONFLICTING_DECLARATIONS = 1302
SP_NO_RECURSIVE_CREATE = 1303
SP_ALREADY_EXISTS = 1304
SP_DOES_NOT_EXIST = 1305
SP_DROP_FAILED = 1306
SP_STORE_FAILED = 1307
SP_LILABEL_MISMATCH = 1308
SP_LABEL_REDEFINE = 1309
SP_LABEL_MISMATCH = 1310
SP_UNINIT_VAR = 1311
SP_BADSELECT = 1312
SP_BADRETURN = 1313
SP_BADSTATEMENT = 1314
UPDATE_LOG_DEPRECATED_IGNORED = 1315
UPDATE_LOG_DEPRECATED_TRANSLATED = 1316
QUERY_INTERRUPTED = 1317
SP_WRONG_NO_OF_ARGS = 1318
SP_COND_MISMATCH = 1319
SP_NORETURN = 1320
SP_NORETURNEND = 1321
SP_BAD_CURSOR_QUERY = 1322
SP_BAD_CURSOR_SELECT = 1323
SP_CURSOR_MISMATCH = 1324
SP_CURSOR_ALREADY_OPEN = 1325
SP_CURSOR_NOT_OPEN = 1326
SP_UNDECLARED_VAR = 1327
SP_WRONG_NO_OF_FETCH_ARGS = 1328
SP_FETCH_NO_DATA = 1329
SP_DUP_PARAM = 1330
SP_DUP_VAR = 1331
SP_DUP_COND = 1332
SP_DUP_CURS = 1333
SP_CANT_ALTER = 1334
SP_SUBSELECT_NYI = 1335
STMT_NOT_ALLOWED_IN_SF_OR_TRG = 1336
SP_VARCOND_AFTER_CURSHNDLR = 1337
SP_CURSOR_AFTER_HANDLER = 1338
SP_CASE_NOT_FOUND = 1339
FPARSER_TOO_BIG_FILE = 1340
FPARSER_BAD_HEADER = 1341
FPARSER_EOF_IN_COMMENT = 1342
FPARSER_ERROR_IN_PARAMETER = 1343
FPARSER_EOF_IN_UNKNOWN_PARAMETER = 1344
VIEW_NO_EXPLAIN = 1345
FRM_UNKNOWN_TYPE = 1346
WRONG_OBJECT = 1347
NONUPDATEABLE_COLUMN = 1348
VIEW_SELECT_DERIVED = 1349
VIEW_SELECT_CLAUSE = 1350
VIEW_SELECT_VARIABLE = 1351
VIEW_SELECT_TMPTABLE = 1352
VIEW_WRONG_LIST = 1353
WARN_VIEW_MERGE = 1354
WARN_VIEW_WITHOUT_KEY = 1355
VIEW_INVALID = 1356
SP_NO_DROP_SP = 1357
SP_GOTO_IN_HNDLR = 1358
TRG_ALREADY_EXISTS = 1359
TRG_DOES_NOT_EXIST = 1360
TRG_ON_VIEW_OR_TEMP_TABLE = 1361
TRG_CANT_CHANGE_ROW = 1362
TRG_NO_SUCH_ROW_IN_TRG = 1363
NO_DEFAULT_FOR_FIELD = 1364
DIVISION_BY_ZERO = 1365
TRUNCATED_WRONG_VALUE_FOR_FIELD = 1366
ILLEGAL_VALUE_FOR_TYPE = 1367
VIEW_NONUPD_CHECK = 1368
VIEW_CHECK_FAILED = 1369
PROCACCESS_DENIED_ERROR = 1370
RELAY_LOG_FAIL = 1371
PASSWD_LENGTH = 1372
UNKNOWN_TARGET_BINLOG = 1373
IO_ERR_LOG_INDEX_READ = 1374
BINLOG_PURGE_PROHIBITED = 1375
FSEEK_FAIL = 1376
BINLOG_PURGE_FATAL_ERR = 1377
LOG_IN_USE = 1378
LOG_PURGE_UNKNOWN_ERR = 1379
RELAY_LOG_INIT = 1380
NO_BINARY_LOGGING = 1381
RESERVED_SYNTAX = 1382
WSAS_FAILED = 1383
DIFF_GROUPS_PROC = 1384
NO_GROUP_FOR_PROC = 1385
ORDER_WITH_PROC = 1386
LOGGING_PROHIBIT_CHANGING_OF = 1387
NO_FILE_MAPPING = 1388
WRONG_MAGIC = 1389
PS_MANY_PARAM = 1390
KEY_PART_0 = 1391
VIEW_CHECKSUM = 1392
VIEW_MULTIUPDATE = 1393
VIEW_NO_INSERT_FIELD_LIST = 1394
VIEW_DELETE_MERGE_VIEW = 1395
CANNOT_USER = 1396
XAER_NOTA = 1397
XAER_INVAL = 1398
XAER_RMFAIL = 1399
XAER_OUTSIDE = 1400
XAER_RMERR = 1401
XA_RBROLLBACK = 1402
NONEXISTING_PROC_GRANT = 1403
PROC_AUTO_GRANT_FAIL = 1404
PROC_AUTO_REVOKE_FAIL = 1405
DATA_TOO_LONG = 1406
SP_BAD_SQLSTATE = 1407
STARTUP = 1408
LOAD_FROM_FIXED_SIZE_ROWS_TO_VAR = 1409
CANT_CREATE_USER_WITH_GRANT = 1410
WRONG_VALUE_FOR_TYPE = 1411
TABLE_DEF_CHANGED = 1412
SP_DUP_HANDLER = 1413
SP_NOT_VAR_ARG = 1414
SP_NO_RETSET = 1415
CANT_CREATE_GEOMETRY_OBJECT = 1416
FAILED_ROUTINE_BREAK_BINLOG = 1417
BINLOG_UNSAFE_ROUTINE = 1418
BINLOG_CREATE_ROUTINE_NEED_SUPER = 1419
EXEC_STMT_WITH_OPEN_CURSOR = 1420
STMT_HAS_NO_OPEN_CURSOR = 1421
COMMIT_NOT_ALLOWED_IN_SF_OR_TRG = 1422
NO_DEFAULT_FOR_VIEW_FIELD = 1423
SP_NO_RECURSION = 1424
TOO_BIG_SCALE = 1425
TOO_BIG_PRECISION = 1426
M_BIGGER_THAN_D = 1427
WRONG_LOCK_OF_SYSTEM_TABLE = 1428
CONNECT_TO_FOREIGN_DATA_SOURCE = 1429
QUERY_ON_FOREIGN_DATA_SOURCE = 1430
FOREIGN_DATA_SOURCE_DOESNT_EXIST = 1431
FOREIGN_DATA_STRING_INVALID_CANT_CREATE = 1432
FOREIGN_DATA_STRING_INVALID = 1433
CANT_CREATE_FEDERATED_TABLE = 1434
TRG_IN_WRONG_SCHEMA = 1435
STACK_OVERRUN_NEED_MORE = 1436
TOO_LONG_BODY = 1437
WARN_CANT_DROP_DEFAULT_KEYCACHE = 1438
TOO_BIG_DISPLAYWIDTH = 1439
XAER_DUPID = 1440
DATETIME_FUNCTION_OVERFLOW = 1441
CANT_UPDATE_USED_TABLE_IN_SF_OR_TRG = 1442
VIEW_PREVENT_UPDATE = 1443
PS_NO_RECURSION = 1444
SP_CANT_SET_AUTOCOMMIT = 1445
MALFORMED_DEFINER = 1446
VIEW_FRM_NO_USER = 1447
VIEW_OTHER_USER = 1448
NO_SUCH_USER = 1449
FORBID_SCHEMA_CHANGE = 1450
ROW_IS_REFERENCED_2 = 1451
NO_REFERENCED_ROW_2 = 1452
SP_BAD_VAR_SHADOW = 1453
TRG_NO_DEFINER = 1454
OLD_FILE_FORMAT = 1455
SP_RECURSION_LIMIT = 1456
SP_PROC_TABLE_CORRUPT = 1457
SP_WRONG_NAME = 1458
TABLE_NEEDS_UPGRADE = 1459
SP_NO_AGGREGATE = 1460
MAX_PREPARED_STMT_COUNT_REACHED = 1461
VIEW_RECURSIVE = 1462
NON_GROUPING_FIELD_USED = 1463
TABLE_CANT_HANDLE_SPKEYS = 1464
NO_TRIGGERS_ON_SYSTEM_SCHEMA = 1465
USERNAME = 1466
HOSTNAME = 1467
WRONG_STRING_LENGTH = 1468
ERROR_LAST = 1468
# MariaDB only
STATEMENT_TIMEOUT = 1969
QUERY_TIMEOUT = 3024
# https://github.com/PyMySQL/PyMySQL/issues/607
CONSTRAINT_FAILED = 4025
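Illustrative note, not part of the vendored file: callers typically compare the first element of a raised exception's args against these codes. A minimal sketch, assuming the vendored package is importable as pymysql and using placeholder connection parameters:

import pymysql
from pymysql.constants import ER

try:
    # Placeholder credentials, chosen to fail authentication for the example.
    conn = pymysql.connect(host="127.0.0.1", user="root", password="wrong-password")
except pymysql.err.OperationalError as exc:
    errno = exc.args[0]
    if errno == ER.ACCESS_DENIED_ERROR:
        print("bad credentials")
    else:
        raise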

View file

@ -0,0 +1,31 @@
DECIMAL = 0
TINY = 1
SHORT = 2
LONG = 3
FLOAT = 4
DOUBLE = 5
NULL = 6
TIMESTAMP = 7
LONGLONG = 8
INT24 = 9
DATE = 10
TIME = 11
DATETIME = 12
YEAR = 13
NEWDATE = 14
VARCHAR = 15
BIT = 16
JSON = 245
NEWDECIMAL = 246
ENUM = 247
SET = 248
TINY_BLOB = 249
MEDIUM_BLOB = 250
LONG_BLOB = 251
BLOB = 252
VAR_STRING = 253
STRING = 254
GEOMETRY = 255
CHAR = TINY
INTERVAL = ENUM

View file

@ -0,0 +1,15 @@
NOT_NULL = 1
PRI_KEY = 2
UNIQUE_KEY = 4
MULTIPLE_KEY = 8
BLOB = 16
UNSIGNED = 32
ZEROFILL = 64
BINARY = 128
ENUM = 256
AUTO_INCREMENT = 512
TIMESTAMP = 1024
SET = 2048
PART_KEY = 16384
GROUP = 32767
UNIQUE = 65536

View file

@ -0,0 +1,10 @@
SERVER_STATUS_IN_TRANS = 1
SERVER_STATUS_AUTOCOMMIT = 2
SERVER_MORE_RESULTS_EXISTS = 8
SERVER_QUERY_NO_GOOD_INDEX_USED = 16
SERVER_QUERY_NO_INDEX_USED = 32
SERVER_STATUS_CURSOR_EXISTS = 64
SERVER_STATUS_LAST_ROW_SENT = 128
SERVER_STATUS_DB_DROPPED = 256
SERVER_STATUS_NO_BACKSLASH_ESCAPES = 512
SERVER_STATUS_METADATA_CHANGED = 1024
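These values are bit flags OR-ed into a single status word; the packet wrappers later in this commit test them with a bitwise AND. A minimal sketch with an assumed status value:

SERVER_STATUS_AUTOCOMMIT = 2
SERVER_MORE_RESULTS_EXISTS = 8

status_word = 0x0A  # assumed example: autocommit set and more result sets pending
print(bool(status_word & SERVER_MORE_RESULTS_EXISTS))  # True
print(bool(status_word & SERVER_STATUS_AUTOCOMMIT))    # True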

View file

@ -0,0 +1,363 @@
import datetime
from decimal import Decimal
import re
import time
from .err import ProgrammingError
from .constants import FIELD_TYPE
def escape_item(val, charset, mapping=None):
if mapping is None:
mapping = encoders
encoder = mapping.get(type(val))
# Fallback to default when no encoder found
if not encoder:
try:
encoder = mapping[str]
except KeyError:
raise TypeError("no default type converter defined")
if encoder in (escape_dict, escape_sequence):
val = encoder(val, charset, mapping)
else:
val = encoder(val, mapping)
return val
def escape_dict(val, charset, mapping=None):
raise TypeError("dict can not be used as parameter")
def escape_sequence(val, charset, mapping=None):
n = []
for item in val:
quoted = escape_item(item, charset, mapping)
n.append(quoted)
return "(" + ",".join(n) + ")"
def escape_set(val, charset, mapping=None):
return ",".join([escape_item(x, charset, mapping) for x in val])
def escape_bool(value, mapping=None):
return str(int(value))
def escape_int(value, mapping=None):
return str(value)
def escape_float(value, mapping=None):
s = repr(value)
if s in ("inf", "-inf", "nan"):
raise ProgrammingError("%s can not be used with MySQL" % s)
if "e" not in s:
s += "e0"
return s
_escape_table = [chr(x) for x in range(128)]
_escape_table[0] = "\\0"
_escape_table[ord("\\")] = "\\\\"
_escape_table[ord("\n")] = "\\n"
_escape_table[ord("\r")] = "\\r"
_escape_table[ord("\032")] = "\\Z"
_escape_table[ord('"')] = '\\"'
_escape_table[ord("'")] = "\\'"
def escape_string(value, mapping=None):
"""escapes *value* without adding quote.
Value should be unicode
"""
return value.translate(_escape_table)
def escape_bytes_prefixed(value, mapping=None):
return "_binary'%s'" % value.decode("ascii", "surrogateescape").translate(
_escape_table
)
def escape_bytes(value, mapping=None):
return "'%s'" % value.decode("ascii", "surrogateescape").translate(_escape_table)
def escape_str(value, mapping=None):
return "'%s'" % escape_string(str(value), mapping)
def escape_None(value, mapping=None):
return "NULL"
def escape_timedelta(obj, mapping=None):
seconds = int(obj.seconds) % 60
minutes = int(obj.seconds // 60) % 60
hours = int(obj.seconds // 3600) % 24 + int(obj.days) * 24
if obj.microseconds:
fmt = "'{0:02d}:{1:02d}:{2:02d}.{3:06d}'"
else:
fmt = "'{0:02d}:{1:02d}:{2:02d}'"
return fmt.format(hours, minutes, seconds, obj.microseconds)
def escape_time(obj, mapping=None):
if obj.microsecond:
fmt = "'{0.hour:02}:{0.minute:02}:{0.second:02}.{0.microsecond:06}'"
else:
fmt = "'{0.hour:02}:{0.minute:02}:{0.second:02}'"
return fmt.format(obj)
def escape_datetime(obj, mapping=None):
if obj.microsecond:
fmt = (
"'{0.year:04}-{0.month:02}-{0.day:02}"
+ " {0.hour:02}:{0.minute:02}:{0.second:02}.{0.microsecond:06}'"
)
else:
fmt = "'{0.year:04}-{0.month:02}-{0.day:02} {0.hour:02}:{0.minute:02}:{0.second:02}'"
return fmt.format(obj)
def escape_date(obj, mapping=None):
fmt = "'{0.year:04}-{0.month:02}-{0.day:02}'"
return fmt.format(obj)
def escape_struct_time(obj, mapping=None):
return escape_datetime(datetime.datetime(*obj[:6]))
def Decimal2Literal(o, d):
return format(o, "f")
def _convert_second_fraction(s):
if not s:
return 0
# Pad zeros to ensure the fraction length in microseconds
s = s.ljust(6, "0")
return int(s[:6])
DATETIME_RE = re.compile(
r"(\d{1,4})-(\d{1,2})-(\d{1,2})[T ](\d{1,2}):(\d{1,2}):(\d{1,2})(?:.(\d{1,6}))?"
)
def convert_datetime(obj):
"""Returns a DATETIME or TIMESTAMP column value as a datetime object:
>>> convert_datetime('2007-02-25 23:06:20')
datetime.datetime(2007, 2, 25, 23, 6, 20)
>>> convert_datetime('2007-02-25T23:06:20')
datetime.datetime(2007, 2, 25, 23, 6, 20)
Illegal values are returned as str:
>>> convert_datetime('2007-02-31T23:06:20')
'2007-02-31T23:06:20'
>>> convert_datetime('0000-00-00 00:00:00')
'0000-00-00 00:00:00'
"""
if isinstance(obj, (bytes, bytearray)):
obj = obj.decode("ascii")
m = DATETIME_RE.match(obj)
if not m:
return convert_date(obj)
try:
groups = list(m.groups())
groups[-1] = _convert_second_fraction(groups[-1])
return datetime.datetime(*[int(x) for x in groups])
except ValueError:
return convert_date(obj)
TIMEDELTA_RE = re.compile(r"(-)?(\d{1,3}):(\d{1,2}):(\d{1,2})(?:.(\d{1,6}))?")
def convert_timedelta(obj):
"""Returns a TIME column as a timedelta object:
>>> convert_timedelta('25:06:17')
datetime.timedelta(days=1, seconds=3977)
>>> convert_timedelta('-25:06:17')
datetime.timedelta(days=-2, seconds=82423)
Illegal values are returned as string:
>>> convert_timedelta('random crap')
'random crap'
Note that MySQL always returns TIME columns as (+|-)HH:MM:SS, but
can accept values as (+|-)DD HH:MM:SS. The latter format will not
be parsed correctly by this function.
"""
if isinstance(obj, (bytes, bytearray)):
obj = obj.decode("ascii")
m = TIMEDELTA_RE.match(obj)
if not m:
return obj
try:
groups = list(m.groups())
groups[-1] = _convert_second_fraction(groups[-1])
negate = -1 if groups[0] else 1
hours, minutes, seconds, microseconds = groups[1:]
tdelta = (
datetime.timedelta(
hours=int(hours),
minutes=int(minutes),
seconds=int(seconds),
microseconds=int(microseconds),
)
* negate
)
return tdelta
except ValueError:
return obj
TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})(?:.(\d{1,6}))?")
def convert_time(obj):
"""Returns a TIME column as a time object:
>>> convert_time('15:06:17')
datetime.time(15, 6, 17)
Illegal values are returned as str:
>>> convert_time('-25:06:17')
'-25:06:17'
>>> convert_time('random crap')
'random crap'
Note that MySQL always returns TIME columns as (+|-)HH:MM:SS, but
can accept values as (+|-)DD HH:MM:SS. The latter format will not
be parsed correctly by this function.
Also note that MySQL's TIME column corresponds more closely to
Python's timedelta and not time. However if you want TIME columns
to be treated as time-of-day and not a time offset, then you can
set this function as the converter for FIELD_TYPE.TIME.
"""
if isinstance(obj, (bytes, bytearray)):
obj = obj.decode("ascii")
m = TIME_RE.match(obj)
if not m:
return obj
try:
groups = list(m.groups())
groups[-1] = _convert_second_fraction(groups[-1])
hours, minutes, seconds, microseconds = groups
return datetime.time(
hour=int(hours),
minute=int(minutes),
second=int(seconds),
microsecond=int(microseconds),
)
except ValueError:
return obj
def convert_date(obj):
"""Returns a DATE column as a date object:
>>> convert_date('2007-02-26')
datetime.date(2007, 2, 26)
Illegal values are returned as str:
>>> convert_date('2007-02-31')
'2007-02-31'
>>> convert_date('0000-00-00')
'0000-00-00'
"""
if isinstance(obj, (bytes, bytearray)):
obj = obj.decode("ascii")
try:
return datetime.date(*[int(x) for x in obj.split("-", 2)])
except ValueError:
return obj
def through(x):
return x
# def convert_bit(b):
# b = "\x00" * (8 - len(b)) + b # pad w/ zeroes
# return struct.unpack(">Q", b)[0]
#
# the snippet above is right, but MySQLdb doesn't process bits,
# so we shouldn't either
convert_bit = through
encoders = {
bool: escape_bool,
int: escape_int,
float: escape_float,
str: escape_str,
bytes: escape_bytes,
tuple: escape_sequence,
list: escape_sequence,
set: escape_sequence,
frozenset: escape_sequence,
dict: escape_dict,
type(None): escape_None,
datetime.date: escape_date,
datetime.datetime: escape_datetime,
datetime.timedelta: escape_timedelta,
datetime.time: escape_time,
time.struct_time: escape_struct_time,
Decimal: Decimal2Literal,
}
decoders = {
FIELD_TYPE.BIT: convert_bit,
FIELD_TYPE.TINY: int,
FIELD_TYPE.SHORT: int,
FIELD_TYPE.LONG: int,
FIELD_TYPE.FLOAT: float,
FIELD_TYPE.DOUBLE: float,
FIELD_TYPE.LONGLONG: int,
FIELD_TYPE.INT24: int,
FIELD_TYPE.YEAR: int,
FIELD_TYPE.TIMESTAMP: convert_datetime,
FIELD_TYPE.DATETIME: convert_datetime,
FIELD_TYPE.TIME: convert_timedelta,
FIELD_TYPE.DATE: convert_date,
FIELD_TYPE.BLOB: through,
FIELD_TYPE.TINY_BLOB: through,
FIELD_TYPE.MEDIUM_BLOB: through,
FIELD_TYPE.LONG_BLOB: through,
FIELD_TYPE.STRING: through,
FIELD_TYPE.VAR_STRING: through,
FIELD_TYPE.VARCHAR: through,
FIELD_TYPE.DECIMAL: Decimal,
FIELD_TYPE.NEWDECIMAL: Decimal,
}
# for MySQLdb compatibility
conversions = encoders.copy()
conversions.update(decoders)
Thing2Literal = escape_str
# Run doctests with `pytest --doctest-modules pymysql/converters.py`
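Illustrative sketch, not part of the vendored file: escape_item renders Python values as SQL literals, while the convert_* functions decode column text back into Python objects. The values below are chosen for illustration and assume the module is importable as pymysql.converters:

import datetime
from pymysql.converters import escape_item, convert_datetime, convert_timedelta

print(escape_item("O'Reilly", "utf8"))                 # 'O\'Reilly'
print(escape_item(datetime.date(2024, 6, 7), "utf8"))  # '2024-06-07'
print(convert_datetime("2007-02-25 23:06:20"))         # datetime.datetime(2007, 2, 25, 23, 6, 20)
print(convert_timedelta("25:06:17"))                    # datetime.timedelta(days=1, seconds=3977)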

View file

@ -0,0 +1,531 @@
import re
import warnings
from . import err
#: Regular expression for :meth:`Cursor.executemany`.
#: executemany only supports simple bulk insert.
#: You can use it to load large dataset.
RE_INSERT_VALUES = re.compile(
r"\s*((?:INSERT|REPLACE)\b.+\bVALUES?\s*)"
+ r"(\(\s*(?:%s|%\(.+\)s)\s*(?:,\s*(?:%s|%\(.+\)s)\s*)*\))"
+ r"(\s*(?:ON DUPLICATE.*)?);?\s*\Z",
re.IGNORECASE | re.DOTALL,
)
class Cursor:
"""
This is the object used to interact with the database.
Do not create an instance of a Cursor yourself. Call
connections.Connection.cursor().
See `Cursor <https://www.python.org/dev/peps/pep-0249/#cursor-objects>`_ in
the specification.
"""
#: Max statement size which :meth:`executemany` generates.
#:
#: Max size of allowed statement is max_allowed_packet - packet_header_size.
#: Default value of max_allowed_packet is 1048576.
max_stmt_length = 1024000
def __init__(self, connection):
self.connection = connection
self.warning_count = 0
self.description = None
self.rownumber = 0
self.rowcount = -1
self.arraysize = 1
self._executed = None
self._result = None
self._rows = None
def close(self):
"""
Closing a cursor just exhausts all remaining data.
"""
conn = self.connection
if conn is None:
return
try:
while self.nextset():
pass
finally:
self.connection = None
def __enter__(self):
return self
def __exit__(self, *exc_info):
del exc_info
self.close()
def _get_db(self):
if not self.connection:
raise err.ProgrammingError("Cursor closed")
return self.connection
def _check_executed(self):
if not self._executed:
raise err.ProgrammingError("execute() first")
def _conv_row(self, row):
return row
def setinputsizes(self, *args):
"""Does nothing, required by DB API."""
def setoutputsizes(self, *args):
"""Does nothing, required by DB API."""
def _nextset(self, unbuffered=False):
"""Get the next query set."""
conn = self._get_db()
current_result = self._result
if current_result is None or current_result is not conn._result:
return None
if not current_result.has_next:
return None
self._result = None
self._clear_result()
conn.next_result(unbuffered=unbuffered)
self._do_get_result()
return True
def nextset(self):
return self._nextset(False)
def _escape_args(self, args, conn):
if isinstance(args, (tuple, list)):
return tuple(conn.literal(arg) for arg in args)
elif isinstance(args, dict):
return {key: conn.literal(val) for (key, val) in args.items()}
else:
# If it's not a dictionary let's try escaping it anyways.
# Worst case it will throw a Value error
return conn.escape(args)
def mogrify(self, query, args=None):
"""
Returns the exact string that would be sent to the database by calling the
execute() method.
:param query: Query to mogrify.
:type query: str
:param args: Parameters used with query. (optional)
:type args: tuple, list or dict
:return: The query with argument binding applied.
:rtype: str
This method follows the extension to the DB API 2.0 followed by Psycopg.
"""
conn = self._get_db()
if args is not None:
query = query % self._escape_args(args, conn)
return query
def execute(self, query, args=None):
"""Execute a query.
:param query: Query to execute.
:type query: str
:param args: Parameters used with query. (optional)
:type args: tuple, list or dict
:return: Number of affected rows.
:rtype: int
If args is a list or tuple, %s can be used as a placeholder in the query.
If args is a dict, %(name)s can be used as a placeholder in the query.
"""
while self.nextset():
pass
query = self.mogrify(query, args)
result = self._query(query)
self._executed = query
return result
def executemany(self, query, args):
"""Run several data against one query.
:param query: Query to execute.
:type query: str
:param args: Sequence of sequences or mappings. It is used as parameter.
:type args: tuple or list
:return: Number of rows affected, if any.
:rtype: int or None
This method improves performance on multiple-row INSERT and
REPLACE. Otherwise it is equivalent to looping over args with
execute().
"""
if not args:
return
m = RE_INSERT_VALUES.match(query)
if m:
q_prefix = m.group(1) % ()
q_values = m.group(2).rstrip()
q_postfix = m.group(3) or ""
assert q_values[0] == "(" and q_values[-1] == ")"
return self._do_execute_many(
q_prefix,
q_values,
q_postfix,
args,
self.max_stmt_length,
self._get_db().encoding,
)
self.rowcount = sum(self.execute(query, arg) for arg in args)
return self.rowcount
def _do_execute_many(
self, prefix, values, postfix, args, max_stmt_length, encoding
):
conn = self._get_db()
escape = self._escape_args
if isinstance(prefix, str):
prefix = prefix.encode(encoding)
if isinstance(postfix, str):
postfix = postfix.encode(encoding)
sql = bytearray(prefix)
args = iter(args)
v = values % escape(next(args), conn)
if isinstance(v, str):
v = v.encode(encoding, "surrogateescape")
sql += v
rows = 0
for arg in args:
v = values % escape(arg, conn)
if isinstance(v, str):
v = v.encode(encoding, "surrogateescape")
if len(sql) + len(v) + len(postfix) + 1 > max_stmt_length:
rows += self.execute(sql + postfix)
sql = bytearray(prefix)
else:
sql += b","
sql += v
rows += self.execute(sql + postfix)
self.rowcount = rows
return rows
def callproc(self, procname, args=()):
"""Execute stored procedure procname with args.
:param procname: Name of procedure to execute on server.
:type procname: str
:param args: Sequence of parameters to use with procedure.
:type args: tuple or list
Returns the original args.
Compatibility warning: PEP-249 specifies that any modified
parameters must be returned. This is currently impossible
as they are only available by storing them in a server
variable and then retrieved by a query. Since stored
procedures return zero or more result sets, there is no
reliable way to get at OUT or INOUT parameters via callproc.
The server variables are named @_procname_n, where procname
is the parameter above and n is the position of the parameter
(from zero). Once all result sets generated by the procedure
have been fetched, you can issue a SELECT @_procname_0, ...
query using .execute() to get any OUT or INOUT values.
Compatibility warning: The act of calling a stored procedure
itself creates an empty result set. This appears after any
result sets generated by the procedure. This is non-standard
behavior with respect to the DB-API. Be sure to use nextset()
to advance through all result sets; otherwise you may get
disconnected.
"""
conn = self._get_db()
if args:
fmt = f"@_{procname}_%d=%s"
self._query(
"SET %s"
% ",".join(
fmt % (index, conn.escape(arg)) for index, arg in enumerate(args)
)
)
self.nextset()
q = "CALL {}({})".format(
procname,
",".join(["@_%s_%d" % (procname, i) for i in range(len(args))]),
)
self._query(q)
self._executed = q
return args
def fetchone(self):
"""Fetch the next row."""
self._check_executed()
if self._rows is None or self.rownumber >= len(self._rows):
return None
result = self._rows[self.rownumber]
self.rownumber += 1
return result
def fetchmany(self, size=None):
"""Fetch several rows."""
self._check_executed()
if self._rows is None:
# Django expects () for EOF.
# https://github.com/django/django/blob/0c1518ee429b01c145cf5b34eab01b0b92f8c246/django/db/backends/mysql/features.py#L8
return ()
end = self.rownumber + (size or self.arraysize)
result = self._rows[self.rownumber : end]
self.rownumber = min(end, len(self._rows))
return result
def fetchall(self):
"""Fetch all the rows."""
self._check_executed()
if self._rows is None:
return []
if self.rownumber:
result = self._rows[self.rownumber :]
else:
result = self._rows
self.rownumber = len(self._rows)
return result
def scroll(self, value, mode="relative"):
self._check_executed()
if mode == "relative":
r = self.rownumber + value
elif mode == "absolute":
r = value
else:
raise err.ProgrammingError("unknown scroll mode %s" % mode)
if not (0 <= r < len(self._rows)):
raise IndexError("out of range")
self.rownumber = r
def _query(self, q):
conn = self._get_db()
self._clear_result()
conn.query(q)
self._do_get_result()
return self.rowcount
def _clear_result(self):
self.rownumber = 0
self._result = None
self.rowcount = 0
self.warning_count = 0
self.description = None
self.lastrowid = None
self._rows = None
def _do_get_result(self):
conn = self._get_db()
self._result = result = conn._result
self.rowcount = result.affected_rows
self.warning_count = result.warning_count
self.description = result.description
self.lastrowid = result.insert_id
self._rows = result.rows
def __iter__(self):
return self
def __next__(self):
row = self.fetchone()
if row is None:
raise StopIteration
return row
def __getattr__(self, name):
# DB-API 2.0 optional extension says these errors can be accessed
# via Connection object. But MySQLdb had defined them on Cursor object.
if name in (
"Warning",
"Error",
"InterfaceError",
"DatabaseError",
"DataError",
"OperationalError",
"IntegrityError",
"InternalError",
"ProgrammingError",
"NotSupportedError",
):
# Deprecated since v1.1
warnings.warn(
"PyMySQL errors hould be accessed from `pymysql` package",
DeprecationWarning,
stacklevel=2,
)
return getattr(err, name)
raise AttributeError(name)
class DictCursorMixin:
# You can override this to use OrderedDict or other dict-like types.
dict_type = dict
def _do_get_result(self):
super()._do_get_result()
fields = []
if self.description:
for f in self._result.fields:
name = f.name
if name in fields:
name = f.table_name + "." + name
fields.append(name)
self._fields = fields
if fields and self._rows:
self._rows = [self._conv_row(r) for r in self._rows]
def _conv_row(self, row):
if row is None:
return None
return self.dict_type(zip(self._fields, row))
class DictCursor(DictCursorMixin, Cursor):
"""A cursor which returns results as a dictionary"""
class SSCursor(Cursor):
"""
Unbuffered Cursor, mainly useful for queries that return a lot of data,
or for connections to remote servers over a slow network.
Instead of copying every row of data into a buffer, this will fetch
rows as needed. The upside of this is the client uses much less memory,
and rows are returned much faster when traveling over a slow network
or if the result set is very big.
There are limitations, though. The MySQL protocol doesn't support
returning the total number of rows, so the only way to tell how many rows
there are is to iterate over every row returned. Also, it currently isn't
possible to scroll backwards, as only the current row is held in memory.
"""
def _conv_row(self, row):
return row
def close(self):
conn = self.connection
if conn is None:
return
if self._result is not None and self._result is conn._result:
self._result._finish_unbuffered_query()
try:
while self.nextset():
pass
finally:
self.connection = None
__del__ = close
def _query(self, q):
conn = self._get_db()
self._clear_result()
conn.query(q, unbuffered=True)
self._do_get_result()
return self.rowcount
def nextset(self):
return self._nextset(unbuffered=True)
def read_next(self):
"""Read next row."""
return self._conv_row(self._result._read_rowdata_packet_unbuffered())
def fetchone(self):
"""Fetch next row."""
self._check_executed()
row = self.read_next()
if row is None:
self.warning_count = self._result.warning_count
return None
self.rownumber += 1
return row
def fetchall(self):
"""
Fetch all, as per MySQLdb. Pretty useless for large queries, as
it is buffered. See fetchall_unbuffered(), if you want an unbuffered
generator version of this method.
"""
return list(self.fetchall_unbuffered())
def fetchall_unbuffered(self):
"""
Fetch all, implemented as a generator, which isn't to standard,
however, it doesn't make sense to return everything in a list, as that
would use ridiculous memory for large result sets.
"""
return iter(self.fetchone, None)
def fetchmany(self, size=None):
"""Fetch many."""
self._check_executed()
if size is None:
size = self.arraysize
rows = []
for i in range(size):
row = self.read_next()
if row is None:
self.warning_count = self._result.warning_count
break
rows.append(row)
self.rownumber += 1
if not rows:
# Django expects () for EOF.
# https://github.com/django/django/blob/0c1518ee429b01c145cf5b34eab01b0b92f8c246/django/db/backends/mysql/features.py#L8
return ()
return rows
def scroll(self, value, mode="relative"):
self._check_executed()
if mode == "relative":
if value < 0:
raise err.NotSupportedError(
"Backwards scrolling not supported by this cursor"
)
for _ in range(value):
self.read_next()
self.rownumber += value
elif mode == "absolute":
if value < self.rownumber:
raise err.NotSupportedError(
"Backwards scrolling not supported by this cursor"
)
end = value - self.rownumber
for _ in range(end):
self.read_next()
self.rownumber = value
else:
raise err.ProgrammingError("unknown scroll mode %s" % mode)
class SSDictCursor(DictCursorMixin, SSCursor):
"""An unbuffered cursor, which returns results as a dictionary"""

View file

@ -0,0 +1,150 @@
import struct
from .constants import ER
class MySQLError(Exception):
"""Exception related to operation with MySQL."""
class Warning(Warning, MySQLError):
"""Exception raised for important warnings like data truncations
while inserting, etc."""
class Error(MySQLError):
"""Exception that is the base class of all other error exceptions
(not Warning)."""
class InterfaceError(Error):
"""Exception raised for errors that are related to the database
interface rather than the database itself."""
class DatabaseError(Error):
"""Exception raised for errors that are related to the
database."""
class DataError(DatabaseError):
"""Exception raised for errors that are due to problems with the
processed data like division by zero, numeric value out of range,
etc."""
class OperationalError(DatabaseError):
"""Exception raised for errors that are related to the database's
operation and not necessarily under the control of the programmer,
e.g. an unexpected disconnect occurs, the data source name is not
found, a transaction could not be processed, a memory allocation
error occurred during processing, etc."""
class IntegrityError(DatabaseError):
"""Exception raised when the relational integrity of the database
is affected, e.g. a foreign key check fails, duplicate key,
etc."""
class InternalError(DatabaseError):
"""Exception raised when the database encounters an internal
error, e.g. the cursor is not valid anymore, the transaction is
out of sync, etc."""
class ProgrammingError(DatabaseError):
"""Exception raised for programming errors, e.g. table not found
or already exists, syntax error in the SQL statement, wrong number
of parameters specified, etc."""
class NotSupportedError(DatabaseError):
"""Exception raised in case a method or database API was used
which is not supported by the database, e.g. requesting a
.rollback() on a connection that does not support transaction or
has transactions turned off."""
error_map = {}
def _map_error(exc, *errors):
for error in errors:
error_map[error] = exc
_map_error(
ProgrammingError,
ER.DB_CREATE_EXISTS,
ER.SYNTAX_ERROR,
ER.PARSE_ERROR,
ER.NO_SUCH_TABLE,
ER.WRONG_DB_NAME,
ER.WRONG_TABLE_NAME,
ER.FIELD_SPECIFIED_TWICE,
ER.INVALID_GROUP_FUNC_USE,
ER.UNSUPPORTED_EXTENSION,
ER.TABLE_MUST_HAVE_COLUMNS,
ER.CANT_DO_THIS_DURING_AN_TRANSACTION,
ER.WRONG_DB_NAME,
ER.WRONG_COLUMN_NAME,
)
_map_error(
DataError,
ER.WARN_DATA_TRUNCATED,
ER.WARN_NULL_TO_NOTNULL,
ER.WARN_DATA_OUT_OF_RANGE,
ER.NO_DEFAULT,
ER.PRIMARY_CANT_HAVE_NULL,
ER.DATA_TOO_LONG,
ER.DATETIME_FUNCTION_OVERFLOW,
ER.TRUNCATED_WRONG_VALUE_FOR_FIELD,
ER.ILLEGAL_VALUE_FOR_TYPE,
)
_map_error(
IntegrityError,
ER.DUP_ENTRY,
ER.NO_REFERENCED_ROW,
ER.NO_REFERENCED_ROW_2,
ER.ROW_IS_REFERENCED,
ER.ROW_IS_REFERENCED_2,
ER.CANNOT_ADD_FOREIGN,
ER.BAD_NULL_ERROR,
)
_map_error(
NotSupportedError,
ER.WARNING_NOT_COMPLETE_ROLLBACK,
ER.NOT_SUPPORTED_YET,
ER.FEATURE_DISABLED,
ER.UNKNOWN_STORAGE_ENGINE,
)
_map_error(
OperationalError,
ER.DBACCESS_DENIED_ERROR,
ER.ACCESS_DENIED_ERROR,
ER.CON_COUNT_ERROR,
ER.TABLEACCESS_DENIED_ERROR,
ER.COLUMNACCESS_DENIED_ERROR,
ER.CONSTRAINT_FAILED,
ER.LOCK_DEADLOCK,
)
del _map_error, ER
def raise_mysql_exception(data):
errno = struct.unpack("<h", data[1:3])[0]
# https://dev.mysql.com/doc/dev/mysql-server/latest/page_protocol_basic_err_packet.html
# Error packet has optional sqlstate that is 5 bytes and starts with '#'.
if data[3] == 0x23: # '#'
# sqlstate = data[4:9].decode()
# TODO: Append (sqlstate) to the error message. This will come in the next minor release.
errval = data[9:].decode("utf-8", "replace")
else:
errval = data[3:].decode("utf-8", "replace")
errorclass = error_map.get(errno)
if errorclass is None:
errorclass = InternalError if errno < 1000 else OperationalError
raise errorclass(errno, errval)
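Illustrative sketch, not part of the vendored file: error_map routes server error numbers to DB-API exception classes, and unmapped numbers fall back to InternalError (below 1000) or OperationalError:

from pymysql import err
from pymysql.constants import ER

print(err.error_map[ER.DUP_ENTRY])  # <class 'pymysql.err.IntegrityError'>
print(err.error_map.get(9999))      # None -> raise_mysql_exception falls back to OperationalError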

View file

@ -0,0 +1,21 @@
import configparser
class Parser(configparser.RawConfigParser):
def __init__(self, **kwargs):
kwargs["allow_no_value"] = True
configparser.RawConfigParser.__init__(self, **kwargs)
def __remove_quotes(self, value):
quotes = ["'", '"']
for quote in quotes:
if len(value) >= 2 and value[0] == value[-1] == quote:
return value[1:-1]
return value
def optionxform(self, key):
return key.lower().replace("_", "-")
def get(self, section, option):
value = configparser.RawConfigParser.get(self, section, option)
return self.__remove_quotes(value)
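Illustrative sketch, not part of the vendored file: the option-file parser lowercases keys, maps underscores to dashes, and strips surrounding quotes from values. The file content below is assumed purely for the example:

import tempfile
from pymysql.optionfile import Parser

with tempfile.NamedTemporaryFile("w", suffix=".cnf", delete=False) as f:
    f.write('[client]\nuser = root\npass_word = "secret"\n')
    path = f.name

parser = Parser()
parser.read(path)
print(parser.get("client", "user"))       # root
print(parser.get("client", "pass-word"))  # secret  (key normalized, quotes stripped)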

View file

@ -0,0 +1,356 @@
# Python implementation of low level MySQL client-server protocol
# http://dev.mysql.com/doc/internals/en/client-server-protocol.html
from .charset import MBLENGTH
from .constants import FIELD_TYPE, SERVER_STATUS
from . import err
import struct
import sys
DEBUG = False
NULL_COLUMN = 251
UNSIGNED_CHAR_COLUMN = 251
UNSIGNED_SHORT_COLUMN = 252
UNSIGNED_INT24_COLUMN = 253
UNSIGNED_INT64_COLUMN = 254
def dump_packet(data): # pragma: no cover
def printable(data):
if 32 <= data < 127:
return chr(data)
return "."
try:
print("packet length:", len(data))
for i in range(1, 7):
f = sys._getframe(i)
print("call[%d]: %s (line %d)" % (i, f.f_code.co_name, f.f_lineno))
print("-" * 66)
except ValueError:
pass
dump_data = [data[i : i + 16] for i in range(0, min(len(data), 256), 16)]
for d in dump_data:
print(
" ".join(f"{x:02X}" for x in d)
+ " " * (16 - len(d))
+ " " * 2
+ "".join(printable(x) for x in d)
)
print("-" * 66)
print()
class MysqlPacket:
"""Representation of a MySQL response packet.
Provides an interface for reading/parsing the packet results.
"""
__slots__ = ("_position", "_data")
def __init__(self, data, encoding):
self._position = 0
self._data = data
def get_all_data(self):
return self._data
def read(self, size):
"""Read the first 'size' bytes in packet and advance cursor past them."""
result = self._data[self._position : (self._position + size)]
if len(result) != size:
error = (
"Result length not requested length:\n"
f"Expected={size}. Actual={len(result)}. Position: {self._position}. Data Length: {len(self._data)}"
)
if DEBUG:
print(error)
self.dump()
raise AssertionError(error)
self._position += size
return result
def read_all(self):
"""Read all remaining data in the packet.
(Subsequent read() will return errors.)
"""
result = self._data[self._position :]
self._position = None # ensure no subsequent read()
return result
def advance(self, length):
"""Advance the cursor in data buffer 'length' bytes."""
new_position = self._position + length
if new_position < 0 or new_position > len(self._data):
raise Exception(
f"Invalid advance amount ({length}) for cursor. Position={new_position}"
)
self._position = new_position
def rewind(self, position=0):
"""Set the position of the data buffer cursor to 'position'."""
if position < 0 or position > len(self._data):
raise Exception("Invalid position to rewind cursor to: %s." % position)
self._position = position
def get_bytes(self, position, length=1):
"""Get 'length' bytes starting at 'position'.
Position is start of payload (first four packet header bytes are not
included) starting at index '0'.
No error checking is done. If requesting outside end of buffer
an empty string (or string shorter than 'length') may be returned!
"""
return self._data[position : (position + length)]
def read_uint8(self):
result = self._data[self._position]
self._position += 1
return result
def read_uint16(self):
result = struct.unpack_from("<H", self._data, self._position)[0]
self._position += 2
return result
def read_uint24(self):
low, high = struct.unpack_from("<HB", self._data, self._position)
self._position += 3
return low + (high << 16)
def read_uint32(self):
result = struct.unpack_from("<I", self._data, self._position)[0]
self._position += 4
return result
def read_uint64(self):
result = struct.unpack_from("<Q", self._data, self._position)[0]
self._position += 8
return result
def read_string(self):
end_pos = self._data.find(b"\0", self._position)
if end_pos < 0:
return None
result = self._data[self._position : end_pos]
self._position = end_pos + 1
return result
def read_length_encoded_integer(self):
"""Read a 'Length Coded Binary' number from the data buffer.
Length coded numbers can be anywhere from 1 to 9 bytes depending
on the value of the first byte.
"""
c = self.read_uint8()
if c == NULL_COLUMN:
return None
if c < UNSIGNED_CHAR_COLUMN:
return c
elif c == UNSIGNED_SHORT_COLUMN:
return self.read_uint16()
elif c == UNSIGNED_INT24_COLUMN:
return self.read_uint24()
elif c == UNSIGNED_INT64_COLUMN:
return self.read_uint64()
def read_length_coded_string(self):
"""Read a 'Length Coded String' from the data buffer.
A 'Length Coded String' consists first of a length coded
(unsigned, positive) integer represented in 1-9 bytes followed by
that many bytes of binary data. (For example "cat" would be "3cat".)
"""
length = self.read_length_encoded_integer()
if length is None:
return None
return self.read(length)
def read_struct(self, fmt):
s = struct.Struct(fmt)
result = s.unpack_from(self._data, self._position)
self._position += s.size
return result
def is_ok_packet(self):
# https://dev.mysql.com/doc/internals/en/packet-OK_Packet.html
return self._data[0] == 0 and len(self._data) >= 7
def is_eof_packet(self):
# http://dev.mysql.com/doc/internals/en/generic-response-packets.html#packet-EOF_Packet
# Caution: \xFE may be LengthEncodedInteger.
# If \xFE is a LengthEncodedInteger header, 8 bytes follow.
return self._data[0] == 0xFE and len(self._data) < 9
def is_auth_switch_request(self):
# http://dev.mysql.com/doc/internals/en/connection-phase-packets.html#packet-Protocol::AuthSwitchRequest
return self._data[0] == 0xFE
def is_extra_auth_data(self):
# https://dev.mysql.com/doc/internals/en/successful-authentication.html
return self._data[0] == 1
def is_resultset_packet(self):
field_count = self._data[0]
return 1 <= field_count <= 250
def is_load_local_packet(self):
return self._data[0] == 0xFB
def is_error_packet(self):
return self._data[0] == 0xFF
def check_error(self):
if self.is_error_packet():
self.raise_for_error()
def raise_for_error(self):
self.rewind()
self.advance(1) # field_count == error (we already know that)
errno = self.read_uint16()
if DEBUG:
print("errno =", errno)
err.raise_mysql_exception(self._data)
def dump(self):
dump_packet(self._data)
class FieldDescriptorPacket(MysqlPacket):
"""A MysqlPacket that represents a specific column's metadata in the result.
Parsing is automatically done and the results are exported via public
attributes on the class such as: db, table_name, name, length, type_code.
"""
def __init__(self, data, encoding):
MysqlPacket.__init__(self, data, encoding)
self._parse_field_descriptor(encoding)
def _parse_field_descriptor(self, encoding):
"""Parse the 'Field Descriptor' (Metadata) packet.
This is compatible with MySQL 4.1+ (not compatible with MySQL 4.0).
"""
self.catalog = self.read_length_coded_string()
self.db = self.read_length_coded_string()
self.table_name = self.read_length_coded_string().decode(encoding)
self.org_table = self.read_length_coded_string().decode(encoding)
self.name = self.read_length_coded_string().decode(encoding)
self.org_name = self.read_length_coded_string().decode(encoding)
(
self.charsetnr,
self.length,
self.type_code,
self.flags,
self.scale,
) = self.read_struct("<xHIBHBxx")
# 'default' is a length coded binary and is still in the buffer?
# not used for normal result sets...
def description(self):
"""Provides a 7-item tuple compatible with the Python PEP249 DB Spec."""
return (
self.name,
self.type_code,
None, # TODO: display_length; should this be self.length?
self.get_column_length(), # 'internal_size'
self.get_column_length(), # 'precision' # TODO: why!?!?
self.scale,
self.flags % 2 == 0,
)
def get_column_length(self):
if self.type_code == FIELD_TYPE.VAR_STRING:
mblen = MBLENGTH.get(self.charsetnr, 1)
return self.length // mblen
return self.length
def __str__(self):
return "{} {!r}.{!r}.{!r}, type={}, flags={:x}".format(
self.__class__,
self.db,
self.table_name,
self.name,
self.type_code,
self.flags,
)
class OKPacketWrapper:
"""
OK Packet Wrapper. It uses an existing packet object, and wraps
around it, exposing useful variables while still providing access
to the original packet objects variables and methods.
"""
def __init__(self, from_packet):
if not from_packet.is_ok_packet():
raise ValueError(
"Cannot create "
+ str(self.__class__.__name__)
+ " object from invalid packet type"
)
self.packet = from_packet
self.packet.advance(1)
self.affected_rows = self.packet.read_length_encoded_integer()
self.insert_id = self.packet.read_length_encoded_integer()
self.server_status, self.warning_count = self.read_struct("<HH")
self.message = self.packet.read_all()
self.has_next = self.server_status & SERVER_STATUS.SERVER_MORE_RESULTS_EXISTS
def __getattr__(self, key):
return getattr(self.packet, key)
class EOFPacketWrapper:
"""
EOF Packet Wrapper. It uses an existing packet object, and wraps
around it, exposing useful variables while still providing access
to the original packet objects variables and methods.
"""
def __init__(self, from_packet):
if not from_packet.is_eof_packet():
raise ValueError(
f"Cannot create '{self.__class__}' object from invalid packet type"
)
self.packet = from_packet
self.warning_count, self.server_status = self.packet.read_struct("<xhh")
if DEBUG:
print("server_status=", self.server_status)
self.has_next = self.server_status & SERVER_STATUS.SERVER_MORE_RESULTS_EXISTS
def __getattr__(self, key):
return getattr(self.packet, key)
class LoadLocalPacketWrapper:
"""
Load Local Packet Wrapper. It uses an existing packet object, and wraps
around it, exposing useful variables while still providing access
to the original packet objects variables and methods.
"""
def __init__(self, from_packet):
if not from_packet.is_load_local_packet():
raise ValueError(
f"Cannot create '{self.__class__}' object from invalid packet type"
)
self.packet = from_packet
self.filename = self.packet.get_all_data()[1:]
if DEBUG:
print("filename=", self.filename)
def __getattr__(self, key):
return getattr(self.packet, key)
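Illustrative sketch, not part of the vendored file: decoding a hand-built payload with MysqlPacket. The bytes are assumed purely for illustration:

from pymysql.protocol import MysqlPacket

# 0xFC announces a 2-byte length-encoded integer; 0x2A 0x03 little-endian is 810.
payload = bytes([0xFC, 0x2A, 0x03]) + b"\x03cat"
packet = MysqlPacket(payload, "utf-8")
print(packet.read_length_encoded_integer())  # 810
print(packet.read_length_coded_string())     # b'cat'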

View file

@ -0,0 +1,20 @@
from time import localtime
from datetime import date, datetime, time, timedelta
Date = date
Time = time
TimeDelta = timedelta
Timestamp = datetime
def DateFromTicks(ticks):
return date(*localtime(ticks)[:3])
def TimeFromTicks(ticks):
return time(*localtime(ticks)[3:6])
def TimestampFromTicks(ticks):
return datetime(*localtime(ticks)[:6])
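Illustrative sketch, not part of the vendored file: the DB-API ticks helpers convert a POSIX timestamp through localtime, so results depend on the machine's time zone:

from pymysql.times import Date, DateFromTicks, TimestampFromTicks

print(Date(2024, 6, 7))           # 2024-06-07
print(DateFromTicks(0))           # 1970-01-01 or 1969-12-31, depending on the local time zone
print(TimestampFromTicks(86400))  # roughly one day after the epoch, in local time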

View file

@ -343,7 +343,7 @@ import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.community.mysql.plugins.module_utils.database import mysql_quote_identifier
from ansible_collections.community.mysql.plugins.module_utils.mysql import mysql_connect, mysql_driver, mysql_driver_fail_msg, mysql_common_argument_spec
from ansible_collections.community.mysql.plugins.module_utils.mysql import mysql_connect, mysql_driver, mysql_common_argument_spec
from ansible.module_utils.six.moves import shlex_quote
from ansible.module_utils._text import to_native
@ -605,9 +605,6 @@ def main():
supports_check_mode=True,
)
if mysql_driver is None:
module.fail_json(msg=mysql_driver_fail_msg)
db = module.params["name"]
if not db:
module.exit_json(changed=False, db=db, db_list=[])

View file

@ -268,21 +268,6 @@ slave_hosts:
type: dict
sample:
- { "2": { "Host": "", "Master_id": 1, "Port": 3306 } }
connector_name:
description: Name of the python connector used by the module. When the connector is not identified, returns C(Unknown).
returned: always
type: str
sample:
- "pymysql"
- "MySQLdb"
version_added: '3.6.0'
connector_version:
description: Version of the python connector used by the module. When the connector is not identified, returns C(Unknown).
returned: always
type: str
sample:
- "1.0.2"
version_added: '3.6.0'
'''
from decimal import Decimal
@ -292,9 +277,6 @@ from ansible_collections.community.mysql.plugins.module_utils.mysql import (
mysql_connect,
mysql_common_argument_spec,
mysql_driver,
mysql_driver_fail_msg,
get_connector_name,
get_connector_version,
get_server_implementation,
)
@ -739,21 +721,15 @@ def main():
if exclude_fields:
exclude_fields = set([f.strip() for f in exclude_fields])
if mysql_driver is None:
module.fail_json(msg=mysql_driver_fail_msg)
connector_name = get_connector_name(mysql_driver)
connector_version = get_connector_version(mysql_driver)
try:
cursor, db_conn = mysql_connect(module, login_user, login_password,
config_file, ssl_cert, ssl_key, ssl_ca, db,
check_hostname=check_hostname,
connect_timeout=connect_timeout, cursor_class='DictCursor')
except Exception as e:
msg = ('unable to connect to database using %s %s, check login_user '
msg = ('unable to connect to database, check login_user '
'and login_password are correct or %s has the credentials. '
'Exception message: %s' % (connector_name, connector_version, config_file, to_native(e)))
'Exception message: %s' % (config_file, to_native(e)))
module.fail_json(msg)
server_implementation = get_server_implementation(cursor)
@ -765,8 +741,6 @@ def main():
mysql = MySQL_Info(module, cursor, server_implementation, user_implementation)
module.exit_json(changed=False,
connector_name=connector_name,
connector_version=connector_version,
**mysql.get_info(filter_, exclude_fields, return_empty_dbs))

View file

@ -26,9 +26,7 @@ options:
as a formatting character. All literal C(%) characters in the query should be
escaped as C(%%).
- Note that if you use the C(IF EXISTS/IF NOT EXISTS) clauses in your query
and C(mysqlclient) or C(PyMySQL 0.10.0+) connectors, the module will report
that the state has been changed even if it has not. If it is important in your
workflow, use the C(PyMySQL 0.9.3) connector instead.
the module will report that the state has been changed even if it has not.
type: raw
required: true
positional_args:
@ -126,7 +124,6 @@ from ansible_collections.community.mysql.plugins.module_utils.mysql import (
mysql_connect,
mysql_common_argument_spec,
mysql_driver,
mysql_driver_fail_msg,
)
from ansible.module_utils._text import to_native
@ -189,9 +186,6 @@ def main():
else:
arguments = None
if mysql_driver is None:
module.fail_json(msg=mysql_driver_fail_msg)
# Connect to DB:
try:
cursor, db_connection = mysql_connect(module, login_user, login_password,

View file

@ -305,7 +305,6 @@ from ansible_collections.community.mysql.plugins.module_utils.mysql import (
get_server_implementation,
mysql_connect,
mysql_driver,
mysql_driver_fail_msg,
mysql_common_argument_spec,
)
from ansible.module_utils._text import to_native
@ -545,12 +544,6 @@ def main():
connection_name = module.params["connection_name"]
channel = module.params['channel']
fail_on_error = module.params['fail_on_error']
if mysql_driver is None:
module.fail_json(msg=mysql_driver_fail_msg)
else:
warnings.filterwarnings('error', category=mysql_driver.Warning)
login_password = module.params["login_password"]
login_user = module.params["login_user"]

View file

@ -305,7 +305,6 @@ from ansible.module_utils.basic import AnsibleModule
from ansible_collections.community.mysql.plugins.module_utils.mysql import (
mysql_connect,
mysql_driver,
mysql_driver_fail_msg,
mysql_common_argument_spec
)
from ansible_collections.community.mysql.plugins.module_utils.user import (
@ -1017,9 +1016,6 @@ def main():
if priv and isinstance(priv, dict):
priv = convert_priv_dict_to_str(priv)
if mysql_driver is None:
module.fail_json(msg=mysql_driver_fail_msg)
# TODO Release 4.0.0 : Remove this test and variable assignation
if column_case_sensitive is None:
column_case_sensitive = False

View file

@ -396,7 +396,6 @@ from ansible_collections.community.mysql.plugins.module_utils.database import SQ
from ansible_collections.community.mysql.plugins.module_utils.mysql import (
mysql_connect,
mysql_driver,
mysql_driver_fail_msg,
mysql_common_argument_spec,
set_session_vars,
)
@ -492,9 +491,6 @@ def main():
if priv and isinstance(priv, dict):
priv = convert_priv_dict_to_str(priv)
if mysql_driver is None:
module.fail_json(msg=mysql_driver_fail_msg)
if password_expire_interval and password_expire_interval < 1:
module.fail_json(msg="password_expire_interval value \
should be positive number")

View file

@ -91,7 +91,7 @@ from re import match
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.community.mysql.plugins.module_utils.database import SQLParseError, mysql_quote_identifier
from ansible_collections.community.mysql.plugins.module_utils.mysql import mysql_connect, mysql_driver, mysql_driver_fail_msg, mysql_common_argument_spec
from ansible_collections.community.mysql.plugins.module_utils.mysql import mysql_connect, mysql_driver, mysql_common_argument_spec
from ansible.module_utils._text import to_native
executed_queries = []
@ -205,10 +205,6 @@ def main():
module.fail_json(msg="Cannot run without variable to operate with")
if match('^[0-9A-Za-z_.]+$', mysqlvar) is None:
module.fail_json(msg="invalid variable name \"%s\"" % mysqlvar)
if mysql_driver is None:
module.fail_json(msg=mysql_driver_fail_msg)
else:
warnings.filterwarnings('error', category=mysql_driver.Warning)
try:
cursor, db_conn = mysql_connect(module, user, password, config_file, ssl_cert, ssl_key, ssl_ca, db,

View file

@ -49,14 +49,6 @@ def is_exclude(exclude_list, test_suite):
if excl.get('python') == test_suite.get('python'):
match += 1
if 'connector_name' in excl:
if excl.get('connector_name') == test_suite.get('connector_name'):
match += 1
if 'connector_version' in excl:
if excl.get('connector_version') == test_suite.get('connector_version'):
match += 1
if match > 1:
test_is_excluded = True
return test_is_excluded
@ -74,37 +66,29 @@ def main():
for db_engine_name in tests_matrix_yaml.get('db_engine_name'):
for db_engine_version in tests_matrix_yaml.get('db_engine_version'):
for python in tests_matrix_yaml.get('python'):
for connector_name in tests_matrix_yaml.get('connector_name'):
for connector_version in tests_matrix_yaml.get('connector_version'):
test_suite = {
'ansible': ansible,
'db_engine_name': db_engine_name,
'db_engine_version': db_engine_version,
'python': python,
'connector_name': connector_name,
'connector_version': connector_version
}
if not is_exclude(exclude_list, test_suite):
matrix.append(test_suite)
test_suite = {
'ansible': ansible,
'db_engine_name': db_engine_name,
'db_engine_version': db_engine_version,
'python': python,
}
if not is_exclude(exclude_list, test_suite):
matrix.append(test_suite)
for tests in matrix:
a = tests.get('ansible')
dn = tests.get('db_engine_name')
dv = tests.get('db_engine_version')
p = tests.get('python')
cn = tests.get('connector_name')
cv = tests.get('connector_version')
make_cmd = (
f'make '
f'ansible="{a}" '
f'db_engine_name="{dn}" '
f'db_engine_version="{dv}" '
f'python="{p}" '
f'connector_name="{cn}" '
f'connector_version="{cv}" '
f'test-integration'
)
print(f'Run tests for: Ansible: {a}, DB: {dn} {dv}, Python: {p}, Connector: {cn} {cv}')
print(f'Run tests for: Ansible: {a}, DB: {dn} {dv}, Python: {p}')
os.system(make_cmd)
# TODO, allow for CTRL+C to break the loop more easily
# TODO, store the failures from this iteration
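For illustration only, with assumed matrix values (the real combinations come from the matrix YAML file), one iteration of the loop above shells out a command of this shape:

make ansible="stable-2.16" db_engine_name="mariadb" db_engine_version="10.6.11" python="3.10" test-integration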

View file

@ -4,6 +4,14 @@
# and should not be used as examples of how to write Ansible roles #
####################################################################
- name: "{{ role_name }} | Mains | Install required packages for testing"
ansible.builtin.package:
name:
- bzip2 # To test mysql_db dump compression
- iproute2 # To gather network facts
- mariadb-client # Can't install both mysql_client, had to make a choice
state: present
- name: Prepare the fake root folder
ansible.builtin.import_tasks:
file: fake_root.yml

View file

@ -1,23 +1,11 @@
---
- name: "{{ role_name }} | Setvars | Extract Podman/Docker Network Gateway"
ansible.builtin.shell:
cmd: ip route|grep default|awk '{print $3}'
register: ip_route_output
- name: "{{ role_name }} | Setvars | Gather facts"
ansible.builtin.setup:
- name: "{{ role_name }} | Setvars | Set Fact"
ansible.builtin.set_fact:
gateway_addr: "{{ ip_route_output.stdout }}"
connector_name_lookup: >-
{{ lookup(
'file',
'/root/ansible_collections/community/mysql/tests/integration/connector_name'
) }}
connector_version_lookup: >-
{{ lookup(
'file',
'/root/ansible_collections/community/mysql/tests/integration/connector_version'
) }}
gateway_addr: "{{ ansible_default_ipv4.gateway }}"
db_engine_name_lookup: >-
{{ lookup(
'file',
@ -41,8 +29,6 @@
- name: "{{ role_name }} | Setvars | Set Fact using above facts"
ansible.builtin.set_fact:
connector_name: "{{ connector_name_lookup.strip() }}"
connector_version: "{{ connector_version_lookup.strip() }}"
db_engine: "{{ db_engine_name_lookup.strip() }}"
db_version: "{{ db_engine_version_lookup.strip() }}"
python_version: "{{ python_version_lookup.strip() }}"
@ -69,8 +55,6 @@
- name: "{{ role_name }} | Setvars | Output test informations"
vars:
msg: |-
connector_name: {{ connector_name }}
connector_version: {{ connector_version }}
db_engine: {{ db_engine }}
db_version: {{ db_version }}
python_version: {{ python_version }}

View file

@ -1,63 +1,35 @@
---
- vars:
mysql_parameters: &mysql_params
login_user: root
login_password: msandbox
login_host: "{{ gateway_addr }}"
login_port: 3307
- name: Query Primary container over TCP for MySQL/MariaDB version
community.mysql.mysql_info:
login_user: root
login_password: msandbox
login_host: "{{ gateway_addr }}"
login_port: 3307
filter:
- version
register: primary_info
failed_when:
- registred_db_version != db_version
vars:
registred_db_version:
"{{ primary_info.version.major }}.{{ primary_info.version.minor }}\
.{{ primary_info.version.release }}"
block:
- name: Assert that expected Python is installed
ansible.builtin.command:
cmd: python{{ python_version }} -V
changed_when: false
register: python_in_use
failed_when:
- python_in_use.stdout is not search(python_version)
- name: Query Primary container over TCP for MySQL/MariaDB version
mysql_info:
<<: *mysql_params
filter:
- version
register: primary_info
- name: Assert that test container runs the expected MySQL/MariaDB version
assert:
that:
- registred_db_version == db_version
vars:
registred_db_version:
"{{ primary_info.version.major }}.{{ primary_info.version.minor }}\
.{{ primary_info.version.release }}"
- name: Assert that mysql_info module used the expected version of pymysql
assert:
that:
- primary_info.connector_name == connector_name
- primary_info.connector_version == connector_version
when:
- connector_name == 'pymysql'
- name: Assert that mysql_info module used the expected version of mysqlclient
assert:
that:
- primary_info.connector_name == 'MySQLdb'
- primary_info.connector_version == connector_version
when:
- connector_name == 'mysqlclient'
- name: Display the python version in use
command:
cmd: python{{ python_version }} -V
changed_when: false
register: python_in_use
- name: Assert that expected Python is installed
assert:
that:
- python_in_use.stdout is search(python_version)
- name: Assert that we run the expected ansible version
assert:
that:
- ansible_running_version == test_ansible_version
vars:
ansible_running_version:
"{{ ansible_version.major }}.{{ ansible_version.minor }}"
when:
- test_ansible_version != 'devel' # Devel will change overtime
- name: Assert that we run the expected ansible version
ansible.builtin.assert:
that:
- ansible_running_version == test_ansible_version
vars:
ansible_running_version:
"{{ ansible_version.major }}.{{ ansible_version.minor }}"
when:
- test_ansible_version != 'devel' # Devel will change over time

View file

@ -15,39 +15,18 @@
- name: Config overrides | Add blank line
shell: 'echo "" >> {{ config_file }}'
when:
- >
connector_name != 'pymysql'
or (
connector_name == 'pymysql'
and connector_version is version('0.9.3', '>=')
)
- name: Config overrides | Create include_dir
file:
path: '{{ include_dir }}'
state: directory
mode: '0777'
when:
- >
connector_name != 'pymysql'
or (
connector_name == 'pymysql'
and connector_version is version('0.9.3', '>=')
)
- name: Config overrides | Add include_dir
lineinfile:
path: '{{ config_file }}'
line: '!includedir {{ include_dir }}'
insertafter: EOF
when:
- >
connector_name != 'pymysql'
or (
connector_name == 'pymysql'
and connector_version is version('0.9.3', '>=')
)
- name: Config overrides | Create database using fake port to connect to, must fail
mysql_db:

View file

@ -49,19 +49,8 @@
login_port: '{{ mysql_primary_port }}'
ca_cert: /tmp/cert.pem
register: result
ignore_errors: yes
- assert:
that:
- result is failed
when:
- connector_name == 'pymysql'
- assert:
that:
- result is succeeded
when:
- connector_name != 'pymysql'
failed_when:
- result is success
- name: attempt connection with newly created user ignoring hostname
mysql_db:
@ -74,11 +63,6 @@
ca_cert: /tmp/cert.pem
check_hostname: no
register: result
ignore_errors: yes
- assert:
that:
- result is succeeded or 'pymysql >= 0.7.11 is required' in result.msg
- name: Drop mysql user
mysql_user:

View file

@ -1,30 +0,0 @@
---
# Added in 3.6.0 in
# https://github.com/ansible-collections/community.mysql/pull/497
- name: Connector info | Assert connector_name exists and has expected values
ansible.builtin.assert:
that:
- result.connector_name is defined
- result.connector_name is in ['pymysql', 'MySQLdb']
success_msg: >-
Assertions passed, result.connector_name is {{ result.connector_name }}
fail_msg: >-
Assertion failed, result.connector_name is
{{ result.connector_name | d('Unknown')}} which is different than expected
pymysql or MySQLdb
- name: Connector info | Assert connector_version exists and has expected values
ansible.builtin.assert:
that:
- result.connector_version is defined
- >
result.connector_version == 'Unknown'
or result.connector_version is version(connector_version, '==')
success_msg: >-
Assertions passed, result.connector_version is
{{ result.connector_version }}
fail_msg: >-
Assertion failed, result.connector_version is
{{ result.connector_version }} which is different than expected
{{ connector_version }}

View file

@ -48,19 +48,8 @@
login_port: '{{ mysql_primary_port }}'
ca_cert: /tmp/cert.pem
register: result
ignore_errors: yes
- assert:
that:
- result is failed
when:
- connector_name == 'pymysql'
- assert:
that:
- result is succeeded
when:
- connector_name != 'pymysql'
failed_when:
- result is success
- name: attempt connection with newly created user ignoring hostname
mysql_info:

View file

@ -57,10 +57,6 @@
- result.engines != {}
- result.users != {}
- name: mysql_info - Test connector informations display
ansible.builtin.import_tasks:
file: connector_info.yml
# Access by non-default cred file
- name: mysql_info - check non-default cred file
mysql_info:

View file

@ -48,19 +48,9 @@
login_port: '{{ mysql_primary_port }}'
ca_cert: /tmp/cert.pem
register: result
ignore_errors: yes
- assert:
that:
- result is failed
when:
- connector_name == 'pymysql'
- assert:
that:
- result is succeeded
when:
- connector_name != 'pymysql'
ignore_errors: true
failed_when:
- result is success
- name: attempt connection with newly created user ignoring hostname
mysql_query:
@ -70,13 +60,8 @@
login_host: '{{ mysql_host }}'
login_port: '{{ mysql_primary_port }}'
ca_cert: /tmp/cert.pem
check_hostname: no
check_hostname: false
register: result
ignore_errors: yes
- assert:
that:
- result is succeeded or 'pymysql >= 0.7.11 is required' in result.msg
- name: Drop mysql user
mysql_user:

View file

@ -360,30 +360,14 @@
register: result
# Issue https://github.com/ansible-collections/community.mysql/issues/268
- name: Assert that create table IF NOT EXISTS is not changed with pymysql
# However, the best thing would be to debug this and invert the condition...
- name: Assert that create table IF NOT EXISTS is changed
assert:
that:
# PyMySQL driver throws a warning for version before 0.10.0
- result is not changed
when:
- connector_name == 'pymysql'
- connector_version is version('0.10.0', '<')
# Issue https://github.com/ansible-collections/community.mysql/issues/268
- name: Assert that create table IF NOT EXISTS is changed with mysqlclient
assert:
that:
# Mysqlclient 2.0.1 and pymysql 0.10.0+ drivers throws no warning,
# The pymysql 0.10.0+ driver throws no warning,
# so it's impossible to figure out if the state was changed or not.
# We assume that it was for DDL queries by default in the code
- result is changed
when:
- >
connector_name == 'mysqlclient'
or (
connector_name == 'pymysql'
and connector_version is version('0.10.0', '>')
)
- name: Drop db {{ test_db }}
mysql_query:
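The simplified assert above relies on the module's assumption that DDL statements always change state, because pymysql 0.10.0+ raises no warning for CREATE TABLE IF NOT EXISTS and the real outcome cannot be detected. A minimal sketch of the behaviour under test (connection options omitted, names illustrative):

- name: Re-create an existing table; nothing changes server-side, but changed is still reported
  community.mysql.mysql_query:
    login_db: '{{ test_db }}'
    query: CREATE TABLE IF NOT EXISTS t1 (id int)  # illustrative table name
  register: result

- name: Assert the DDL query is reported as changed (assumed for DDL by the module)
  assert:
    that:
      - result is changed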

View file

@ -48,19 +48,8 @@
login_port: '{{ mysql_primary_port }}'
ca_cert: /tmp/cert.pem
register: result
ignore_errors: yes
- assert:
that:
- result is failed
when:
- connector_name == 'pymysql'
- assert:
that:
- result is succeeded
when:
- connector_name != 'pymysql'
failed_when:
- result is success
- name: attempt connection with newly created user ignoring hostname
mysql_replication:
@ -70,13 +59,7 @@
login_host: '{{ mysql_host }}'
login_port: '{{ mysql_primary_port }}'
ca_cert: /tmp/cert.pem
check_hostname: no
register: result
ignore_errors: yes
- assert:
that:
- result is succeeded or 'pymysql >= 0.7.11 is required' in result.msg
check_hostname: false
- name: Drop mysql user
mysql_user:

View file

@ -259,14 +259,11 @@
fail_on_error: true
register: result
# mysqlclient 2.0.1 and pymysql 0.10.0+ always return "changed"
- name: Assert that startreplica is not changed
assert:
that:
- result is not changed
when:
- connector_name == 'pymysql'
- connector_version is version('0.10.0', '<')
# # pymysql 0.10.0+ always returns "changed"
# - name: Assert that startreplica is not changed
# assert:
# that:
# - result is not changed
# Test stopreplica mode:
- name: Stop replica
@ -286,23 +283,20 @@
ansible.builtin.wait_for:
timeout: 2
# Test stopreplica mode:
# mysqlclient 2.0.1 and pymysql 0.10.0+ always return "changed"
- name: Stop replica that is no longer running
mysql_replication:
<<: *mysql_params
login_port: '{{ mysql_replica1_port }}'
mode: stopreplica
fail_on_error: true
register: result
# # Test stopreplica mode:
# # pymysql 0.10.0+ always returns "changed"
# - name: Stop replica that is no longer running
# mysql_replication:
# <<: *mysql_params
# login_port: '{{ mysql_replica1_port }}'
# mode: stopreplica
# fail_on_error: true
# register: result
- name: Assert that stopreplica is not changed
assert:
that:
- result is not changed
when:
- connector_name == 'pymysql'
- connector_version is version('0.10.0', '<')
# - name: Assert that stopreplica is not changed
# assert:
# that:
# - result is not changed
# master / slave related choices were removed in 3.0.0
# https://github.com/ansible-collections/community.mysql/pull/252
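The assertions above stay commented out because, with pymysql 0.10.0+, startreplica and stopreplica report changed even when the replica was already in the requested state, so idempotence cannot be asserted from the return value. If the replica state itself needed checking, one hedged alternative (not used in this suite) would be to query it explicitly:

- name: Fetch replica status instead of relying on the changed flag
  mysql_replication:
    <<: *mysql_params                      # same connection anchor the rest of the file uses
    login_port: '{{ mysql_replica1_port }}'
    mode: getreplica
  register: replica_status

- name: Inspect the reported state         # field names (Replica_SQL_Running vs Slave_SQL_Running) vary by server version
  ansible.builtin.debug:
    var: replica_status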

View file

@ -50,21 +50,8 @@
login_port: '{{ mysql_primary_port }}'
ca_cert: /tmp/cert.pem
register: result
ignore_errors: true
- name: Issue-28 | Assert connection failed
assert:
that:
- result is failed
when:
- connector_name == 'pymysql'
- name: Issue-28 | Assert connection succeeded
assert:
that:
- result is succeeded
when:
- connector_name != 'pymysql'
failed_when:
- result is success
- name: Issue-28 | Attempt connection with newly created user ignoring hostname
mysql_user:
@ -77,13 +64,6 @@
login_port: '{{ mysql_primary_port }}'
ca_cert: /tmp/cert.pem
check_hostname: false
register: result
ignore_errors: true
- name: Issue-28 | Assert connection succeeded
assert:
that:
- result is succeeded or 'pymysql >= 0.7.11 is required' in result.msg
- name: Issue-28 | Drop mysql user
mysql_user:

View file

@ -403,75 +403,66 @@
# plugins that are loaded by default are sha2*, but these aren't compatible with pymysql < 0.9, so skip these tests
# for those versions.
#
- name: Plugin auth | Test plugin auth switching which doesn't work on pymysql < 0.9
when:
- >
connector_name != 'pymysql'
or (
connector_name == 'pymysql'
and connector_version is version('0.9', '>=')
)
block:
- name: Plugin auth | Create user with plugin auth (empty auth string)
mysql_user:
<<: *mysql_params
name: '{{ test_user_name }}'
plugin: '{{ test_plugin_type }}'
priv: '{{ test_default_priv }}'
register: result
- name: Plugin auth | Create user with plugin auth (empty auth string)
mysql_user:
<<: *mysql_params
name: '{{ test_user_name }}'
plugin: '{{ test_plugin_type }}'
priv: '{{ test_default_priv }}'
register: result
- name: Plugin auth | Get user information (empty auth string)
command: "{{ mysql_command }} -e \"SHOW CREATE USER '{{ test_user_name }}'@'localhost'\""
register: show_create_user
- name: Plugin auth | Get user information (empty auth string)
command: "{{ mysql_command }} -e \"SHOW CREATE USER '{{ test_user_name }}'@'localhost'\""
register: show_create_user
- name: Plugin auth | Check that the module made a change (empty auth string)
assert:
that:
- result is changed
- name: Plugin auth | Check that the module made a change (empty auth string)
assert:
that:
- result is changed
- name: Plugin auth | Check that the expected plugin type is set (empty auth string)
assert:
that:
- test_plugin_type in show_create_user.stdout
when: db_engine == 'mysql' or (db_engine == 'mariadb' and db_version is version('10.3', '>='))
- name: Plugin auth | Check that the expected plugin type is set (empty auth string)
assert:
that:
- test_plugin_type in show_create_user.stdout
when: db_engine == 'mysql' or (db_engine == 'mariadb' and db_version is version('10.3', '>='))
- include_tasks: utils/assert_user.yml
vars:
user_name: "{{ test_user_name }}"
user_host: localhost
priv: "{{ test_default_priv_type }}"
- include_tasks: utils/assert_user.yml
vars:
user_name: "{{ test_user_name }}"
user_host: localhost
priv: "{{ test_default_priv_type }}"
- name: Plugin auth | Switch user to sha256_password auth plugin
mysql_user:
<<: *mysql_params
name: '{{ test_user_name }}'
plugin: sha256_password
priv: '{{ test_default_priv }}'
register: result
- name: Plugin auth | Switch user to sha256_password auth plugin
mysql_user:
<<: *mysql_params
name: '{{ test_user_name }}'
plugin: sha256_password
priv: '{{ test_default_priv }}'
register: result
- name: Plugin auth | Get user information (sha256_password)
command: "{{ mysql_command }} -e \"SHOW CREATE USER '{{ test_user_name }}'@'localhost'\""
register: show_create_user
- name: Plugin auth | Get user information (sha256_password)
command: "{{ mysql_command }} -e \"SHOW CREATE USER '{{ test_user_name }}'@'localhost'\""
register: show_create_user
- name: Plugin auth | Check that the module made a change (sha256_password)
assert:
that:
- result is changed
- name: Plugin auth | Check that the module made a change (sha256_password)
assert:
that:
- result is changed
- name: Plugin auth | Check that the expected plugin type is set (sha256_password)
assert:
that:
- "'sha256_password' in show_create_user.stdout"
when: db_engine == 'mysql' or (db_engine == 'mariadb' and db_version is version('10.3', '>='))
- name: Plugin auth | Check that the expected plugin type is set (sha256_password)
assert:
that:
- "'sha256_password' in show_create_user.stdout"
when: db_engine == 'mysql' or (db_engine == 'mariadb' and db_version is version('10.3', '>='))
- include_tasks: utils/assert_user.yml
vars:
user_name: "{{ test_user_name }}"
user_host: localhost
priv: "{{ test_default_priv_type }}"
- include_tasks: utils/assert_user.yml
vars:
user_name: "{{ test_user_name }}"
user_host: localhost
priv: "{{ test_default_priv_type }}"
# Cleanup
- include_tasks: utils/remove_user.yml
vars:
user_name: "{{ test_user_name }}"
# Cleanup
- include_tasks: utils/remove_user.yml
vars:
user_name: "{{ test_user_name }}"

View file

@ -48,19 +48,8 @@
login_port: '{{ mysql_primary_port }}'
ca_cert: /tmp/cert.pem
register: result
ignore_errors: yes
- assert:
that:
- result is failed
when:
- connector_name == 'pymysql'
- assert:
that:
- result is succeeded
when:
- connector_name != 'pymysql'
failed_when:
- result is success
- name: attempt connection with newly created user ignoring hostname
mysql_variables:
@ -70,13 +59,7 @@
login_host: '{{ mysql_host }}'
login_port: '{{ mysql_primary_port }}'
ca_cert: /tmp/cert.pem
check_hostname: no
register: result
ignore_errors: yes
- assert:
that:
- result is succeeded or 'pymysql >= 0.7.11 is required' in result.msg
check_hostname: false
- name: Drop mysql user
mysql_user:

View file

@ -188,16 +188,14 @@
output: "{{ oor_result }}"
var_name: max_connect_errors
var_value: 1
when:
- connector_name == 'mysqlclient'
- db_engine == 'mysql' # mysqlclient returns "changed" with MariaDB
- include_tasks: assert_fail_msg.yml
vars:
output: "{{ oor_result }}"
msg: 'Truncated incorrect'
when:
- connector_name == 'pymsql'
# pymysql applies the invalid value without errors:
# msg: "Variable change succeeded prev_value=100"
# query: "SET GLOBAL `max_connect_errors` = -1"
# - include_tasks: assert_fail_msg.yml
# vars:
# output: "{{ oor_result }}"
# msg: 'Truncated incorrect'
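assert_fail_msg.yml itself is not part of this diff; as a rough sketch of what such a helper presumably does (an assumption, since only its call sites are visible), it checks that a captured failure carries the expected message fragment:

# Sketch of an assert_fail_msg.yml-style helper; the variable names come from
# the call sites above, the body is assumed.
- name: Assert an expected failure message
  ansible.builtin.assert:
    that:
      - output is failed
      - msg in output.msg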
# ============================================================
# Verify mysql_variable fails when setting an incorrect value (incorrect type)