Skip to content

Commit bedcad7

Browse files
authored
Merge pull request #582 from YAtOff/fix/parse-default-column-charset
Fix an issue in parsing the default column charset
2 parents 82240b7 + 4b31b5b commit bedcad7

File tree

2 files changed

+48
-7
lines changed

2 files changed

+48
-7
lines changed

pymysqlreplication/row_event.py

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1005,14 +1005,17 @@ def _parsed_column_charset_by_default_charset(
10051005
column_type_detect_function,
10061006
):
10071007
column_charset = []
1008+
position = 0
10081009
for i in range(self.column_count):
10091010
column_type = self.columns[i].type
10101011
if not column_type_detect_function(column_type, dbms=self.dbms):
10111012
continue
1012-
elif i not in column_charset_collation.keys():
1013-
column_charset.append(default_charset_collation)
10141013
else:
1015-
column_charset.append(column_charset_collation[i])
1014+
if position not in column_charset_collation.keys():
1015+
column_charset.append(default_charset_collation)
1016+
else:
1017+
column_charset.append(column_charset_collation[position])
1018+
position += 1
10161019

10171020
return column_charset
10181021

pymysqlreplication/tests/test_basic.py

Lines changed: 42 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -420,6 +420,44 @@ def test_minimal_image_update_row_event(self):
420420
self.assertEqual(event.rows[0]["after_values"]["id"], None)
421421
self.assertEqual(event.rows[0]["after_values"]["data"], "World")
422422

423+
def test_default_charset_parsing(self):
424+
"""
425+
Here, we want the database to include the binary charset into
426+
the DEFAULT_CHARSET optional metadata block.
427+
Also, we are adding an int field and two text fields to force
428+
a difference in the index of the blob column in the table
429+
and in the list of columns that have charset.
430+
"""
431+
query = """CREATE TABLE test (
432+
id INT NOT NULL AUTO_INCREMENT,
433+
text1 VARCHAR(255) NOT NULL,
434+
text2 VARCHAR(255) NOT NULL,
435+
data LONGBLOB NOT NULL,
436+
PRIMARY KEY (id)
437+
) DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;"""
438+
self.execute(query)
439+
query = "INSERT INTO test (text1, text2, data) VALUES(%s, %s, %s)"
440+
self.execute_with_args(query, ("text", "text", b"data"))
441+
self.execute("COMMIT")
442+
443+
self.assertIsInstance(self.stream.fetchone(), RotateEvent)
444+
self.assertIsInstance(self.stream.fetchone(), FormatDescriptionEvent)
445+
# QueryEvent for the Create Table
446+
self.assertIsInstance(self.stream.fetchone(), QueryEvent)
447+
# QueryEvent for the BEGIN
448+
self.assertIsInstance(self.stream.fetchone(), QueryEvent)
449+
450+
event = self.stream.fetchone()
451+
self.assertIsInstance(event, TableMapEvent)
452+
if event.table_map[event.table_id].column_name_flag:
453+
columns = {c.name: c for c in event.columns}
454+
assert columns["text1"].character_set_name == "utf8"
455+
assert columns["text1"].collation_name.startswith("utf8")
456+
assert columns["text2"].character_set_name == "utf8"
457+
assert columns["text2"].collation_name.startswith("utf8")
458+
assert columns["data"].character_set_name == "binary"
459+
assert columns["data"].collation_name == "binary"
460+
423461
def test_log_pos(self):
424462
query = "CREATE TABLE test (id INT NOT NULL AUTO_INCREMENT, data VARCHAR (50) NOT NULL, PRIMARY KEY (id))"
425463
self.execute(query)
@@ -1859,7 +1897,7 @@ def setUp(self):
18591897
def test_json_partial_update(self):
18601898
create_query = "CREATE TABLE test_json_v2 (id INT, c JSON,PRIMARY KEY (id)) ;"
18611899
column_add_query = "ALTER TABLE test_json_v2 ADD COLUMN d JSON DEFAULT NULL, ADD COLUMN e JSON DEFAULT NULL;"
1862-
insert_query = """INSERT INTO test_json_v2 VALUES
1900+
insert_query = """INSERT INTO test_json_v2 VALUES
18631901
(101
18641902
,'{"a":"aaaaaaaaaaaaa", "c":"ccccccccccccccc", "ab":["abababababababa", "babababababab"]}'
18651903
,'{"a":"aaaaaaaaaaaaa", "c":"ccccccccccccccc", "ab":["abababababababa", "babababababab"]}'
@@ -1896,7 +1934,7 @@ def test_json_partial_update_column_value_none(self):
18961934
drop_table_if_exists_query = "DROP TABLE IF EXISTS test_json_v2;"
18971935
create_query = "CREATE TABLE test_json_v2 (id INT, c JSON,PRIMARY KEY (id)) ;"
18981936
column_add_query = "ALTER TABLE test_json_v2 ADD COLUMN d JSON DEFAULT NULL, ADD COLUMN e JSON DEFAULT NULL;"
1899-
insert_query = """INSERT INTO test_json_v2 VALUES
1937+
insert_query = """INSERT INTO test_json_v2 VALUES
19001938
(101
19011939
,'{"a":"aaaaaaaaaaaaa", "c":"ccccccccccccccc", "ab":["abababababababa", "babababababab"]}'
19021940
,'{"a":"aaaaaaaaaaaaa", "c":"ccccccccccccccc", "ab":["abababababababa", "babababababab"]}'
@@ -1934,7 +1972,7 @@ def test_json_partial_update_json_remove(self):
19341972
drop_table_if_exists_query = "DROP TABLE IF EXISTS test_json_v2;"
19351973
create_query = "CREATE TABLE test_json_v2 (id INT, c JSON,PRIMARY KEY (id)) ;"
19361974
column_add_query = "ALTER TABLE test_json_v2 ADD COLUMN d JSON DEFAULT NULL, ADD COLUMN e JSON DEFAULT NULL;"
1937-
insert_query = """INSERT INTO test_json_v2 VALUES
1975+
insert_query = """INSERT INTO test_json_v2 VALUES
19381976
(101
19391977
,'{"a":"aaaaaaaaaaaaa", "c":"ccccccccccccccc", "ab":["abababababababa", "babababababab"]}'
19401978
,'{"a":"aaaaaaaaaaaaa", "c":"ccccccccccccccc", "ab":["abababababababa", "babababababab"]}'
@@ -1974,7 +2012,7 @@ def test_json_partial_update_two_column(self):
19742012
drop_table_if_exists_query = "DROP TABLE IF EXISTS test_json_v2;"
19752013
create_query = "CREATE TABLE test_json_v2 (id INT, c JSON,PRIMARY KEY (id)) ;"
19762014
column_add_query = "ALTER TABLE test_json_v2 ADD COLUMN d JSON DEFAULT NULL, ADD COLUMN e JSON DEFAULT NULL;"
1977-
insert_query = """INSERT INTO test_json_v2 VALUES
2015+
insert_query = """INSERT INTO test_json_v2 VALUES
19782016
(101
19792017
,'{"a":"aaaaaaaaaaaaa", "c":"ccccccccccccccc", "ab":["abababababababa", "babababababab"]}'
19802018
,'{"a":"aaaaaaaaaaaaa", "c":"ccccccccccccccc", "ab":["abababababababa", "babababababab"]}'

0 commit comments

Comments
 (0)