diff --git a/benchmarks/benchmark-typecast.js b/benchmarks/benchmark-typecast.js
new file mode 100644
index 0000000000..26d2fe198f
--- /dev/null
+++ b/benchmarks/benchmark-typecast.js
@@ -0,0 +1,81 @@
+'use strict';
+
+// Benchmarks the typeCast `field.value()` helper against the raw
+// field.string()/field.buffer() accessors and against no typeCast at all.
+const createConnection = require('../test/common.test.cjs').createConnection;
+const connection = createConnection();
+const NUM_SAMPLES = 10000;
+
+function typeCastRaw(field, next) {
+  if (field.type === 'VARCHAR') {
+    return field.string();
+  }
+
+  if (field.type === 'BINARY') {
+    return field.buffer().toString('ascii');
+  }
+
+  return next();
+}
+
+function typeCastValue(field, next) {
+  if (field.type === 'VARCHAR') {
+    return field.value();
+  }
+
+  if (field.type === 'BINARY') {
+    return field.value().toString('ascii');
+  }
+
+  return next();
+}
+
+async function benchmark(iterations, executor, typeCast) {
+  await new Promise((resolve, reject) => {
+    connection.query(
+      'TRUNCATE benchmark_typecast',
+      (err) => {
+        // return so resolve() is not reached after a rejection
+        if (err) return reject(err);
+        resolve();
+      },
+    );
+  });
+
+  await new Promise((resolve, reject) => {
+    connection.query(
+      'INSERT INTO benchmark_typecast VALUES ("hello", 0x1234)',
+      (err) => {
+        if (err) return reject(err);
+        resolve();
+      },
+    );
+  });
+
+  const samples = [];
+  for (let i = 0; i < iterations; i++) {
+    const start = Date.now();
+    await new Promise((resolve, reject) => {
+      connection[executor]({ sql: 'SELECT * FROM benchmark_typecast', typeCast }, (err) => {
+        if (err) return reject(err);
+        resolve();
+      });
+    });
+    samples.push(Date.now() - start);
+  }
+
+  // Log the typeCast function's name, not its source text.
+  console.log(
+    `${executor} ${typeCast ? typeCast.name : 'raw'}: AVG ${samples.reduce((a, b) => a + b, 0) / samples.length}ms`,
+  );
+}
+
+connection.query(
+  'CREATE TEMPORARY TABLE benchmark_typecast (v1 VARCHAR(16), v2 BINARY(4))',
+  async (err) => {
+    if (err) throw err;
+    await benchmark(NUM_SAMPLES, 'query');
+    await benchmark(NUM_SAMPLES, 'query', typeCastRaw);
+    await benchmark(NUM_SAMPLES, 'query', typeCastValue);
+
+    connection.end();
+  },
+);
diff --git a/lib/parsers/binary_parser.js b/lib/parsers/binary_parser.js
index 1083204f04..c0f28c8b51 100644
--- a/lib/parsers/binary_parser.js
+++ b/lib/parsers/binary_parser.js
@@ -81,6 +81,80 @@ function readCodeFor(field, config, options, fieldNum) {
   }
 }
 
+// Reads the current column value from a binary-protocol row packet for the
+// given field, honoring the same config/options (supportBigNumbers,
+// bigNumberStrings, dateStrings, timezone, decimalNumbers) as the compiled
+// per-field readers. Backs the typeCast `field.value()` helper.
+function readValueFor(field, config, options, packet) {
+  const supportBigNumbers = Boolean(
+    options.supportBigNumbers || config.supportBigNumbers,
+  );
+  const bigNumberStrings = Boolean(
+    options.bigNumberStrings || config.bigNumberStrings,
+  );
+  const timezone = options.timezone || config.timezone;
+  const dateStrings = options.dateStrings || config.dateStrings;
+  const unsigned = field.flags & FieldFlags.UNSIGNED;
+  switch (field.columnType) {
+    case Types.TINY:
+      return unsigned ? packet.readInt8() : packet.readSInt8();
+    case Types.SHORT:
+      return unsigned ? packet.readInt16() : packet.readSInt16();
+    case Types.LONG:
+    case Types.INT24: // in binary protocol int24 is encoded in 4 bytes int32
+      return unsigned ? packet.readInt32() : packet.readSInt32();
+    case Types.YEAR:
+      return packet.readInt16();
+    case Types.FLOAT:
+      return packet.readFloat();
+    case Types.DOUBLE:
+      return packet.readDouble();
+    case Types.NULL:
+      return null;
+    case Types.DATE:
+    case Types.DATETIME:
+    case Types.TIMESTAMP:
+    case Types.NEWDATE:
+      if (helpers.typeMatch(field.columnType, dateStrings, Types)) {
+        return packet.readDateTimeString(field.decimals);
+      }
+      return packet.readDateTime(timezone);
+    case Types.TIME:
+      return packet.readTimeString();
+    case Types.DECIMAL:
+    case Types.NEWDECIMAL:
+      if (config.decimalNumbers) {
+        return packet.parseLengthCodedFloat();
+      }
+      return packet.readLengthCodedString('ascii');
+    case Types.GEOMETRY:
+      return packet.parseGeometryValue();
+    case Types.JSON:
+      // Since for JSON columns mysql always returns charset 63 (BINARY),
+      // we have to handle it according to JSON specs and use "utf8",
+      // see https://github.com/sidorares/node-mysql2/issues/409
+      return JSON.parse(packet.readLengthCodedString('utf8'));
+    case Types.LONGLONG:
+      if (!supportBigNumbers) {
+        return unsigned
+          ? packet.readInt64JSNumber()
+          : packet.readSInt64JSNumber();
+      }
+      if (bigNumberStrings) {
+        return unsigned
+          ? packet.readInt64String()
+          : packet.readSInt64String();
+      }
+      return unsigned ? packet.readInt64() : packet.readSInt64();
+
+    default:
+      if (field.characterSet === Charsets.BINARY) {
+        return packet.readLengthCodedBuffer();
+      }
+      return packet.readLengthCodedString(field.encoding);
+  }
+}
+
 function compile(fields, options, config) {
   const parserFn = genFunc();
   const nullBitmapLength = Math.floor((fields.length + 7 + 2) / 8);
@@ -110,6 +184,9 @@ function compile(fields, options, config) {
       },
       geometry: function () {
        return packet.parseGeometryValue();
       },
+      value: function () {
+        return readValueFor(field, config, options, packet);
+      },
     };
   }
@@ -208,6 +285,7 @@ function compile(fields, options, config) {
       parserFn.toString(),
     );
   }
+
   return parserFn.toFunction({ wrap });
 }
diff --git a/lib/parsers/text_parser.js b/lib/parsers/text_parser.js
index 7e46514463..af02540755 100644
--- a/lib/parsers/text_parser.js
+++ b/lib/parsers/text_parser.js
@@ -72,6 +72,71 @@ function readCodeFor(type, charset, encodingExpr, config, options) {
   }
 }
 
+// Reads the current column value from a text-protocol row packet for the
+// given field, honoring the same config/options (supportBigNumbers,
+// bigNumberStrings, dateStrings, timezone, decimalNumbers) as the compiled
+// per-field readers. Backs the typeCast `field.value()` helper.
+function readValueFor(field, config, options, packet) {
+  const supportBigNumbers = Boolean(
+    options.supportBigNumbers || config.supportBigNumbers,
+  );
+  const bigNumberStrings = Boolean(
+    options.bigNumberStrings || config.bigNumberStrings,
+  );
+  const timezone = options.timezone || config.timezone;
+  const dateStrings = options.dateStrings || config.dateStrings;
+
+  switch (field.columnType) {
+    case Types.TINY:
+    case Types.SHORT:
+    case Types.LONG:
+    case Types.INT24:
+    case Types.YEAR:
+      return packet.parseLengthCodedIntNoBigCheck();
+    case Types.LONGLONG:
+      if (supportBigNumbers && bigNumberStrings) {
+        return packet.parseLengthCodedIntString();
+      }
+      return packet.parseLengthCodedInt(supportBigNumbers);
+    case Types.FLOAT:
+    case Types.DOUBLE:
+      return packet.parseLengthCodedFloat();
+    case Types.NULL:
+      return packet.readLengthCodedNumber();
+    case Types.DECIMAL:
+    case Types.NEWDECIMAL:
+      if (config.decimalNumbers) {
+        return packet.parseLengthCodedFloat();
+      }
+      return packet.readLengthCodedString('ascii');
+    case Types.DATE:
+      if (helpers.typeMatch(field.columnType, dateStrings, Types)) {
+        return packet.readLengthCodedString('ascii');
+      }
+      return packet.parseDate(timezone);
+    case Types.DATETIME:
+    case Types.TIMESTAMP:
+      if (helpers.typeMatch(field.columnType, dateStrings, Types)) {
+        return packet.readLengthCodedString('ascii');
+      }
+      return packet.parseDateTime(timezone);
+    case Types.TIME:
+      return packet.readLengthCodedString('ascii');
+    case Types.GEOMETRY:
+      return packet.parseGeometryValue();
+    case Types.JSON:
+      // Since for JSON columns mysql always returns charset 63 (BINARY),
+      // we have to handle it according to JSON specs and use "utf8",
+      // see https://github.com/sidorares/node-mysql2/issues/409
+      return JSON.parse(packet.readLengthCodedString('utf8'));
+    default:
+      if (field.characterSet === Charsets.BINARY) {
+        return packet.readLengthCodedBuffer();
+      }
+      return packet.readLengthCodedString(field.encoding);
+  }
+}
+
 function compile(fields, options, config) {
   // use global typeCast if current query doesn't specify one
   if (
@@ -106,6 +171,9 @@ function compile(fields, options, config) {
       },
       geometry: function () {
        return _this.packet.parseGeometryValue();
       },
+      value: function () {
+        return readValueFor(field, config, options, _this.packet);
+      },
     };
   }
diff --git a/test/integration/connection/test-typecast-values-execute.test.cjs b/test/integration/connection/test-typecast-values-execute.test.cjs
new file mode 100644
index 0000000000..7cecda2535
--- /dev/null
+++ b/test/integration/connection/test-typecast-values-execute.test.cjs
@@ -0,0 +1,50 @@
+'use strict';
+
+const { assert } = require('poku');
+
+const v1 = 'variable len';
+const v2 = true;
+const v3 = '2024-04-18 15:48:14';
+const v4 = '1.23';
+
+// Exercises field.value() inside a per-query typeCast (binary protocol).
+function typeCast(field, next) {
+  if (field.type === 'TINY') {
+    return field.value() === 1;
+  }
+  if (field.type === 'DATETIME') {
+    return new Date(field.value());
+  }
+  return next();
+}
+
+function executeTests(res) {
+  const [{ v1: v1Actual, v2: v2Actual, v3: v3Actual, v4: v4Actual }] = res;
+  assert.equal(v1Actual, v1);
+  assert.equal(v2Actual, v2);
+  assert.equal(v3Actual.getTime(), new Date(v3).getTime());
+  assert.equal(v4Actual, v4);
+}
+
+const common = require('../../common.test.cjs');
+const connection = common.createConnection({
+  typeCast: false,
+});
+
+connection.query(
+  `CREATE TEMPORARY TABLE typecast (v1 VARCHAR(16), v2 TINYINT(1), v3 DATETIME, v4 DECIMAL(10, 2))`,
+  (err) => {
+    if (err) throw err;
+  },
+);
+connection.query(
+  `INSERT INTO typecast VALUES ('${v1}', ${v2},'${v3}', ${v4})`,
+  (err) => {
+    if (err) throw err;
+  },
+);
+connection.execute({ sql: 'SELECT * FROM typecast', typeCast }, (err, res) => {
+  if (err) throw err;
+  executeTests(res);
+  connection.end();
+});
diff --git a/test/integration/connection/test-typecast-values.test.cjs b/test/integration/connection/test-typecast-values.test.cjs
new file mode 100644
index 0000000000..c6b10e4a48
--- /dev/null
+++ b/test/integration/connection/test-typecast-values.test.cjs
@@ -0,0 +1,50 @@
+'use strict';
+
+const { assert } = require('poku');
+
+const v1 = 'variable len';
+const v2 = true;
+const v3 = '2024-04-18 15:48:14';
+const v4 = '1.23';
+
+// Exercises field.value() inside a per-query typeCast (text protocol).
+function typeCast(field, next) {
+  if (field.type === 'TINY') {
+    return field.value() === 1;
+  }
+  if (field.type === 'DATETIME') {
+    return new Date(field.value());
+  }
+  return next();
+}
+
+function executeTests(res) {
+  const [{ v1: v1Actual, v2: v2Actual, v3: v3Actual, v4: v4Actual }] = res;
+  assert.equal(v1Actual, v1);
+  assert.equal(v2Actual, v2);
+  assert.equal(v3Actual.getTime(), new Date(v3).getTime());
+  assert.equal(v4Actual, v4);
+}
+
+const common = require('../../common.test.cjs');
+const connection = common.createConnection({
+  typeCast: false,
+});
+
+connection.query(
+  `CREATE TEMPORARY TABLE typecast (v1 VARCHAR(16), v2 TINYINT(1), v3 DATETIME, v4 DECIMAL(10, 2))`,
+  (err) => {
+    if (err) throw err;
+  },
+);
+connection.query(
+  `INSERT INTO typecast VALUES ('${v1}', ${v2},'${v3}', ${v4})`,
+  (err) => {
+    if (err) throw err;
+  },
+);
+connection.query({ sql: 'SELECT * FROM typecast', typeCast }, (err, res) => {
+  if (err) throw err;
+  executeTests(res);
+  connection.end();
+});