'use strict';

// Compiler for MySQL *binary* protocol result-set rows (used for prepared
// statements). For a given set of column definitions it generates — via
// `generate-function` — a specialized `BinaryRow` class whose `next()` method
// reads one row directly off the wire packet. The compiled parser is cached
// per (fields, options, config) combination by ./parser_cache.js.

const FieldFlags = require('../constants/field_flags.js');
const Charsets = require('../constants/charsets.js');
const Types = require('../constants/types.js');
const helpers = require('../helpers');
const genFunc = require('generate-function');
const parserCache = require('./parser_cache.js');

// Reverse lookup: numeric column-type code -> type name string.
// Used by the typeCast field wrapper to expose a human-readable `type`.
const typeNames = [];
for (const t in Types) {
  typeNames[Types[t]] = t;
}

/**
 * Return a snippet of JavaScript source (as a string) that reads one value
 * of the given column type from `packet`. The snippet is spliced verbatim
 * into the generated parser, either as `lvalue = <snippet>;` or inside the
 * thunk passed to a user-supplied typeCast: `function() { return <snippet> }`.
 *
 * @param {object} field - column definition packet (columnType, flags, decimals, characterSet, ...)
 * @param {object} config - connection config (supportBigNumbers, decimalNumbers, jsonStrings, ...)
 * @param {object} options - per-query options; take precedence over config for
 *   supportBigNumbers / bigNumberStrings / timezone / dateStrings
 * @param {number} fieldNum - index of this column; used to reference
 *   `fields[fieldNum].encoding` at parse time in the default (string) case
 * @returns {string} JavaScript expression source reading the value
 */
function readCodeFor(field, config, options, fieldNum) {
  // Query-level options override connection-level config.
  const supportBigNumbers = Boolean(
    options.supportBigNumbers || config.supportBigNumbers,
  );
  const bigNumberStrings = Boolean(
    options.bigNumberStrings || config.bigNumberStrings,
  );
  const timezone = options.timezone || config.timezone;
  const dateStrings = options.dateStrings || config.dateStrings;
  const unsigned = field.flags & FieldFlags.UNSIGNED;
  switch (field.columnType) {
    case Types.TINY:
      return unsigned ? 'packet.readInt8();' : 'packet.readSInt8();';
    case Types.SHORT:
      return unsigned ? 'packet.readInt16();' : 'packet.readSInt16();';
    case Types.LONG:
    case Types.INT24: // in binary protocol int24 is encoded in 4 bytes int32
      return unsigned ? 'packet.readInt32();' : 'packet.readSInt32();';
    case Types.YEAR:
      // YEAR is transmitted as a 2-byte integer in the binary protocol.
      return 'packet.readInt16()';
    case Types.FLOAT:
      return 'packet.readFloat();';
    case Types.DOUBLE:
      return 'packet.readDouble();';
    case Types.NULL:
      return 'null;';
    case Types.DATE:
    case Types.DATETIME:
    case Types.TIMESTAMP:
    case Types.NEWDATE:
      // dateStrings may be a boolean or a list of type names; helpers.typeMatch
      // decides whether this particular temporal type should stay a string.
      if (helpers.typeMatch(field.columnType, dateStrings, Types)) {
        // field.decimals = fractional-seconds precision, baked in as a literal.
        return `packet.readDateTimeString(${parseInt(field.decimals, 10)});`;
      }
      // srcEscape embeds the timezone as a safely-quoted source literal.
      return `packet.readDateTime(${helpers.srcEscape(timezone)});`;
    case Types.TIME:
      return 'packet.readTimeString()';
    case Types.DECIMAL:
    case Types.NEWDECIMAL:
      // DECIMAL arrives as text; optionally coerce to a JS number
      // (may lose precision — that's why it's opt-in via decimalNumbers).
      if (config.decimalNumbers) {
        return 'packet.parseLengthCodedFloat();';
      }
      return 'packet.readLengthCodedString("ascii");';
    case Types.GEOMETRY:
      return 'packet.parseGeometryValue();';
    case Types.VECTOR:
      return 'packet.parseVector()';
    case Types.JSON:
      // Since for JSON columns mysql always returns charset 63 (BINARY),
      // we have to handle it according to JSON specs and use "utf8",
      // see https://github.com/sidorares/node-mysql2/issues/409
      return config.jsonStrings
        ? 'packet.readLengthCodedString("utf8")'
        : 'JSON.parse(packet.readLengthCodedString("utf8"));';
    case Types.LONGLONG:
      // BIGINT: three representations depending on options —
      //   default: plain JS number (readInt64JSNumber may lose precision),
      //   supportBigNumbers: number when safe, else string/BigInt (reader decides),
      //   + bigNumberStrings: always a string.
      if (!supportBigNumbers) {
        return unsigned
          ? 'packet.readInt64JSNumber();'
          : 'packet.readSInt64JSNumber();';
      }
      if (bigNumberStrings) {
        return unsigned
          ? 'packet.readInt64String();'
          : 'packet.readSInt64String();';
      }
      return unsigned ? 'packet.readInt64();' : 'packet.readSInt64();';
    default:
      // Everything else is a length-coded value: raw Buffer for BINARY
      // charset columns, otherwise a string decoded with the column's
      // encoding looked up at parse time from the fields array.
      if (field.characterSet === Charsets.BINARY) {
        return 'packet.readLengthCodedBuffer();';
      }
      return `packet.readLengthCodedString(fields[${fieldNum}].encoding)`;
  }
}

/**
 * Compile a row-parser class specialized for `fields`.
 *
 * Emits source for `class BinaryRow { next(packet, fields, options) {...} }`
 * tailored to the column list, honoring rowsAsArray / nestTables / typeCast
 * options, and materializes it with generate-function's toFunction(),
 * closing over the local `wrap` helper.
 *
 * @param {Array<object>} fields - column definition packets for this result set
 * @param {object} options - per-query options (rowsAsArray, nestTables, typeCast, ...)
 * @param {object} config - connection configuration
 * @returns {Function} factory returning the compiled BinaryRow class
 */
function compile(fields, options, config) {
  const parserFn = genFunc();
  // Binary row null bitmap: ceil((numFields + 2) / 8) bytes — the first
  // two bits of the bitmap are reserved (offset 2 in the binary protocol),
  // hence the extra +2 here and the first field's bit being 4 (bit #2) below.
  const nullBitmapLength = Math.floor((fields.length + 7 + 2) / 8);

  // Field wrapper handed to a user-supplied typeCast function. Exposes
  // column metadata plus lazy readers (string/buffer/geometry) that consume
  // the current value from the packet if the user wants the raw form.
  function wrap(field, packet) {
    return {
      type: typeNames[field.columnType],
      length: field.columnLength,
      db: field.schema,
      table: field.table,
      name: field.name,
      string: function (encoding = field.encoding) {
        if (field.columnType === Types.JSON && encoding === field.encoding) {
          // Since for JSON columns mysql always returns charset 63 (BINARY),
          // we have to handle it according to JSON specs and use "utf8",
          // see https://github.com/sidorares/node-mysql2/issues/1661
          console.warn(
            `typeCast: JSON column "${field.name}" is interpreted as BINARY by default, recommended to manually set utf8 encoding: \`field.string("utf8")\``,
          );
        }
        // Temporal and TINY columns are not length-coded strings in the
        // binary protocol, so .string() must use the dedicated readers.
        if (
          [Types.DATETIME, Types.NEWDATE, Types.TIMESTAMP, Types.DATE].includes(
            field.columnType,
          )
        ) {
          return packet.readDateTimeString(parseInt(field.decimals, 10));
        }
        if (field.columnType === Types.TINY) {
          const unsigned = field.flags & FieldFlags.UNSIGNED;
          return String(unsigned ? packet.readInt8() : packet.readSInt8());
        }
        if (field.columnType === Types.TIME) {
          return packet.readTimeString();
        }
        return packet.readLengthCodedString(encoding);
      },
      buffer: function () {
        return packet.readLengthCodedBuffer();
      },
      geometry: function () {
        return packet.parseGeometryValue();
      },
    };
  }

  // --- emit the class skeleton -------------------------------------------
  parserFn('(function(){');
  parserFn('return class BinaryRow {');
  parserFn('constructor() {');
  parserFn('}');
  parserFn('next(packet, fields, options) {');
  if (options.rowsAsArray) {
    parserFn(`const result = new Array(${fields.length});`);
  } else {
    parserFn('const result = {};');
  }

  // Global typeCast
  // (connection-level typeCast applies unless the query supplied its own)
  if (
    typeof config.typeCast === 'function' &&
    typeof options.typeCast !== 'function'
  ) {
    options.typeCast = config.typeCast;
  }

  parserFn('packet.readInt8();'); // status byte
  // Read the whole null bitmap up front into local bytes.
  for (let i = 0; i < nullBitmapLength; ++i) {
    parserFn(`const nullBitmaskByte${i} = packet.readInt8();`);
  }
  let lvalue = '';
  // First column's null flag is bit 2 (value 4) — bits 0-1 are reserved.
  let currentFieldNullBit = 4;
  let nullByteIndex = 0;
  let fieldName = '';
  let tableName = '';
  // --- emit one read per column ------------------------------------------
  for (let i = 0; i < fields.length; i++) {
    fieldName = helpers.fieldEscape(fields[i].name);
    // parserFn(`// ${fieldName}: ${typeNames[fields[i].columnType]}`);
    // Destination lvalue depends on nestTables/rowsAsArray shaping options.
    if (typeof options.nestTables === 'string') {
      // nestTables as a string: flat keys `<table><sep><column>`.
      lvalue = `result[${helpers.fieldEscape(fields[i].table + options.nestTables + fields[i].name)}]`;
    } else if (options.nestTables === true) {
      // nestTables === true: one sub-object per table.
      tableName = helpers.fieldEscape(fields[i].table);
      parserFn(`if (!result[${tableName}]) result[${tableName}] = {};`);
      lvalue = `result[${tableName}][${fieldName}]`;
    } else if (options.rowsAsArray) {
      lvalue = `result[${i.toString(10)}]`;
    } else {
      lvalue = `result[${fieldName}]`;
    }
    // NULL columns have their bit set in the bitmap and carry no value bytes.
    parserFn(`if (nullBitmaskByte${nullByteIndex} & ${currentFieldNullBit}) `);
    parserFn(`${lvalue} = null;`);
    parserFn('else {');
    if (options.typeCast === false) {
      // typeCast: false — hand back the raw length-coded bytes.
      parserFn(`${lvalue} = packet.readLengthCodedBuffer();`);
    } else {
      const fieldWrapperVar = `fieldWrapper${i}`;
      parserFn(`const ${fieldWrapperVar} = wrap(fields[${i}], packet);`);
      const readCode = readCodeFor(fields[i], config, options, i);
      if (typeof options.typeCast === 'function') {
        // User typeCast gets the wrapper plus a "next" thunk running the
        // default read; it may call either, but must consume the value.
        parserFn(
          `${lvalue} = options.typeCast(${fieldWrapperVar}, function() { return ${readCode} });`,
        );
      } else {
        parserFn(`${lvalue} = ${readCode};`);
      }
    }
    parserFn('}');
    // Advance to the next bitmap bit, rolling into the next byte after bit 7.
    currentFieldNullBit *= 2;
    if (currentFieldNullBit === 0x100) {
      currentFieldNullBit = 1;
      nullByteIndex++;
    }
  }
  parserFn('return result;');
  parserFn('}');
  parserFn('};')('})()');
  if (config.debug) {
    helpers.printDebugWithCode(
      'Compiled binary protocol row parser',
      parserFn.toString(),
    );
  }
  // toFunction injects `wrap` into the generated code's scope.
  return parserFn.toFunction({ wrap });
}

/**
 * Public entry point: fetch (or compile and cache) the binary row parser
 * for this (fields, options, config) combination.
 */
function getBinaryParser(fields, options, config) {
  return parserCache.getParser('binary', fields, options, config, compile);
}

module.exports = getBinaryParser;