Moved files around
231 user-mgmt_backend/node_modules/mysql2/lib/parsers/binary_parser.js (generated, vendored, new file)
@@ -0,0 +1,231 @@
'use strict';

const FieldFlags = require('../constants/field_flags.js');
const Charsets = require('../constants/charsets.js');
const Types = require('../constants/types.js');
const helpers = require('../helpers');
const genFunc = require('generate-function');
const parserCache = require('./parser_cache.js');

const typeNames = [];
for (const t in Types) {
  typeNames[Types[t]] = t;
}

function readCodeFor(field, config, options, fieldNum) {
  const supportBigNumbers = Boolean(
    options.supportBigNumbers || config.supportBigNumbers
  );
  const bigNumberStrings = Boolean(
    options.bigNumberStrings || config.bigNumberStrings
  );
  const timezone = options.timezone || config.timezone;
  const dateStrings = options.dateStrings || config.dateStrings;
  const unsigned = field.flags & FieldFlags.UNSIGNED;
  switch (field.columnType) {
    case Types.TINY:
      return unsigned ? 'packet.readInt8();' : 'packet.readSInt8();';
    case Types.SHORT:
      return unsigned ? 'packet.readInt16();' : 'packet.readSInt16();';
    case Types.LONG:
    case Types.INT24: // in binary protocol int24 is encoded in 4 bytes int32
      return unsigned ? 'packet.readInt32();' : 'packet.readSInt32();';
    case Types.YEAR:
      return 'packet.readInt16()';
    case Types.FLOAT:
      return 'packet.readFloat();';
    case Types.DOUBLE:
      return 'packet.readDouble();';
    case Types.NULL:
      return 'null;';
    case Types.DATE:
    case Types.DATETIME:
    case Types.TIMESTAMP:
    case Types.NEWDATE:
      if (helpers.typeMatch(field.columnType, dateStrings, Types)) {
        return `packet.readDateTimeString(${parseInt(field.decimals, 10)}, ${null}, ${field.columnType});`;
      }
      return `packet.readDateTime(${helpers.srcEscape(timezone)});`;
    case Types.TIME:
      return 'packet.readTimeString()';
    case Types.DECIMAL:
    case Types.NEWDECIMAL:
      if (config.decimalNumbers) {
        return 'packet.parseLengthCodedFloat();';
      }
      return 'packet.readLengthCodedString("ascii");';
    case Types.GEOMETRY:
      return 'packet.parseGeometryValue();';
    case Types.VECTOR:
      return 'packet.parseVector()';
    case Types.JSON:
      // Since for JSON columns mysql always returns charset 63 (BINARY),
      // we have to handle it according to JSON specs and use "utf8",
      // see https://github.com/sidorares/node-mysql2/issues/409
      return config.jsonStrings
        ? 'packet.readLengthCodedString("utf8")'
        : 'JSON.parse(packet.readLengthCodedString("utf8"));';
    case Types.LONGLONG:
      if (!supportBigNumbers) {
        return unsigned
          ? 'packet.readInt64JSNumber();'
          : 'packet.readSInt64JSNumber();';
      }
      if (bigNumberStrings) {
        return unsigned
          ? 'packet.readInt64String();'
          : 'packet.readSInt64String();';
      }
      return unsigned ? 'packet.readInt64();' : 'packet.readSInt64();';

    default:
      if (field.characterSet === Charsets.BINARY) {
        return 'packet.readLengthCodedBuffer();';
      }
      return `packet.readLengthCodedString(fields[${fieldNum}].encoding)`;
  }
}

function compile(fields, options, config) {
  const parserFn = genFunc();
  const nullBitmapLength = Math.floor((fields.length + 7 + 2) / 8);

  function wrap(field, packet) {
    return {
      type: typeNames[field.columnType],
      length: field.columnLength,
      db: field.schema,
      table: field.table,
      name: field.name,
      string: function (encoding = field.encoding) {
        if (field.columnType === Types.JSON && encoding === field.encoding) {
          // Since for JSON columns mysql always returns charset 63 (BINARY),
          // we have to handle it according to JSON specs and use "utf8",
          // see https://github.com/sidorares/node-mysql2/issues/1661
          console.warn(
            `typeCast: JSON column "${field.name}" is interpreted as BINARY by default, recommended to manually set utf8 encoding: \`field.string("utf8")\``
          );
        }

        if (
          [Types.DATETIME, Types.NEWDATE, Types.TIMESTAMP, Types.DATE].includes(
            field.columnType
          )
        ) {
          return packet.readDateTimeString(parseInt(field.decimals, 10));
        }

        if (field.columnType === Types.TINY) {
          const unsigned = field.flags & FieldFlags.UNSIGNED;

          return String(unsigned ? packet.readInt8() : packet.readSInt8());
        }

        if (field.columnType === Types.TIME) {
          return packet.readTimeString();
        }

        return packet.readLengthCodedString(encoding);
      },
      buffer: function () {
        return packet.readLengthCodedBuffer();
      },
      geometry: function () {
        return packet.parseGeometryValue();
      },
    };
  }

  parserFn('(function(){');
  parserFn('return class BinaryRow {');
  parserFn('constructor() {');
  parserFn('}');

  parserFn('next(packet, fields, options) {');
  if (options.rowsAsArray) {
    parserFn(`const result = new Array(${fields.length});`);
  } else {
    parserFn('const result = {};');
  }

  // Global typeCast
  if (
    typeof config.typeCast === 'function' &&
    typeof options.typeCast !== 'function'
  ) {
    options.typeCast = config.typeCast;
  }

  parserFn('packet.readInt8();'); // status byte
  for (let i = 0; i < nullBitmapLength; ++i) {
    parserFn(`const nullBitmaskByte${i} = packet.readInt8();`);
  }

  let lvalue = '';
  let currentFieldNullBit = 4;
  let nullByteIndex = 0;
  let fieldName = '';
  let tableName = '';

  for (let i = 0; i < fields.length; i++) {
    fieldName = helpers.fieldEscape(fields[i].name);
    // parserFn(`// ${fieldName}: ${typeNames[fields[i].columnType]}`);

    if (typeof options.nestTables === 'string') {
      lvalue = `result[${helpers.fieldEscape(fields[i].table + options.nestTables + fields[i].name)}]`;
    } else if (options.nestTables === true) {
      tableName = helpers.fieldEscape(fields[i].table);

      parserFn(`if (!result[${tableName}]) result[${tableName}] = {};`);
      lvalue = `result[${tableName}][${fieldName}]`;
    } else if (options.rowsAsArray) {
      lvalue = `result[${i.toString(10)}]`;
    } else {
      lvalue = `result[${fieldName}]`;
    }

    parserFn(`if (nullBitmaskByte${nullByteIndex} & ${currentFieldNullBit}) `);
    parserFn(`${lvalue} = null;`);
    parserFn('else {');

    if (options.typeCast === false) {
      parserFn(`${lvalue} = packet.readLengthCodedBuffer();`);
    } else {
      const fieldWrapperVar = `fieldWrapper${i}`;
      parserFn(`const ${fieldWrapperVar} = wrap(fields[${i}], packet);`);
      const readCode = readCodeFor(fields[i], config, options, i);

      if (typeof options.typeCast === 'function') {
        parserFn(
          `${lvalue} = options.typeCast(${fieldWrapperVar}, function() { return ${readCode} });`
        );
      } else {
        parserFn(`${lvalue} = ${readCode};`);
      }
    }
    parserFn('}');

    currentFieldNullBit *= 2;
    if (currentFieldNullBit === 0x100) {
      currentFieldNullBit = 1;
      nullByteIndex++;
    }
  }

  parserFn('return result;');
  parserFn('}');
  parserFn('};')('})()');

  if (config.debug) {
    helpers.printDebugWithCode(
      'Compiled binary protocol row parser',
      parserFn.toString()
    );
  }
  return parserFn.toFunction({ wrap });
}

function getBinaryParser(fields, options, config) {
  return parserCache.getParser('binary', fields, options, config, compile);
}

module.exports = getBinaryParser;
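Note: the wrap() helper above builds the field object that a user-supplied typeCast callback receives. A minimal sketch of how that hook is consumed from application code, following mysql2's documented typeCast option; the connection parameters are placeholder assumptions, not part of this commit:

// Sketch: casting TINYINT(1) columns to booleans via the typeCast hook.
const mysql = require('mysql2');

const conn = mysql.createConnection({
  host: 'localhost',   // assumption: local test server
  user: 'app',
  database: 'app_db',
  typeCast: (field, next) => {
    // `field` is the wrapper object produced by wrap(field, packet) above.
    if (field.type === 'TINY' && field.length === 1) {
      return field.string() === '1'; // '1'/'0' -> true/false
    }
    return next(); // fall through to the generated read code
  },
});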
66 user-mgmt_backend/node_modules/mysql2/lib/parsers/parser_cache.js (generated, vendored, new file)
@@ -0,0 +1,66 @@
'use strict';

const { createLRU } = require('lru.min');

const parserCache = createLRU({
  max: 15000,
});

function keyFromFields(type, fields, options, config) {
  const res = [
    type,
    typeof options.nestTables,
    options.nestTables,
    Boolean(options.rowsAsArray),
    Boolean(options.supportBigNumbers || config.supportBigNumbers),
    Boolean(options.bigNumberStrings || config.bigNumberStrings),
    typeof options.typeCast,
    options.timezone || config.timezone,
    Boolean(options.decimalNumbers),
    options.dateStrings,
  ];

  for (let i = 0; i < fields.length; ++i) {
    const field = fields[i];

    res.push([
      field.name,
      field.columnType,
      field.length,
      field.schema,
      field.table,
      field.flags,
      field.characterSet,
    ]);
  }

  return JSON.stringify(res, null, 0);
}

function getParser(type, fields, options, config, compiler) {
  const key = keyFromFields(type, fields, options, config);
  let parser = parserCache.get(key);

  if (parser) {
    return parser;
  }

  parser = compiler(fields, options, config);
  parserCache.set(key, parser);
  return parser;
}

function setMaxCache(max) {
  parserCache.resize(max);
}

function clearCache() {
  parserCache.clear();
}

module.exports = {
  getParser: getParser,
  setMaxCache: setMaxCache,
  clearCache: clearCache,
  _keyFromFields: keyFromFields,
};
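Note: keyFromFields serializes both the parser options and every column's metadata, so a compiled parser is only reused when the full result-set shape matches. A rough illustration using the exported _keyFromFields; the field object values below are illustrative assumptions (columnType 3 is MYSQL_TYPE_LONG in the MySQL protocol):

const cache = require('./parser_cache.js');

// Two identical field descriptions yield the same key, hence one compiled
// parser shared across queries with the same result shape.
const fields = [{
  name: 'id', columnType: 3, length: 11, schema: 'app_db',
  table: 'users', flags: 0, characterSet: 63,
}];
const options = { nestTables: false, rowsAsArray: false };
const config = { supportBigNumbers: false, bigNumberStrings: false };

const k1 = cache._keyFromFields('text', fields, options, config);
const k2 = cache._keyFromFields('text', fields, options, config);
console.log(k1 === k2); // true -> cache hit on the second lookup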
211 user-mgmt_backend/node_modules/mysql2/lib/parsers/static_binary_parser.js (generated, vendored, new file)
@@ -0,0 +1,211 @@
'use strict';

const FieldFlags = require('../constants/field_flags.js');
const Charsets = require('../constants/charsets.js');
const Types = require('../constants/types.js');
const helpers = require('../helpers');

const typeNames = [];
for (const t in Types) {
  typeNames[Types[t]] = t;
}

function getBinaryParser(fields, _options, config) {
  function readCode(field, config, options, fieldNum, packet) {
    const supportBigNumbers = Boolean(
      options.supportBigNumbers || config.supportBigNumbers
    );
    const bigNumberStrings = Boolean(
      options.bigNumberStrings || config.bigNumberStrings
    );
    const timezone = options.timezone || config.timezone;
    const dateStrings = options.dateStrings || config.dateStrings;
    const unsigned = field.flags & FieldFlags.UNSIGNED;

    switch (field.columnType) {
      case Types.TINY:
        return unsigned ? packet.readInt8() : packet.readSInt8();
      case Types.SHORT:
        return unsigned ? packet.readInt16() : packet.readSInt16();
      case Types.LONG:
      case Types.INT24: // in binary protocol int24 is encoded in 4 bytes int32
        return unsigned ? packet.readInt32() : packet.readSInt32();
      case Types.YEAR:
        return packet.readInt16();
      case Types.FLOAT:
        return packet.readFloat();
      case Types.DOUBLE:
        return packet.readDouble();
      case Types.NULL:
        return null;
      case Types.DATE:
      case Types.DATETIME:
      case Types.TIMESTAMP:
      case Types.NEWDATE:
        return helpers.typeMatch(field.columnType, dateStrings, Types)
          ? packet.readDateTimeString(
              parseInt(field.decimals, 10),
              null,
              field.columnType
            )
          : packet.readDateTime(timezone);
      case Types.TIME:
        return packet.readTimeString();
      case Types.DECIMAL:
      case Types.NEWDECIMAL:
        return config.decimalNumbers
          ? packet.parseLengthCodedFloat()
          : packet.readLengthCodedString('ascii');
      case Types.GEOMETRY:
        return packet.parseGeometryValue();
      case Types.VECTOR:
        return packet.parseVector();
      case Types.JSON:
        // Since for JSON columns mysql always returns charset 63 (BINARY),
        // we have to handle it according to JSON specs and use "utf8",
        // see https://github.com/sidorares/node-mysql2/issues/409
        return config.jsonStrings
          ? packet.readLengthCodedString('utf8')
          : JSON.parse(packet.readLengthCodedString('utf8'));
      case Types.LONGLONG:
        if (!supportBigNumbers)
          return unsigned
            ? packet.readInt64JSNumber()
            : packet.readSInt64JSNumber();
        return bigNumberStrings
          ? unsigned
            ? packet.readInt64String()
            : packet.readSInt64String()
          : unsigned
            ? packet.readInt64()
            : packet.readSInt64();
      default:
        return field.characterSet === Charsets.BINARY
          ? packet.readLengthCodedBuffer()
          : packet.readLengthCodedString(fields[fieldNum].encoding);
    }
  }

  return class BinaryRow {
    constructor() {}

    next(packet, fields, options) {
      packet.readInt8(); // status byte

      const nullBitmapLength = Math.floor((fields.length + 7 + 2) / 8);
      const nullBitmaskBytes = new Array(nullBitmapLength);

      for (let i = 0; i < nullBitmapLength; i++) {
        nullBitmaskBytes[i] = packet.readInt8();
      }

      const result = options.rowsAsArray ? new Array(fields.length) : {};
      let currentFieldNullBit = 4;
      let nullByteIndex = 0;

      for (let i = 0; i < fields.length; i++) {
        const field = fields[i];
        const typeCast =
          options.typeCast !== undefined ? options.typeCast : config.typeCast;

        let value;
        if (nullBitmaskBytes[nullByteIndex] & currentFieldNullBit) {
          value = null;
        } else if (options.typeCast === false) {
          value = packet.readLengthCodedBuffer();
        } else {
          const next = () => readCode(field, config, options, i, packet);
          value =
            typeof typeCast === 'function'
              ? typeCast(
                  {
                    type: typeNames[field.columnType],
                    length: field.columnLength,
                    db: field.schema,
                    table: field.table,
                    name: field.name,
                    string: function (encoding = field.encoding) {
                      if (
                        field.columnType === Types.JSON &&
                        encoding === field.encoding
                      ) {
                        // Since for JSON columns mysql always returns charset 63 (BINARY),
                        // we have to handle it according to JSON specs and use "utf8",
                        // see https://github.com/sidorares/node-mysql2/issues/1661
                        console.warn(
                          `typeCast: JSON column "${field.name}" is interpreted as BINARY by default, recommended to manually set utf8 encoding: \`field.string("utf8")\``
                        );
                      }

                      if (
                        [
                          Types.DATETIME,
                          Types.NEWDATE,
                          Types.TIMESTAMP,
                          Types.DATE,
                        ].includes(field.columnType)
                      ) {
                        return packet.readDateTimeString(
                          parseInt(field.decimals, 10)
                        );
                      }

                      if (field.columnType === Types.TINY) {
                        const unsigned = field.flags & FieldFlags.UNSIGNED;

                        return String(
                          unsigned ? packet.readInt8() : packet.readSInt8()
                        );
                      }

                      if (field.columnType === Types.TIME) {
                        return packet.readTimeString();
                      }

                      return packet.readLengthCodedString(encoding);
                    },
                    buffer: function () {
                      return packet.readLengthCodedBuffer();
                    },
                    geometry: function () {
                      return packet.parseGeometryValue();
                    },
                  },
                  next
                )
              : next();
        }

        if (options.rowsAsArray) {
          result[i] = value;
        } else if (typeof options.nestTables === 'string') {
          const key = helpers.fieldEscape(
            field.table + options.nestTables + field.name,
            false
          );
          result[key] = value;
        } else if (options.nestTables === true) {
          const tableName = helpers.fieldEscape(field.table, false);
          if (!result[tableName]) {
            result[tableName] = {};
          }
          const fieldName = helpers.fieldEscape(field.name, false);
          result[tableName][fieldName] = value;
        } else {
          const key = helpers.fieldEscape(field.name, false);
          result[key] = value;
        }

        currentFieldNullBit *= 2;
        if (currentFieldNullBit === 0x100) {
          currentFieldNullBit = 1;
          nullByteIndex++;
        }
      }

      return result;
    }
  };
}

module.exports = getBinaryParser;
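Note: in the MySQL binary protocol the NULL bitmap is offset by two bits, which is why the mask above starts at 4 (1 << 2) and the bitmap length is floor((numFields + 7 + 2) / 8). A standalone sketch of the same bit walk, for orientation:

// Which (byte, mask) pair covers each field's NULL bit.
function nullPositions(numFields) {
  const positions = [];
  let bit = 4;        // first field starts at bit 2 due to the offset
  let byteIndex = 0;
  for (let i = 0; i < numFields; i++) {
    positions.push({ field: i, byteIndex, mask: bit });
    bit *= 2;
    if (bit === 0x100) { // wrapped past one byte
      bit = 1;
      byteIndex++;
    }
  }
  return positions;
}
// nullPositions(7)[6] -> { field: 6, byteIndex: 1, mask: 1 }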
152 user-mgmt_backend/node_modules/mysql2/lib/parsers/static_text_parser.js (generated, vendored, new file)
@@ -0,0 +1,152 @@
'use strict';

const Types = require('../constants/types.js');
const Charsets = require('../constants/charsets.js');
const helpers = require('../helpers');

const typeNames = [];
for (const t in Types) {
  typeNames[Types[t]] = t;
}

function readField({ packet, type, charset, encoding, config, options }) {
  const supportBigNumbers = Boolean(
    options.supportBigNumbers || config.supportBigNumbers
  );
  const bigNumberStrings = Boolean(
    options.bigNumberStrings || config.bigNumberStrings
  );
  const timezone = options.timezone || config.timezone;
  const dateStrings = options.dateStrings || config.dateStrings;

  switch (type) {
    case Types.TINY:
    case Types.SHORT:
    case Types.LONG:
    case Types.INT24:
    case Types.YEAR:
      return packet.parseLengthCodedIntNoBigCheck();
    case Types.LONGLONG:
      if (supportBigNumbers && bigNumberStrings) {
        return packet.parseLengthCodedIntString();
      }
      return packet.parseLengthCodedInt(supportBigNumbers);
    case Types.FLOAT:
    case Types.DOUBLE:
      return packet.parseLengthCodedFloat();
    case Types.NULL:
    case Types.DECIMAL:
    case Types.NEWDECIMAL:
      if (config.decimalNumbers) {
        return packet.parseLengthCodedFloat();
      }
      return packet.readLengthCodedString('ascii');
    case Types.DATE:
      if (helpers.typeMatch(type, dateStrings, Types)) {
        return packet.readLengthCodedString('ascii');
      }
      return packet.parseDate(timezone);
    case Types.DATETIME:
    case Types.TIMESTAMP:
      if (helpers.typeMatch(type, dateStrings, Types)) {
        return packet.readLengthCodedString('ascii');
      }
      return packet.parseDateTime(timezone);
    case Types.TIME:
      return packet.readLengthCodedString('ascii');
    case Types.GEOMETRY:
      return packet.parseGeometryValue();
    case Types.VECTOR:
      return packet.parseVector();
    case Types.JSON:
      // Since for JSON columns mysql always returns charset 63 (BINARY),
      // we have to handle it according to JSON specs and use "utf8",
      // see https://github.com/sidorares/node-mysql2/issues/409
      return config.jsonStrings
        ? packet.readLengthCodedString('utf8')
        : JSON.parse(packet.readLengthCodedString('utf8'));
    default:
      if (charset === Charsets.BINARY) {
        return packet.readLengthCodedBuffer();
      }
      return packet.readLengthCodedString(encoding);
  }
}

function createTypecastField(field, packet) {
  return {
    type: typeNames[field.columnType],
    length: field.columnLength,
    db: field.schema,
    table: field.table,
    name: field.name,
    string: function (encoding = field.encoding) {
      if (field.columnType === Types.JSON && encoding === field.encoding) {
        // Since for JSON columns mysql always returns charset 63 (BINARY),
        // we have to handle it according to JSON specs and use "utf8",
        // see https://github.com/sidorares/node-mysql2/issues/1661
        console.warn(
          `typeCast: JSON column "${field.name}" is interpreted as BINARY by default, recommended to manually set utf8 encoding: \`field.string("utf8")\``
        );
      }
      return packet.readLengthCodedString(encoding);
    },
    buffer: function () {
      return packet.readLengthCodedBuffer();
    },
    geometry: function () {
      return packet.parseGeometryValue();
    },
  };
}

function getTextParser(_fields, _options, config) {
  return {
    next(packet, fields, options) {
      const result = options.rowsAsArray ? [] : {};
      for (let i = 0; i < fields.length; i++) {
        const field = fields[i];
        const typeCast = options.typeCast ? options.typeCast : config.typeCast;
        const next = () =>
          readField({
            packet,
            type: field.columnType,
            encoding: field.encoding,
            charset: field.characterSet,
            config,
            options,
          });

        let value;

        if (options.typeCast === false) {
          value = packet.readLengthCodedBuffer();
        } else if (typeof typeCast === 'function') {
          value = typeCast(createTypecastField(field, packet), next);
        } else {
          value = next();
        }

        if (options.rowsAsArray) {
          result.push(value);
        } else if (typeof options.nestTables === 'string') {
          result[
            `${helpers.fieldEscape(field.table, false)}${options.nestTables}${helpers.fieldEscape(field.name, false)}`
          ] = value;
        } else if (options.nestTables) {
          const tableName = helpers.fieldEscape(field.table, false);
          if (!result[tableName]) {
            result[tableName] = {};
          }
          result[tableName][helpers.fieldEscape(field.name, false)] = value;
        } else {
          result[helpers.fieldEscape(field.name, false)] = value;
        }
      }

      return result;
    },
  };
}

module.exports = getTextParser;
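Note: helpers.typeMatch(type, dateStrings, Types) decides whether temporal columns come back as strings. A hedged re-implementation sketch of the assumed semantics, per mysql2's documented dateStrings option (either a boolean, or an array of type names such as ['DATE', 'DATETIME']):

// Assumed behavior of helpers.typeMatch, for orientation only.
function typeMatchSketch(type, dateStrings, Types) {
  if (Array.isArray(dateStrings)) {
    // Reverse-map the numeric type code to its name ('DATE', 'DATETIME', ...)
    const names = [];
    for (const t in Types) names[Types[t]] = t;
    return dateStrings.indexOf(names[type]) !== -1;
  }
  return Boolean(dateStrings); // true means: all date types as strings
}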
50 user-mgmt_backend/node_modules/mysql2/lib/parsers/string.js (generated, vendored, new file)
@@ -0,0 +1,50 @@
'use strict';

const Iconv = require('iconv-lite');
const { createLRU } = require('lru.min');

const decoderCache = createLRU({
  max: 500,
});

exports.decode = function (buffer, encoding, start, end, options) {
  if (Buffer.isEncoding(encoding)) {
    return buffer.toString(encoding, start, end);
  }

  // Optimize for common case: encoding="short_string", options=undefined.
  let decoder;
  if (!options) {
    decoder = decoderCache.get(encoding);
    if (!decoder) {
      decoder = Iconv.getDecoder(encoding);
      decoderCache.set(encoding, decoder);
    }
  } else {
    const decoderArgs = { encoding, options };
    const decoderKey = JSON.stringify(decoderArgs);
    decoder = decoderCache.get(decoderKey);
    if (!decoder) {
      decoder = Iconv.getDecoder(decoderArgs.encoding, decoderArgs.options);
      decoderCache.set(decoderKey, decoder);
    }
  }

  const res = decoder.write(buffer.slice(start, end));
  const trail = decoder.end();

  return trail ? res + trail : res;
};

exports.encode = function (string, encoding, options) {
  if (Buffer.isEncoding(encoding)) {
    return Buffer.from(string, encoding);
  }

  const encoder = Iconv.getEncoder(encoding, options || {});

  const res = encoder.write(string);
  const trail = encoder.end();

  return trail && trail.length > 0 ? Buffer.concat([res, trail]) : res;
};
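Note: Buffer.isEncoding short-circuits native Node encodings; everything else goes through iconv-lite with a decoder cached per encoding name. A small usage sketch of the pair above; cp1251 is chosen because it is not a native Buffer encoding, so it exercises the iconv path:

const str = require('./string.js'); // assumption: this file's relative path

// Round-trip through the iconv-lite branch.
const buf = str.encode('привет', 'cp1251');
const out = str.decode(buf, 'cp1251', 0, buf.length);
console.log(out === 'привет'); // true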
214 user-mgmt_backend/node_modules/mysql2/lib/parsers/text_parser.js (generated, vendored, new file)
@@ -0,0 +1,214 @@
'use strict';

const Types = require('../constants/types.js');
const Charsets = require('../constants/charsets.js');
const helpers = require('../helpers');
const genFunc = require('generate-function');
const parserCache = require('./parser_cache.js');

const typeNames = [];
for (const t in Types) {
  typeNames[Types[t]] = t;
}

function readCodeFor(type, charset, encodingExpr, config, options) {
  const supportBigNumbers = Boolean(
    options.supportBigNumbers || config.supportBigNumbers
  );
  const bigNumberStrings = Boolean(
    options.bigNumberStrings || config.bigNumberStrings
  );
  const timezone = options.timezone || config.timezone;
  const dateStrings = options.dateStrings || config.dateStrings;

  switch (type) {
    case Types.TINY:
    case Types.SHORT:
    case Types.LONG:
    case Types.INT24:
    case Types.YEAR:
      return 'packet.parseLengthCodedIntNoBigCheck()';
    case Types.LONGLONG:
      if (supportBigNumbers && bigNumberStrings) {
        return 'packet.parseLengthCodedIntString()';
      }
      return `packet.parseLengthCodedInt(${supportBigNumbers})`;
    case Types.FLOAT:
    case Types.DOUBLE:
      return 'packet.parseLengthCodedFloat()';
    case Types.NULL:
      return 'packet.readLengthCodedNumber()';
    case Types.DECIMAL:
    case Types.NEWDECIMAL:
      if (config.decimalNumbers) {
        return 'packet.parseLengthCodedFloat()';
      }
      return 'packet.readLengthCodedString("ascii")';
    case Types.DATE:
      if (helpers.typeMatch(type, dateStrings, Types)) {
        return 'packet.readLengthCodedString("ascii")';
      }
      return `packet.parseDate(${helpers.srcEscape(timezone)})`;
    case Types.DATETIME:
    case Types.TIMESTAMP:
      if (helpers.typeMatch(type, dateStrings, Types)) {
        return 'packet.readLengthCodedString("ascii")';
      }
      return `packet.parseDateTime(${helpers.srcEscape(timezone)})`;
    case Types.TIME:
      return 'packet.readLengthCodedString("ascii")';
    case Types.GEOMETRY:
      return 'packet.parseGeometryValue()';
    case Types.VECTOR:
      return 'packet.parseVector()';
    case Types.JSON:
      // Since for JSON columns mysql always returns charset 63 (BINARY),
      // we have to handle it according to JSON specs and use "utf8",
      // see https://github.com/sidorares/node-mysql2/issues/409
      return config.jsonStrings
        ? 'packet.readLengthCodedString("utf8")'
        : 'JSON.parse(packet.readLengthCodedString("utf8"))';
    default:
      if (charset === Charsets.BINARY) {
        return 'packet.readLengthCodedBuffer()';
      }
      return `packet.readLengthCodedString(${encodingExpr})`;
  }
}

function compile(fields, options, config) {
  // use global typeCast if current query doesn't specify one
  if (
    typeof config.typeCast === 'function' &&
    typeof options.typeCast !== 'function'
  ) {
    options.typeCast = config.typeCast;
  }

  function wrap(field, _this) {
    return {
      type: typeNames[field.columnType],
      length: field.columnLength,
      db: field.schema,
      table: field.table,
      name: field.name,
      string: function (encoding = field.encoding) {
        if (field.columnType === Types.JSON && encoding === field.encoding) {
          // Since for JSON columns mysql always returns charset 63 (BINARY),
          // we have to handle it according to JSON specs and use "utf8",
          // see https://github.com/sidorares/node-mysql2/issues/1661
          console.warn(
            `typeCast: JSON column "${field.name}" is interpreted as BINARY by default, recommended to manually set utf8 encoding: \`field.string("utf8")\``
          );
        }

        return _this.packet.readLengthCodedString(encoding);
      },
      buffer: function () {
        return _this.packet.readLengthCodedBuffer();
      },
      geometry: function () {
        return _this.packet.parseGeometryValue();
      },
    };
  }

  const parserFn = genFunc();

  parserFn('(function () {')('return class TextRow {');

  // constructor method
  parserFn('constructor(fields) {');
  // node-mysql typeCast compatibility wrapper
  // see https://github.com/mysqljs/mysql/blob/96fdd0566b654436624e2375c7b6604b1f50f825/lib/protocol/packets/Field.js
  if (typeof options.typeCast === 'function') {
    parserFn('const _this = this;');
    parserFn('for(let i=0; i<fields.length; ++i) {');
    parserFn('this[`wrap${i}`] = wrap(fields[i], _this);');
    parserFn('}');
  }
  parserFn('}');

  // next method
  parserFn('next(packet, fields, options) {');
  parserFn('this.packet = packet;');
  if (options.rowsAsArray) {
    parserFn(`const result = new Array(${fields.length});`);
  } else {
    parserFn('const result = {};');
  }

  const resultTables = {};
  let resultTablesArray = [];

  if (options.nestTables === true) {
    for (let i = 0; i < fields.length; i++) {
      resultTables[fields[i].table] = 1;
    }
    resultTablesArray = Object.keys(resultTables);
    for (let i = 0; i < resultTablesArray.length; i++) {
      parserFn(`result[${helpers.fieldEscape(resultTablesArray[i])}] = {};`);
    }
  }

  let lvalue = '';
  let fieldName = '';
  let tableName = '';
  for (let i = 0; i < fields.length; i++) {
    fieldName = helpers.fieldEscape(fields[i].name);
    // parserFn(`// ${fieldName}: ${typeNames[fields[i].columnType]}`);

    if (typeof options.nestTables === 'string') {
      lvalue = `result[${helpers.fieldEscape(fields[i].table + options.nestTables + fields[i].name)}]`;
    } else if (options.nestTables === true) {
      tableName = helpers.fieldEscape(fields[i].table);

      parserFn(`if (!result[${tableName}]) result[${tableName}] = {};`);
      lvalue = `result[${tableName}][${fieldName}]`;
    } else if (options.rowsAsArray) {
      lvalue = `result[${i.toString(10)}]`;
    } else {
      lvalue = `result[${fieldName}]`;
    }
    if (options.typeCast === false) {
      parserFn(`${lvalue} = packet.readLengthCodedBuffer();`);
    } else {
      const encodingExpr = `fields[${i}].encoding`;
      const readCode = readCodeFor(
        fields[i].columnType,
        fields[i].characterSet,
        encodingExpr,
        config,
        options
      );
      if (typeof options.typeCast === 'function') {
        parserFn(
          `${lvalue} = options.typeCast(this.wrap${i}, function() { return ${readCode} });`
        );
      } else {
        parserFn(`${lvalue} = ${readCode};`);
      }
    }
  }

  parserFn('return result;');
  parserFn('}');
  parserFn('};')('})()');

  if (config.debug) {
    helpers.printDebugWithCode(
      'Compiled text protocol row parser',
      parserFn.toString()
    );
  }
  if (typeof options.typeCast === 'function') {
    return parserFn.toFunction({ wrap });
  }
  return parserFn.toFunction();
}

function getTextParser(fields, options, config) {
  return parserCache.getParser('text', fields, options, config, compile);
}

module.exports = getTextParser;
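Note: for orientation, the source that compile() feeds to generate-function for a simple two-column query (no typeCast, no nestTables, no rowsAsArray) comes out roughly as follows; this is a sketch, not literal output, and the column names are assumed:

(function () {
  return class TextRow {
    constructor(fields) {
    }
    next(packet, fields, options) {
      this.packet = packet;
      const result = {};
      // One straight-line read per column; an INT column and a VARCHAR column.
      result['id'] = packet.parseLengthCodedIntNoBigCheck();
      result['name'] = packet.readLengthCodedString(fields[1].encoding);
      return result;
    }
  };
})();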