diff --git a/api/fe.js b/api/fe.js
new file mode 100644
index 0000000..b11a622
--- /dev/null
+++ b/api/fe.js
@@ -0,0 +1,6 @@
+const { generateScript, generateContainerScript } = require('../forward_engineering/api');
+
+module.exports = {
+	generateScript,
+	generateContainerScript,
+};
diff --git a/esbuild.package.js b/esbuild.package.js
index 05ab30f..1612a0e 100644
--- a/esbuild.package.js
+++ b/esbuild.package.js
@@ -2,6 +2,7 @@ const fs = require('fs');
 const path = require('path');
 const esbuild = require('esbuild');
 const { clean } = require('esbuild-plugin-clean');
+const { copy } = require('esbuild-plugin-copy');
 const { copyFolderFiles, addReleaseFlag } = require('@hackolade/hck-esbuild-plugins-pack');
 const { EXCLUDED_EXTENSIONS, EXCLUDED_FILES, DEFAULT_RELEASE_FOLDER_PATH } = require('./buildConstants');
 
@@ -11,6 +12,7 @@ const RELEASE_FOLDER_PATH = path.join(DEFAULT_RELEASE_FOLDER_PATH, `${packageDat
 esbuild
 	.build({
 		entryPoints: [
+			path.resolve(__dirname, 'api', 'fe.js'),
 			path.resolve(__dirname, 'api', 're.js'),
 			path.resolve(__dirname, 'forward_engineering', 'api.js'),
 			path.resolve(__dirname, 'reverse_engineering', 'api.js'),
@@ -22,10 +24,17 @@ esbuild
 		outdir: RELEASE_FOLDER_PATH,
 		minify: true,
 		logLevel: 'info',
+		external: ['lodash'],
 		plugins: [
 			clean({
 				patterns: [DEFAULT_RELEASE_FOLDER_PATH],
 			}),
+			copy({
+				assets: {
+					from: [path.join('node_modules', 'lodash', '**', '*')],
+					to: [path.join('node_modules', 'lodash')],
+				},
+			}),
 			copyFolderFiles({
 				fromPath: __dirname,
 				targetFolderPath: RELEASE_FOLDER_PATH,
diff --git a/forward_engineering/api.js b/forward_engineering/api.js
index 4a74c70..2220123 100644
--- a/forward_engineering/api.js
+++ b/forward_engineering/api.js
@@ -1,11 +1,7 @@
-'use strict';
-const { generateCollectionScript } = require('./services/protoScriptGenerationService');
-const { setDependencies, dependencies } = require('../reverse_engineering/appDependencies');
-const RECORD_NAME_STRATEGY = 'RecordNameStrategy';
-const TOPIC_RECORD_NAME_STRATEGY = 'TopicRecordNameStrategy';
-const protobufjs = require('protobufjs');
-const descriptor = require('protobufjs/ext/descriptor');
+const _ = require('lodash');
 const { formatComment } = require('./helpers/utils');
+const { prepareScript } = require('./helpers/prepareScript');
+const { generateCollectionScript } = require('./services/protoScriptGenerationService');
 
 const defaultContainerData = [
 	{
@@ -17,11 +13,8 @@ const defaultContainerData = [
 
 module.exports = {
 	generateContainerScript(data, logger, callback, app) {
-		setDependencies(app);
-		const _ = dependencies.lodash;
 		const containerData = !_.isEmpty(data.containerData) ? data.containerData : defaultContainerData;
 		try {
-			const _ = dependencies.lodash;
 			let preparedData = {
 				...data,
 				containerData,
@@ -69,7 +62,7 @@ module.exports = {
 			]
 				.filter(row => row !== '')
 				.join('\n');
-			callback(null, this.prepareScript(script, preparedData));
+			callback(null, prepareScript(script, preparedData));
 		} catch (error) {
 			const errorObject = {
 				message: error.message,
@@ -80,12 +73,10 @@ module.exports = {
 			callback(errorObject);
 		}
 	},
+
 	generateScript(data, logger, callback, app) {
-		setDependencies(app);
-		const _ = dependencies.lodash;
 		const containerData = !_.isEmpty(data.containerData) ? data.containerData : defaultContainerData;
 		try {
-			const _ = dependencies.lodash;
 			let preparedData = {
 				...data,
 				containerData,
@@ -105,7 +96,7 @@ module.exports = {
 			]
 				.filter(row => row !== '')
 				.join('\n');
-			callback(null, this.prepareScript(script, preparedData));
+			callback(null, prepareScript(script, preparedData));
 		} catch (error) {
 			const errorObject = {
 				message: error.message,
@@ -115,75 +106,4 @@ module.exports = {
 			callback(errorObject);
 		}
 	},
-
-	prepareScript(script, data) {
-		const _ = dependencies.lodash;
-		const targetSchemaRegistry = _.get(data, 'options.targetScriptOptions.keyword');
-		if (targetSchemaRegistry === 'confluentSchemaRegistry') {
-			return this.getConfluentPostQuery({ data, schema: script });
-		}
-		if (targetSchemaRegistry === 'pulsarSchemaRegistry') {
-			return this.getPulsarPostQuery({ data, schema: script });
-		}
-
-		return script;
-	},
-
-	getPulsarPostQuery({ data, schema }) {
-		const _ = dependencies.lodash;
-		const root = protobufjs.parse(schema).root;
-		const descriptorMsg = root.toDescriptor('proto3');
-		const buffer = descriptor.FileDescriptorSet.encode(descriptorMsg).finish();
-		const fileDescriptorSet = buffer.toString('base64');
-		const descriptorJson = descriptorMsg.toJSON();
-		const rootMessageTypeName = `${_.get(descriptorJson, 'file[0].package')}.${_.get(descriptorJson, 'file[0].messageType[0].name')}`;
-		const rootFileDescriptorName = _.get(descriptorJson, 'file[0].name');
-		const body = {
-			fileDescriptorSet,
-			rootMessageTypeName,
-			rootFileDescriptorName,
-		};
-		const bodyObject = {
-			type: 'PROTOBUF_NATIVE',
-			data: body,
-			properties: {},
-		};
-		const namespace = _.get(data, 'containerData[0].name', '');
-		const topic = _.get(data, 'containerData[0].pulsarTopicName', '');
-		const persistence = _.get(data, 'containerData[0].isNonPersistentTopic', false)
-			? 'non-persistent'
-			: 'persistent';
-		return `POST /${persistence}/${namespace}/${topic}/schema\n\n${JSON.stringify(bodyObject, null, 4)}`;
-	},
-
-	getConfluentPostQuery({ data, schema }) {
-		const getName = () => {
-			const _ = dependencies.lodash;
-			const name = this.getRecordName(data);
-
-			const schemaType = _.get(data, 'containerData[0].schemaType');
-			const containerName = _.get(data, 'containerData[0].name');
-			const topic = _.get(data, 'modelData[0].schemaTopic');
-
-			const typePostfix = schemaType ? `-${schemaType}` : '';
-			const containerPrefix = containerName ? `${containerName}.` : '';
-			const topicPrefix = topic ? `${topic}-` : '';
-
-			const schemaNameStrategy = _.get(data, 'modelData[0].schemaNameStrategy', '');
-			switch (schemaNameStrategy) {
-				case RECORD_NAME_STRATEGY:
-					return `${containerPrefix}${name}${typePostfix}`;
-				case TOPIC_RECORD_NAME_STRATEGY:
-					return `${topicPrefix}${containerPrefix}${name}${typePostfix}`;
-				default:
-					return `${name}${typePostfix}`;
-			}
-		};
-
-		return `POST /subjects/${getName()}/versions\n\n${schema}`;
-	},
-
-	getRecordName(data) {
-		return data.containerData[0].code || data.containerData[0].name || data.containerData[0].collectionName;
-	},
 };
diff --git a/forward_engineering/helpers/DefinitionsHelper.js b/forward_engineering/helpers/DefinitionsHelper.js
index c98fb0c..59d2752 100644
--- a/forward_engineering/helpers/DefinitionsHelper.js
+++ b/forward_engineering/helpers/DefinitionsHelper.js
@@ -1,9 +1,7 @@
-const { dependencies } = require('../../reverse_engineering/appDependencies');
-
+const _ = require('lodash');
 const MODEL_DEFINITION_REF_REGEX = /#model\/definitions\/(.*)/;
 
 const parseDefinitions = definitions => {
-	const _ = dependencies.lodash;
 	return Object.entries(_.get(JSON.parse(definitions), 'properties', {}))
 		.map(([key, value]) => ({ title: key, ...value }))
 		.filter(definition => {
@@ -14,7 +12,6 @@ const parseDefinitions = definitions => {
 };
 
 const getDefinitionInfo = (definitions, fieldOptions, referenceId) => {
-	const _ = dependencies.lodash;
 	const requiredDefinition = getReferencedDefinition(definitions, referenceId);
 
 	if (requiredDefinition) {
@@ -30,7 +27,6 @@ const getDefinitionInfo = (definitions, fieldOptions, referenceId) => {
 };
 
 const getReferencedDefinition = (definitions, referenceId) => {
-	const _ = dependencies.lodash;
 	return definitions.find(definition =>
 		_.get(definition, 'definitionRefs', []).some(ref => _.last(ref) === referenceId),
 	);
@@ -71,7 +67,6 @@ const extractDefinitionsFromProperties = (properties = []) => {
 };
 
 const convertEntityTypeToValidName = type => {
-	const _ = dependencies.lodash;
 	return _.lowerCase(type).split(' ').join('_');
 };
 
diff --git a/forward_engineering/helpers/FieldNumberGenerationHelper.js b/forward_engineering/helpers/FieldNumberGenerationHelper.js
index 9c58dfa..e89e8d1 100644
--- a/forward_engineering/helpers/FieldNumberGenerationHelper.js
+++ b/forward_engineering/helpers/FieldNumberGenerationHelper.js
@@ -1,7 +1,6 @@
-const { dependencies } = require('../../reverse_engineering/appDependencies');
+const _ = require('lodash');
 
 const fixFieldNumbers = ({ fields, oneOfFields, oneOfIndex, reservedNumbers }) => {
-	const _ = dependencies.lodash;
 	const fieldEntries = Object.entries(fields);
 	fieldEntries.splice(oneOfIndex, 0, ...Object.entries(oneOfFields));
 	const checks = getChecksFromReservedNumbers(reservedNumbers);
@@ -74,8 +73,6 @@ const getChecksFromReservedNumbers = reservedNumbers => {
 };
 
 const generateSequence = (fieldsNumber, checks) => {
-	const _ = dependencies.lodash;
-
 	const fieldsNumberPositionRange = _.range(1, fieldsNumber + 1);
 
 	return fieldsNumberPositionRange.reduce(
@@ -85,7 +82,6 @@ const generateSequence = (fieldsNumber, checks) => {
 };
 
 const getNextFieldNumber = (position, usedNumbers, checks) => {
-	const _ = dependencies.lodash;
 	const candidates = _.range(position, position + 1000);
 	const nextNumber = candidates.find(
 		candidate => !usedNumbers.includes(candidate) && notReservedField(candidate, checks),
diff --git a/forward_engineering/helpers/getConfluentPostQuery.js b/forward_engineering/helpers/getConfluentPostQuery.js
new file mode 100644
index 0000000..1797ec1
--- /dev/null
+++ b/forward_engineering/helpers/getConfluentPostQuery.js
@@ -0,0 +1,38 @@
+const _ = require('lodash');
+
+const RECORD_NAME_STRATEGY = 'RecordNameStrategy';
+const TOPIC_RECORD_NAME_STRATEGY = 'TopicRecordNameStrategy';
+
+const getRecordName = data => {
+	return data.containerData[0].code || data.containerData[0].name || data.containerData[0].collectionName;
+};
+
+const getConfluentPostQuery = ({ data, schema }) => {
+	const getName = () => {
+		const name = getRecordName(data);
+
+		const schemaType = _.get(data, 'containerData[0].schemaType');
+		const containerName = _.get(data, 'containerData[0].name');
+		const topic = _.get(data, 'modelData[0].schemaTopic');
+
+		const typePostfix = schemaType ? `-${schemaType}` : '';
+		const containerPrefix = containerName ? `${containerName}.` : '';
+		const topicPrefix = topic ? `${topic}-` : '';
+
+		const schemaNameStrategy = _.get(data, 'modelData[0].schemaNameStrategy', '');
+		switch (schemaNameStrategy) {
+			case RECORD_NAME_STRATEGY:
+				return `${containerPrefix}${name}${typePostfix}`;
+			case TOPIC_RECORD_NAME_STRATEGY:
+				return `${topicPrefix}${containerPrefix}${name}${typePostfix}`;
+			default:
+				return `${name}${typePostfix}`;
+		}
+	};
+
+	return `POST /subjects/${getName()}/versions\n\n${schema}`;
+};
+
+module.exports = {
+	getConfluentPostQuery,
+};
diff --git a/forward_engineering/helpers/getPulsarPostQuery.js b/forward_engineering/helpers/getPulsarPostQuery.js
new file mode 100644
index 0000000..5887565
--- /dev/null
+++ b/forward_engineering/helpers/getPulsarPostQuery.js
@@ -0,0 +1,31 @@
+const _ = require('lodash');
+const protobufjs = require('protobufjs');
+const descriptor = require('protobufjs/ext/descriptor');
+
+const getPulsarPostQuery = ({ data, schema }) => {
+	const root = protobufjs.parse(schema).root;
+	const descriptorMsg = root.toDescriptor('proto3');
+	const buffer = descriptor.FileDescriptorSet.encode(descriptorMsg).finish();
+	const fileDescriptorSet = buffer.toString('base64');
+	const descriptorJson = descriptorMsg.toJSON();
+	const rootMessageTypeName = `${_.get(descriptorJson, 'file[0].package')}.${_.get(descriptorJson, 'file[0].messageType[0].name')}`;
+	const rootFileDescriptorName = _.get(descriptorJson, 'file[0].name');
+	const body = {
+		fileDescriptorSet,
+		rootMessageTypeName,
+		rootFileDescriptorName,
+	};
+	const bodyObject = {
+		type: 'PROTOBUF_NATIVE',
+		data: body,
+		properties: {},
+	};
+	const namespace = _.get(data, 'containerData[0].name', '');
+	const topic = _.get(data, 'containerData[0].pulsarTopicName', '');
+	const persistence = _.get(data, 'containerData[0].isNonPersistentTopic', false) ? 'non-persistent' : 'persistent';
+	return `POST /${persistence}/${namespace}/${topic}/schema\n\n${JSON.stringify(bodyObject, null, 4)}`;
+};
+
+module.exports = {
+	getPulsarPostQuery,
+};
diff --git a/forward_engineering/helpers/prepareScript.js b/forward_engineering/helpers/prepareScript.js
new file mode 100644
index 0000000..7501917
--- /dev/null
+++ b/forward_engineering/helpers/prepareScript.js
@@ -0,0 +1,19 @@
+const _ = require('lodash');
+const { getConfluentPostQuery } = require('./getConfluentPostQuery');
+const { getPulsarPostQuery } = require('./getPulsarPostQuery');
+
+const prepareScript = (script, data) => {
+	const targetSchemaRegistry = _.get(data, 'options.targetScriptOptions.keyword');
+	if (targetSchemaRegistry === 'confluentSchemaRegistry') {
+		return getConfluentPostQuery({ data, schema: script });
+	}
+	if (targetSchemaRegistry === 'pulsarSchemaRegistry') {
+		return getPulsarPostQuery({ data, schema: script });
+	}
+
+	return script;
+};
+
+module.exports = {
+	prepareScript,
+};
diff --git a/forward_engineering/services/protoScriptGenerationService.js b/forward_engineering/services/protoScriptGenerationService.js
index 25731e7..5710941 100644
--- a/forward_engineering/services/protoScriptGenerationService.js
+++ b/forward_engineering/services/protoScriptGenerationService.js
@@ -1,4 +1,4 @@
-const { dependencies } = require('../../reverse_engineering/appDependencies');
+const _ = require('lodash');
 const {
 	parseDefinitions,
 	getDefinitionInfo,
@@ -98,7 +98,6 @@ const getMessageStatement = ({
 	modelDefinitions,
 	externalDefinitions,
 }) => {
-	const _ = dependencies.lodash;
 	if (jsonSchema.$ref) {
 		const definitionName = jsonSchema.$ref.slice('#model/definitions/'.length);
 		jsonSchema = modelDefinitions.find(definition => definition.title === definitionName) || jsonSchema;
@@ -162,7 +161,6 @@ const concutFieldsStatements = (fieldsStatements, oneOfStatement, oneOfIndex) =>
 
 const getOneOfStatement = (oneOfMeta, fields, spacePrefix = '') => {
 	const oneOfName = oneOfMeta?.name || 'one_of';
-	const _ = dependencies.lodash;
 	if (_.isEmpty(fields)) {
 		return '';
 	}
@@ -206,7 +204,6 @@ const getOptionStatement = (option, spacePrefix) => {
 };
 
 const getReservedStatements = (data, spacePrefix) => {
-	const _ = dependencies.lodash;
 	const reservedFieldNames = !_.isEmpty(data.reservedFieldNames)
 		? `${spacePrefix}reserved ${data.reservedFieldNames};`
 		: ``;
@@ -228,7 +225,6 @@ const getFieldsStatement = ({
 	externalDefinitions,
 	oneOfIndex,
 }) => {
-	const _ = dependencies.lodash;
 	const oneOfFields = Object.entries(
 		(jsonSchema.oneOf || []).reduce((properties, property) => ({ ...properties, ...property.properties }), {}),
 	).reduce((oneOfProperties, [key, value]) => ({ ...oneOfProperties, [key]: { ...value, parent: 'oneOf' } }), {});
@@ -303,7 +299,6 @@ const getFieldInfo = ({
 	externalDefinitions,
 }) => {
 	const getUDT = udt => {
-		const _ = dependencies.lodash;
 		return !_.isEmpty(udt) ? udt : 'string';
 	};
 	if (isExternalRef) {
@@ -352,8 +347,6 @@ const getValidatedFieldRule = ({ fieldRule, protoVersion }) => {
 };
 
 const getFieldOptionsStatement = options => {
-	const _ = dependencies.lodash;
-
 	const stringifiedOptions = (options || [])
 		.filter(option => option?.optionKey && option?.optionValue)
 		.filter(option => option.optionKey !== 'allow_alias')
diff --git a/package-lock.json b/package-lock.json
index 28ef6c0..d28b8b4 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "Protobuf",
-  "version": "0.2.1",
+  "version": "0.2.2",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "Protobuf",
-      "version": "0.2.1",
+      "version": "0.2.2",
       "dependencies": {
         "antlr4": "4.8.0",
         "lodash": "4.17.21",
@@ -18,6 +18,7 @@
         "@typescript-eslint/parser": "7.11.0",
         "esbuild": "0.20.2",
         "esbuild-plugin-clean": "1.0.1",
+        "esbuild-plugin-copy": "2.1.1",
         "eslint": "8.57.0",
         "eslint-config-prettier": "9.1.0",
         "eslint-formatter-teamcity": "^1.0.0",
@@ -928,6 +929,19 @@
       "resolved": "https://registry.npmjs.org/antlr4/-/antlr4-4.8.0.tgz",
       "integrity": "sha512-en/MxQ4OkPgGJQ3wD/muzj1uDnFSzdFIhc2+c6bHZokWkuBb6RRvFjpWhPxWLbgQvaEzldJZ0GSQpfSAaE3hqg=="
     },
+    "node_modules/anymatch": {
+      "version": "3.1.3",
+      "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz",
+      "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==",
+      "dev": true,
+      "dependencies": {
+        "normalize-path": "^3.0.0",
+        "picomatch": "^2.0.4"
+      },
+      "engines": {
+        "node": ">= 8"
+      }
+    },
     "node_modules/argparse": {
       "version": "2.0.1",
       "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
@@ -1078,6 +1092,18 @@
       "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
       "dev": true
     },
+    "node_modules/binary-extensions": {
+      "version": "2.3.0",
+      "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz",
+      "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==",
+      "dev": true,
+      "engines": {
+        "node": ">=8"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
+      }
+    },
     "node_modules/brace-expansion": {
       "version": "1.1.11",
       "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
@@ -1144,6 +1170,42 @@
         "url": "https://github.com/chalk/chalk?sponsor=1"
       }
     },
+    "node_modules/chokidar": {
+      "version": "3.6.0",
+      "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz",
+      "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==",
+      "dev": true,
+      "dependencies": {
+        "anymatch": "~3.1.2",
+        "braces": "~3.0.2",
+        "glob-parent": "~5.1.2",
+        "is-binary-path": "~2.1.0",
+        "is-glob": "~4.0.1",
+        "normalize-path": "~3.0.0",
+        "readdirp": "~3.6.0"
+      },
+      "engines": {
+        "node": ">= 8.10.0"
+      },
+      "funding": {
+        "url": "https://paulmillr.com/funding/"
+      },
+      "optionalDependencies": {
+        "fsevents": "~2.3.2"
+      }
+    },
+    "node_modules/chokidar/node_modules/glob-parent": {
+      "version": "5.1.2",
+      "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
+      "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
+      "dev": true,
+      "dependencies": {
+        "is-glob": "^4.0.1"
+      },
+      "engines": {
+        "node": ">= 6"
+      }
+    },
     "node_modules/clean-stack": {
       "version": "2.2.0",
      "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz",
@@ -1587,6 +1649,35 @@
         "esbuild": ">= 0.14.0"
       }
     },
+    "node_modules/esbuild-plugin-copy": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/esbuild-plugin-copy/-/esbuild-plugin-copy-2.1.1.tgz",
+      "integrity": "sha512-Bk66jpevTcV8KMFzZI1P7MZKZ+uDcrZm2G2egZ2jNIvVnivDpodZI+/KnpL3Jnap0PBdIHU7HwFGB8r+vV5CVw==",
+      "dev": true,
+      "dependencies": {
+        "chalk": "^4.1.2",
+        "chokidar": "^3.5.3",
+        "fs-extra": "^10.0.1",
+        "globby": "^11.0.3"
+      },
+      "peerDependencies": {
+        "esbuild": ">= 0.14.0"
+      }
+    },
+    "node_modules/esbuild-plugin-copy/node_modules/fs-extra": {
+      "version": "10.1.0",
+      "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz",
+      "integrity": "sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==",
+      "dev": true,
+      "dependencies": {
+        "graceful-fs": "^4.2.0",
+        "jsonfile": "^6.0.1",
+        "universalify": "^2.0.0"
+      },
+      "engines": {
+        "node": ">=12"
+      }
+    },
     "node_modules/escape-string-regexp": {
       "version": "4.0.0",
       "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz",
@@ -2122,6 +2213,20 @@
       "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==",
       "dev": true
     },
+    "node_modules/fsevents": {
+      "version": "2.3.3",
+      "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
+      "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
+      "dev": true,
+      "hasInstallScript": true,
+      "optional": true,
+      "os": [
+        "darwin"
+      ],
+      "engines": {
+        "node": "^8.16.0 || ^10.6.0 || >=11.0.0"
+      }
+    },
     "node_modules/function-bind": {
       "version": "1.1.2",
       "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
@@ -2518,6 +2623,18 @@
         "url": "https://github.com/sponsors/ljharb"
       }
     },
+    "node_modules/is-binary-path": {
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz",
+      "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==",
+      "dev": true,
+      "dependencies": {
+        "binary-extensions": "^2.0.0"
+      },
+      "engines": {
+        "node": ">=8"
+      }
+    },
    "node_modules/is-boolean-object": {
       "version": "1.1.2",
       "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz",
@@ -3086,6 +3203,15 @@
       "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==",
       "dev": true
     },
+    "node_modules/normalize-path": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
+      "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==",
+      "dev": true,
+      "engines": {
+        "node": ">=0.10.0"
+      }
+    },
     "node_modules/npm-run-path": {
       "version": "5.3.0",
       "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.3.0.tgz",
@@ -3459,6 +3585,18 @@
         }
       ]
     },
+    "node_modules/readdirp": {
+      "version": "3.6.0",
+      "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz",
+      "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==",
+      "dev": true,
+      "dependencies": {
+        "picomatch": "^2.2.1"
+      },
+      "engines": {
+        "node": ">=8.10.0"
+      }
+    },
     "node_modules/regexp.prototype.flags": {
       "version": "1.5.2",
      "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz",
diff --git a/package.json b/package.json
index 7d2aca7..18c6ff8 100644
--- a/package.json
+++ b/package.json
@@ -47,7 +47,8 @@
 	},
 	"scripts": {
 		"lint": "eslint . --max-warnings=0",
-		"package": "node esbuild.package.js"
+		"package": "node esbuild.package.js",
+		"postinstall": "npx simple-git-hooks"
 	},
 	"devDependencies": {
 		"@hackolade/hck-esbuild-plugins-pack": "0.0.1",
@@ -55,6 +56,7 @@
 		"@typescript-eslint/parser": "7.11.0",
 		"esbuild": "0.20.2",
 		"esbuild-plugin-clean": "1.0.1",
+		"esbuild-plugin-copy": "2.1.1",
 		"eslint": "8.57.0",
 		"eslint-config-prettier": "9.1.0",
 		"eslint-formatter-teamcity": "^1.0.0",
@@ -65,4 +67,4 @@
 		"prettier": "3.2.5",
 		"simple-git-hooks": "2.11.1"
 	}
-}
\ No newline at end of file
+}
diff --git a/reverse_engineering/api.js b/reverse_engineering/api.js
index b6328fd..6538a72 100644
--- a/reverse_engineering/api.js
+++ b/reverse_engineering/api.js
@@ -1,5 +1,4 @@
-'use strict';
-
+const _ = require('lodash');
 const fs = require('fs');
 const path = require('path');
 const antlr4 = require('antlr4');
@@ -7,15 +6,12 @@ const Protobuf3Lexer = require('./parser/Protobuf3Lexer');
 const Protobuf3Parser = require('./parser/Protobuf3Parser');
 const protoToCollectionsVisitor = require('./protobufToCollectionsVisitor');
 const ExprErrorListener = require('./antlrErrorListener');
-const { setDependencies, dependencies } = require('./appDependencies');
 const { parseDescriptor } = require('./services/descriptorToProtoStringService');
 const { convertParsedFileDataToCollections } = require('./services/converterService');
 const { adaptJsonSchema } = require('./helpers/adaptJsonSchema/adaptJsonSchema');
 
 module.exports = {
 	reFromFile: async (data, logger, callback, app) => {
-		setDependencies(app);
-		const _ = dependencies.lodash;
 		try {
 			let input = await handleFileData(data.filePath);
 			const isDescriptor = !_.isError(_.attempt(JSON.parse, input));
diff --git a/reverse_engineering/appDependencies.js b/reverse_engineering/appDependencies.js
deleted file mode 100644
index 287e4ac..0000000
--- a/reverse_engineering/appDependencies.js
+++ /dev/null
@@ -1,10 +0,0 @@
-let dependencies = {};
-
-const setDependencies = app => {
-	dependencies.lodash = app.require('lodash');
-};
-
-module.exports = {
-	setDependencies,
-	dependencies,
-};
diff --git a/reverse_engineering/helpers/adaptJsonSchema/adaptJsonSchema.js b/reverse_engineering/helpers/adaptJsonSchema/adaptJsonSchema.js
index 8211e4b..3cb71c6 100644
--- a/reverse_engineering/helpers/adaptJsonSchema/adaptJsonSchema.js
+++ b/reverse_engineering/helpers/adaptJsonSchema/adaptJsonSchema.js
@@ -1,4 +1,4 @@
-const { setDependencies } = require('../../appDependencies');
+const _ = require('lodash');
 const mapJsonSchema = require('./mapJsonSchema');
 
 const handleNumericType = jsonSchema => {
@@ -34,7 +34,6 @@ const adaptSchema = jsonSchema => {
 };
 
 const adaptJsonSchema = (data, logger, callback, app) => {
-	setDependencies(app);
 	logger.log('info', 'Adaptation of JSON Schema started...', 'Adapt JSON Schema');
 	try {
 		const jsonSchema = JSON.parse(data.jsonSchema);
diff --git a/reverse_engineering/helpers/adaptJsonSchema/mapJsonSchema.js b/reverse_engineering/helpers/adaptJsonSchema/mapJsonSchema.js
index f0621a9..d3709a6 100644
--- a/reverse_engineering/helpers/adaptJsonSchema/mapJsonSchema.js
+++ b/reverse_engineering/helpers/adaptJsonSchema/mapJsonSchema.js
@@ -1,9 +1,8 @@
-const { dependencies } = require('../../appDependencies');
+const _ = require('lodash');
 
-const add = (obj, properties) => Object.assign({}, obj, properties);
+const add = (obj, properties) => ({ ...obj, ...properties });
 
 const mapJsonSchema = (jsonSchema, callback) => {
-	const _ = dependencies.lodash;
 	const mapProperties = (properties, mapper) =>
 		Object.keys(properties).reduce((newProperties, propertyName) => {
 			return add(newProperties, {
@@ -25,9 +24,7 @@ const mapJsonSchema = (jsonSchema, callback) => {
 				return jsonSchema;
 			}
 
-			return Object.assign({}, jsonSchema, {
-				[propertyName]: mapper(jsonSchema[propertyName]),
-			});
+			return { ...jsonSchema, [propertyName]: mapper(jsonSchema[propertyName]) };
 		}, jsonSchema);
 	};
 
@@ -37,7 +34,7 @@ const mapJsonSchema = (jsonSchema, callback) => {
 	const propertiesLike = ['properties', 'definitions', 'patternProperties'];
 	const itemsLike = ['items', 'oneOf', 'allOf', 'anyOf', 'not'];
 
-	const copyJsonSchema = Object.assign({}, jsonSchema);
+	const copyJsonSchema = { ...jsonSchema };
 	const jsonSchemaWithNewProperties = applyTo(
 		propertiesLike,
 		copyJsonSchema,
diff --git a/reverse_engineering/helpers/rootMessageFinder.js b/reverse_engineering/helpers/rootMessageFinder.js
index 1199e36..49e526d 100644
--- a/reverse_engineering/helpers/rootMessageFinder.js
+++ b/reverse_engineering/helpers/rootMessageFinder.js
@@ -1,7 +1,6 @@
-const { dependencies } = require('../appDependencies');
+const _ = require('lodash');
 
 const getRootMessageName = messages => {
-	const _ = dependencies.lodash;
 	const topLevelMessagesNames = messages.map(message => message.name);
 	const referencedTopLevelNames = _.uniq(
 		messages.reduce((referencedTopLevelNames, message) => {
@@ -18,7 +17,6 @@ const getRootMessageName = messages => {
 };
 
 const getTopLevelTypes = (message, topLevelMessages) => {
-	const _ = dependencies.lodash;
 	const fieldProperties = message.body
 		.filter(property => property)
 		.filter(property => property.elementType === 'field');
diff --git a/reverse_engineering/protobufToCollectionsVisitor.js b/reverse_engineering/protobufToCollectionsVisitor.js
index 31a8214..5989120 100644
--- a/reverse_engineering/protobufToCollectionsVisitor.js
+++ b/reverse_engineering/protobufToCollectionsVisitor.js
@@ -1,5 +1,5 @@
+const _ = require('lodash');
 const { Protobuf3Visitor } = require('./parser/Protobuf3Visitor');
-const { dependencies } = require('./appDependencies');
 const {
 	MESSAGE_TYPE,
 	ENUM_TYPE,
@@ -295,7 +295,7 @@ const getLabelValue = (context, label) => {
 };
 
 const getName = context => {
-	if (!context || dependencies.lodash.isEmpty(context)) {
+	if (!context || _.isEmpty(context)) {
 		return '';
 	}
 	return removeQuotes(context.getText());
diff --git a/reverse_engineering/services/converterService.js b/reverse_engineering/services/converterService.js
index f598821..0c3deb8 100644
--- a/reverse_engineering/services/converterService.js
+++ b/reverse_engineering/services/converterService.js
@@ -1,4 +1,4 @@
-const { dependencies } = require('../appDependencies');
+const _ = require('lodash');
 const { getRootMessageName } = require('../helpers/rootMessageFinder');
 const { fixFileName, determineSchemaType } = require('../helpers/fileNameHelper');
 const {
@@ -16,7 +16,6 @@ const {
 const NEW_DATABASE = 'New File';
 
 const convertParsedFileDataToCollections = (parsedData, fileName) => {
-	const _ = dependencies.lodash;
 	const dbName = fixFileName(fileName);
 	const schemaType = determineSchemaType(fileName);
 	const rootMessageName = getRootMessageName(parsedData.messages);
@@ -214,7 +213,6 @@ const generalFieldConverter = ({
 	internalDefinitions = [],
 	hackoladeGeneratedDefsNames = [],
 }) => {
-	const _ = dependencies.lodash;
 	if (hackoladeGeneratedDefsNames.includes(field.type) && internalDefinitionsNames.includes(field.type)) {
 		return internalDefinitions.find(def => def.name === field.type);
 	}
@@ -280,7 +278,6 @@ const messageFieldConverter = ({
 	internalDefinitions = [],
 	hackoladeGeneratedDefsNames = [],
 }) => {
-	const _ = dependencies.lodash;
 	const entitiesDefinitionsTypes = message.body
 		.filter(field => field)
 		.filter(field => [MESSAGE_TYPE, ENUM_TYPE].includes(field.elementType));
@@ -409,7 +406,6 @@ const getMapKeyType = type => {
 };
 
 const convertEnum = parsedEnum => {
-	const _ = dependencies.lodash;
 	const parsedOptions = parsedEnum.body
 		.filter(Boolean)
 		.filter(element => element)
@@ -440,7 +436,6 @@ const convertEnum = parsedEnum => {
 };
 
 const getHackoladeGeneratedDefNames = rootMessage => {
-	const _ = dependencies.lodash;
 	const usageFrequency = _.countBy(countDefinitionUsageFrequency(rootMessage), name => name);
 	return Object.entries(usageFrequency)
 		.filter(([name, frequency]) => frequency === 1)
@@ -448,7 +443,6 @@ const getHackoladeGeneratedDefNames = rootMessage => {
 };
 
 const countDefinitionUsageFrequency = message => {
-	const _ = dependencies.lodash;
 	const defNamesInFields = message.body
 		.filter(Boolean)
 		.filter(element => element.elementType === FIELD_TYPE)
diff --git a/reverse_engineering/services/descriptorToProtoStringService.js b/reverse_engineering/services/descriptorToProtoStringService.js
index 5f8efaa..4aca5ac 100644
--- a/reverse_engineering/services/descriptorToProtoStringService.js
+++ b/reverse_engineering/services/descriptorToProtoStringService.js
@@ -1,11 +1,10 @@
+const _ = require('lodash');
 const protobufjs = require('protobufjs');
-const { dependencies } = require('../appDependencies');
 const descriptor = require('protobufjs/ext/descriptor');
 
 let syntax = 'proto3';
 
 const parseDescriptor = descriptorString => {
-	const _ = dependencies.lodash;
 	const buffer = Buffer.from(descriptorString, 'base64');
 	const decodedDescriptor = descriptor.FileDescriptorSet.decode(buffer);
 	const root = protobufjs.Root.fromDescriptor(decodedDescriptor);
@@ -36,7 +35,6 @@ const determineSyntax = definitions => {
 };
 
 const parseDefinition = definition => {
-	const _ = dependencies.lodash;
 	if (_.has(definition, 'body.fields')) {
 		return getMessageStatement(definition);
 	}
@@ -55,7 +53,6 @@ const getEnumStatement = definition => {
 };
 
 const getMessageStatement = definition => {
-	const _ = dependencies.lodash;
 	const fields = definition.body.fields;
 	const reservedValuesStatement = getReservedStatement(definition.body.reserved);
 	const getDefaultRule = () => {
@@ -88,7 +85,6 @@ const getMessageStatement = definition => {
 };
 
 const getReservedStatement = reservedStatements => {
-	const _ = dependencies.lodash;
 	if (!reservedStatements) {
 		return '';
 	}
@@ -114,7 +110,6 @@ const getReservedStatement = reservedStatements => {
 		);
 	};
 	const getFieldNumberValue = fieldNumber => {
-		const _ = dependencies.lodash;
 		const uniqueNumbers = _.uniq(fieldNumber);
 		if (_.size(uniqueNumbers) === 1) {
 			return `${uniqueNumbers[0]}`;
@@ -125,13 +120,14 @@ const getReservedStatement = reservedStatements => {
 	const { columnNames, filedNumbers } = splitColumnNamesAndFieldNumbers(reservedStatements);
 	const reservedColumns = columnNames.map(name => `"${name}"`).join(', ');
 	const reservedNumbers = filedNumbers.map(number => getFieldNumberValue(number)).join(', ');
-	return [
-		`${!_.isEmpty(reservedColumns) ? `reserved ${reservedColumns};` : ''}`,
-		`${!_.isEmpty(reservedNumbers) ? `reserved ${reservedNumbers};` : ''}`,
-	].join('\n');
+
+	const columnsStatement = !_.isEmpty(reservedColumns) ? `reserved ${reservedColumns};` : '';
+	const numbersStatement = !_.isEmpty(reservedNumbers) ? `reserved ${reservedNumbers};` : '';
+
+	return [columnsStatement, numbersStatement].join('\n');
 };
+
 const constructPackageName = (descriptor, packageNameParts = []) => {
-	const _ = dependencies.lodash;
 	const singlePropertyObject = _.keys(descriptor).length === 1;
 	if (_.has(descriptor, 'nested') && singlePropertyObject) {
 		return constructPackageName(descriptor.nested, packageNameParts);