6 changes: 6 additions & 0 deletions api/fe.js
@@ -0,0 +1,6 @@
const { generateScript, generateContainerScript } = require('../forward_engineering/api');

module.exports = {
generateScript,
generateContainerScript,
};
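The new api/fe.js entry point simply re-exports the forward-engineering functions so the bundled release exposes a dedicated FE module. A minimal sketch of calling it, where the require path, stub logger, and near-empty data object are all assumed for illustration (a data object this bare will normally just exercise the error branch of the callback):

const { generateScript } = require('./api/fe'); // path assumed relative to the plugin root

// The (data, logger, callback, app) signature matches forward_engineering/api.js.
const data = { containerData: [], modelData: [{}], options: {} };
const logger = { log: () => {} };

generateScript(data, logger, (error, script) => {
    if (error) {
        return console.error(error.message);
    }
    console.log(script);
}, {});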
9 changes: 9 additions & 0 deletions esbuild.package.js
@@ -2,6 +2,7 @@ const fs = require('fs');
const path = require('path');
const esbuild = require('esbuild');
const { clean } = require('esbuild-plugin-clean');
const { copy } = require('esbuild-plugin-copy');
const { copyFolderFiles, addReleaseFlag } = require('@hackolade/hck-esbuild-plugins-pack');
const { EXCLUDED_EXTENSIONS, EXCLUDED_FILES, DEFAULT_RELEASE_FOLDER_PATH } = require('./buildConstants');

@@ -11,6 +12,7 @@ const RELEASE_FOLDER_PATH = path.join(DEFAULT_RELEASE_FOLDER_PATH, `${packageDat
esbuild
.build({
entryPoints: [
path.resolve(__dirname, 'api', 'fe.js'),
path.resolve(__dirname, 'api', 're.js'),
path.resolve(__dirname, 'forward_engineering', 'api.js'),
path.resolve(__dirname, 'reverse_engineering', 'api.js'),
@@ -22,10 +24,17 @@ esbuild
outdir: RELEASE_FOLDER_PATH,
minify: true,
logLevel: 'info',
external: ['lodash'],
plugins: [
clean({
patterns: [DEFAULT_RELEASE_FOLDER_PATH],
}),
copy({
assets: {
from: [path.join('node_modules', 'lodash', '**', '*')],
to: [path.join('node_modules', 'lodash')],
},
}),
copyFolderFiles({
fromPath: __dirname,
targetFolderPath: RELEASE_FOLDER_PATH,
92 changes: 6 additions & 86 deletions forward_engineering/api.js
@@ -1,11 +1,7 @@
'use strict';
const { generateCollectionScript } = require('./services/protoScriptGenerationService');
const { setDependencies, dependencies } = require('../reverse_engineering/appDependencies');
const RECORD_NAME_STRATEGY = 'RecordNameStrategy';
const TOPIC_RECORD_NAME_STRATEGY = 'TopicRecordNameStrategy';
const protobufjs = require('protobufjs');
const descriptor = require('protobufjs/ext/descriptor');
const _ = require('lodash');
const { formatComment } = require('./helpers/utils');
const { prepareScript } = require('./helpers/prepareScript');
const { generateCollectionScript } = require('./services/protoScriptGenerationService');

const defaultContainerData = [
{
@@ -17,11 +13,8 @@ const defaultContainerData = [

module.exports = {
generateContainerScript(data, logger, callback, app) {
setDependencies(app);
const _ = dependencies.lodash;
const containerData = !_.isEmpty(data.containerData) ? data.containerData : defaultContainerData;
try {
const _ = dependencies.lodash;
let preparedData = {
...data,
containerData,
@@ -69,7 +62,7 @@ module.exports = {
]
.filter(row => row !== '')
.join('\n');
callback(null, this.prepareScript(script, preparedData));
callback(null, prepareScript(script, preparedData));
} catch (error) {
const errorObject = {
message: error.message,
@@ -80,12 +73,10 @@ module.exports = {
callback(errorObject);
}
},

generateScript(data, logger, callback, app) {
setDependencies(app);
const _ = dependencies.lodash;
const containerData = !_.isEmpty(data.containerData) ? data.containerData : defaultContainerData;
try {
const _ = dependencies.lodash;
let preparedData = {
...data,
containerData,
@@ -105,7 +96,7 @@ module.exports = {
]
.filter(row => row !== '')
.join('\n');
callback(null, this.prepareScript(script, preparedData));
callback(null, prepareScript(script, preparedData));
} catch (error) {
const errorObject = {
message: error.message,
@@ -115,75 +106,4 @@ module.exports = {
callback(errorObject);
}
},

prepareScript(script, data) {
const _ = dependencies.lodash;
const targetSchemaRegistry = _.get(data, 'options.targetScriptOptions.keyword');
if (targetSchemaRegistry === 'confluentSchemaRegistry') {
return this.getConfluentPostQuery({ data, schema: script });
}
if (targetSchemaRegistry === 'pulsarSchemaRegistry') {
return this.getPulsarPostQuery({ data, schema: script });
}

return script;
},

getPulsarPostQuery({ data, schema }) {
const _ = dependencies.lodash;
const root = protobufjs.parse(schema).root;
const descriptorMsg = root.toDescriptor('proto3');
const buffer = descriptor.FileDescriptorSet.encode(descriptorMsg).finish();
const fileDescriptorSet = buffer.toString('base64');
const descriptorJson = descriptorMsg.toJSON();
const rootMessageTypeName = `${_.get(descriptorJson, 'file[0].package')}.${_.get(descriptorJson, 'file[0].messageType[0].name')}`;
const rootFileDescriptorName = _.get(descriptorJson, 'file[0].name');
const body = {
fileDescriptorSet,
rootMessageTypeName,
rootFileDescriptorName,
};
const bodyObject = {
type: 'PROTOBUF_NATIVE',
data: body,
properties: {},
};
const namespace = _.get(data, 'containerData[0].name', '');
const topic = _.get(data, 'containerData[0].pulsarTopicName', '');
const persistence = _.get(data, 'containerData[0].isNonPersistentTopic', false)
? 'non-persistent'
: 'persistent';
return `POST /${persistence}/${namespace}/${topic}/schema\n\n${JSON.stringify(bodyObject, null, 4)}`;
},

getConfluentPostQuery({ data, schema }) {
const getName = () => {
const _ = dependencies.lodash;
const name = this.getRecordName(data);

const schemaType = _.get(data, 'containerData[0].schemaType');
const containerName = _.get(data, 'containerData[0].name');
const topic = _.get(data, 'modelData[0].schemaTopic');

const typePostfix = schemaType ? `-${schemaType}` : '';
const containerPrefix = containerName ? `${containerName}.` : '';
const topicPrefix = topic ? `${topic}-` : '';

const schemaNameStrategy = _.get(data, 'modelData[0].schemaNameStrategy', '');
switch (schemaNameStrategy) {
case RECORD_NAME_STRATEGY:
return `${containerPrefix}${name}${typePostfix}`;
case TOPIC_RECORD_NAME_STRATEGY:
return `${topicPrefix}${containerPrefix}${name}${typePostfix}`;
default:
return `${name}${typePostfix}`;
}
};

return `POST /subjects/${getName()}/versions\n\n${schema}`;
},

getRecordName(data) {
return data.containerData[0].code || data.containerData[0].name || data.containerData[0].collectionName;
},
};
7 changes: 1 addition & 6 deletions forward_engineering/helpers/DefinitionsHelper.js
@@ -1,9 +1,7 @@
const { dependencies } = require('../../reverse_engineering/appDependencies');

const _ = require('lodash');
const MODEL_DEFINITION_REF_REGEX = /#model\/definitions\/(.*)/;

const parseDefinitions = definitions => {
const _ = dependencies.lodash;
return Object.entries(_.get(JSON.parse(definitions), 'properties', {}))
.map(([key, value]) => ({ title: key, ...value }))
.filter(definition => {
@@ -14,7 +12,6 @@ const parseDefinitions = definitions => {
};

const getDefinitionInfo = (definitions, fieldOptions, referenceId) => {
const _ = dependencies.lodash;
const requiredDefinition = getReferencedDefinition(definitions, referenceId);

if (requiredDefinition) {
@@ -30,7 +27,6 @@ const getDefinitionInfo = (definitions, fieldOptions, referenceId) => {
};

const getReferencedDefinition = (definitions, referenceId) => {
const _ = dependencies.lodash;
return definitions.find(definition =>
_.get(definition, 'definitionRefs', []).some(ref => _.last(ref) === referenceId),
);
@@ -71,7 +67,6 @@ const extractDefinitionsFromProperties = (properties = []) => {
};

const convertEntityTypeToValidName = type => {
const _ = dependencies.lodash;
return _.lowerCase(type).split(' ').join('_');
};

6 changes: 1 addition & 5 deletions forward_engineering/helpers/FieldNumberGenerationHelper.js
@@ -1,7 +1,6 @@
const { dependencies } = require('../../reverse_engineering/appDependencies');
const _ = require('lodash');

const fixFieldNumbers = ({ fields, oneOfFields, oneOfIndex, reservedNumbers }) => {
const _ = dependencies.lodash;
const fieldEntries = Object.entries(fields);
fieldEntries.splice(oneOfIndex, 0, ...Object.entries(oneOfFields));
const checks = getChecksFromReservedNumbers(reservedNumbers);
@@ -74,8 +73,6 @@ const getChecksFromReservedNumbers = reservedNumbers => {
};

const generateSequence = (fieldsNumber, checks) => {
const _ = dependencies.lodash;

const fieldsNumberPositionRange = _.range(1, fieldsNumber + 1);

return fieldsNumberPositionRange.reduce(
@@ -85,7 +82,6 @@ const generateSequence = (fieldsNumber, checks) => {
};

const getNextFieldNumber = (position, usedNumbers, checks) => {
const _ = dependencies.lodash;
const candidates = _.range(position, position + 1000);
const nextNumber = candidates.find(
candidate => !usedNumbers.includes(candidate) && notReservedField(candidate, checks),
38 changes: 38 additions & 0 deletions forward_engineering/helpers/getConfluentPostQuery.js
@@ -0,0 +1,38 @@
const _ = require('lodash');

const RECORD_NAME_STRATEGY = 'RecordNameStrategy';
const TOPIC_RECORD_NAME_STRATEGY = 'TopicRecordNameStrategy';

const getRecordName = data => {
return data.containerData[0].code || data.containerData[0].name || data.containerData[0].collectionName;
};

const getConfluentPostQuery = ({ data, schema }) => {
const getName = () => {
const name = getRecordName(data);

const schemaType = _.get(data, 'containerData[0].schemaType');
const containerName = _.get(data, 'containerData[0].name');
const topic = _.get(data, 'modelData[0].schemaTopic');

const typePostfix = schemaType ? `-${schemaType}` : '';
const containerPrefix = containerName ? `${containerName}.` : '';
const topicPrefix = topic ? `${topic}-` : '';

const schemaNameStrategy = _.get(data, 'modelData[0].schemaNameStrategy', '');
switch (schemaNameStrategy) {
case RECORD_NAME_STRATEGY:
return `${containerPrefix}${name}${typePostfix}`;
case TOPIC_RECORD_NAME_STRATEGY:
return `${topicPrefix}${containerPrefix}${name}${typePostfix}`;
default:
return `${name}${typePostfix}`;
}
};

return `POST /subjects/${getName()}/versions\n\n${schema}`;
};

module.exports = {
getConfluentPostQuery,
};
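A sketch of exercising the extracted helper on its own; the require path, container name, topic, and schema text below are invented solely to show how the subject name is assembled under TopicRecordNameStrategy:

const { getConfluentPostQuery } = require('./forward_engineering/helpers/getConfluentPostQuery');

const data = {
    containerData: [{ name: 'shop', schemaType: 'value' }],
    modelData: [{ schemaTopic: 'orders', schemaNameStrategy: 'TopicRecordNameStrategy' }],
};
const schema = 'syntax = "proto3";\nmessage Order {\n  int32 id = 1;\n}';

// Prints: POST /subjects/orders-shop.shop-value/versions, followed by the schema body.
console.log(getConfluentPostQuery({ data, schema }));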
31 changes: 31 additions & 0 deletions forward_engineering/helpers/getPulsarPostQuery.js
@@ -0,0 +1,31 @@
const _ = require('lodash');
const protobufjs = require('protobufjs');
const descriptor = require('protobufjs/ext/descriptor');

const getPulsarPostQuery = ({ data, schema }) => {
const root = protobufjs.parse(schema).root;
const descriptorMsg = root.toDescriptor('proto3');
const buffer = descriptor.FileDescriptorSet.encode(descriptorMsg).finish();
const fileDescriptorSet = buffer.toString('base64');
const descriptorJson = descriptorMsg.toJSON();
const rootMessageTypeName = `${_.get(descriptorJson, 'file[0].package')}.${_.get(descriptorJson, 'file[0].messageType[0].name')}`;
const rootFileDescriptorName = _.get(descriptorJson, 'file[0].name');
const body = {
fileDescriptorSet,
rootMessageTypeName,
rootFileDescriptorName,
};
const bodyObject = {
type: 'PROTOBUF_NATIVE',
data: body,
properties: {},
};
const namespace = _.get(data, 'containerData[0].name', '');
const topic = _.get(data, 'containerData[0].pulsarTopicName', '');
const persistence = _.get(data, 'containerData[0].isNonPersistentTopic', false) ? 'non-persistent' : 'persistent';
return `POST /${persistence}/${namespace}/${topic}/schema\n\n${JSON.stringify(bodyObject, null, 4)}`;
};

module.exports = {
getPulsarPostQuery,
};
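A sketch of the Pulsar variant with a hypothetical namespace, topic, and schema; the helper compiles the .proto text with protobufjs, base64-encodes the FileDescriptorSet, and wraps it in a PROTOBUF_NATIVE payload:

const { getPulsarPostQuery } = require('./forward_engineering/helpers/getPulsarPostQuery');

const data = {
    containerData: [{ name: 'public/default', pulsarTopicName: 'users', isNonPersistentTopic: false }],
};
const schema = 'syntax = "proto3";\npackage demo;\nmessage User {\n  string name = 1;\n}';

// Prints a POST /persistent/public/default/users/schema request whose body carries
// the base64 FileDescriptorSet and the root message type name (demo.User).
console.log(getPulsarPostQuery({ data, schema }));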
19 changes: 19 additions & 0 deletions forward_engineering/helpers/prepareScript.js
@@ -0,0 +1,19 @@
const _ = require('lodash');
const { getConfluentPostQuery } = require('./getConfluentPostQuery');
const { getPulsarPostQuery } = require('./getPulsarPostQuery');

const prepareScript = (script, data) => {
const targetSchemaRegistry = _.get(data, 'options.targetScriptOptions.keyword');
if (targetSchemaRegistry === 'confluentSchemaRegistry') {
return getConfluentPostQuery({ data, schema: script });
}
if (targetSchemaRegistry === 'pulsarSchemaRegistry') {
return getPulsarPostQuery({ data, schema: script });
}

return script;
};

module.exports = {
prepareScript,
};
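A sketch of the dispatcher in isolation, with invented option and container values; any keyword other than the two registry values returns the script untouched, which is the plain .proto path:

const { prepareScript } = require('./forward_engineering/helpers/prepareScript');

const script = 'syntax = "proto3";\nmessage User {\n  string name = 1;\n}';

// No targetScriptOptions.keyword set, so the raw .proto script comes back unchanged.
console.log(prepareScript(script, { options: {} }));

// With the Confluent keyword the same script is wrapped into a POST /subjects/demo/versions request.
console.log(
    prepareScript(script, {
        options: { targetScriptOptions: { keyword: 'confluentSchemaRegistry' } },
        containerData: [{ name: 'demo' }],
        modelData: [{}],
    }),
);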
9 changes: 1 addition & 8 deletions forward_engineering/services/protoScriptGenerationService.js
@@ -1,4 +1,4 @@
const { dependencies } = require('../../reverse_engineering/appDependencies');
const _ = require('lodash');
const {
parseDefinitions,
getDefinitionInfo,
@@ -98,7 +98,6 @@ const getMessageStatement = ({
modelDefinitions,
externalDefinitions,
}) => {
const _ = dependencies.lodash;
if (jsonSchema.$ref) {
const definitionName = jsonSchema.$ref.slice('#model/definitions/'.length);
jsonSchema = modelDefinitions.find(definition => definition.title === definitionName) || jsonSchema;
@@ -162,7 +161,6 @@ const concutFieldsStatements = (fieldsStatements, oneOfStatement, oneOfIndex) =>

const getOneOfStatement = (oneOfMeta, fields, spacePrefix = '') => {
const oneOfName = oneOfMeta?.name || 'one_of';
const _ = dependencies.lodash;
if (_.isEmpty(fields)) {
return '';
}
@@ -206,7 +204,6 @@ const getOptionStatement = (option, spacePrefix) => {
};

const getReservedStatements = (data, spacePrefix) => {
const _ = dependencies.lodash;
const reservedFieldNames = !_.isEmpty(data.reservedFieldNames)
? `${spacePrefix}reserved ${data.reservedFieldNames};`
: ``;
@@ -228,7 +225,6 @@ const getFieldsStatement = ({
externalDefinitions,
oneOfIndex,
}) => {
const _ = dependencies.lodash;
const oneOfFields = Object.entries(
(jsonSchema.oneOf || []).reduce((properties, property) => ({ ...properties, ...property.properties }), {}),
).reduce((oneOfProperties, [key, value]) => ({ ...oneOfProperties, [key]: { ...value, parent: 'oneOf' } }), {});
@@ -303,7 +299,6 @@ const getFieldInfo = ({
externalDefinitions,
}) => {
const getUDT = udt => {
const _ = dependencies.lodash;
return !_.isEmpty(udt) ? udt : 'string';
};
if (isExternalRef) {
@@ -352,8 +347,6 @@ const getValidatedFieldRule = ({ fieldRule, protoVersion }) => {
};

const getFieldOptionsStatement = options => {
const _ = dependencies.lodash;

const stringifiedOptions = (options || [])
.filter(option => option?.optionKey && option?.optionValue)
.filter(option => option.optionKey !== 'allow_alias')