diff --git a/README.md b/README.md index 6a572c08..0cf3a0db 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ # Sample Data ## Overview -This repo contains data that is uploaded into MOSIP during [sandbox installation](https://docs.mosip.io/1.2.0/deployment/sandbox-deployment). The data needs to be reviewed and modified for a country specific deployment. Refer to [Masterdata Guide](https://docs.mosip.io/1.2.0/deployment/masterdata-guide). +This repo contains data that is uploaded into MOSIP during [sandbox installation](https://docs.mosip.io/1.2.0/setup/deploymentnew/getting-started#mosip-installations). The data needs to be reviewed and modified for a country specific deployment. Refer to [Masterdata Guide](https://docs.mosip.io/1.2.0/id-lifecycle-management/support-systems/administration/masterdata-guide). ## For Build and Run Data initialization is performed through the **Master Data Loader** as part of the [postgres-init](https://github.com/mosip/postgres-init/tree/release-1.3.x) repository. 
diff --git a/mosip_master_csv/csv/applicant_valid_document.csv b/mosip_master/csv/applicant_valid_document.csv similarity index 100% rename from mosip_master_csv/csv/applicant_valid_document.csv rename to mosip_master/csv/applicant_valid_document.csv diff --git a/mosip_master_csv/csv/blocklisted_words.csv b/mosip_master/csv/blocklisted_words.csv similarity index 100% rename from mosip_master_csv/csv/blocklisted_words.csv rename to mosip_master/csv/blocklisted_words.csv diff --git a/mosip_master_csv/csv/daysofweek_list.csv b/mosip_master/csv/daysofweek_list.csv similarity index 100% rename from mosip_master_csv/csv/daysofweek_list.csv rename to mosip_master/csv/daysofweek_list.csv diff --git a/mosip_master_csv/csv/device_master.csv b/mosip_master/csv/device_master.csv similarity index 100% rename from mosip_master_csv/csv/device_master.csv rename to mosip_master/csv/device_master.csv diff --git a/mosip_master_csv/csv/device_master_h.csv b/mosip_master/csv/device_master_h.csv similarity index 100% rename from mosip_master_csv/csv/device_master_h.csv rename to mosip_master/csv/device_master_h.csv diff --git a/mosip_master_csv/csv/device_spec.csv b/mosip_master/csv/device_spec.csv similarity index 100% rename from mosip_master_csv/csv/device_spec.csv rename to mosip_master/csv/device_spec.csv diff --git a/mosip_master_csv/csv/device_type.csv b/mosip_master/csv/device_type.csv similarity index 100% rename from mosip_master_csv/csv/device_type.csv rename to mosip_master/csv/device_type.csv diff --git a/mosip_master_csv/csv/doc_category.csv b/mosip_master/csv/doc_category.csv similarity index 100% rename from mosip_master_csv/csv/doc_category.csv rename to mosip_master/csv/doc_category.csv diff --git a/mosip_master_csv/csv/doc_type.csv b/mosip_master/csv/doc_type.csv similarity index 100% rename from mosip_master_csv/csv/doc_type.csv rename to mosip_master/csv/doc_type.csv diff --git a/mosip_master_csv/csv/dynamic_field.csv b/mosip_master/csv/dynamic_field.csv similarity 
index 100% rename from mosip_master_csv/csv/dynamic_field.csv rename to mosip_master/csv/dynamic_field.csv diff --git a/mosip_master_csv/csv/identity_schema.csv b/mosip_master/csv/identity_schema.csv similarity index 100% rename from mosip_master_csv/csv/identity_schema.csv rename to mosip_master/csv/identity_schema.csv diff --git a/mosip_master_csv/csv/language.csv b/mosip_master/csv/language.csv similarity index 100% rename from mosip_master_csv/csv/language.csv rename to mosip_master/csv/language.csv diff --git a/mosip_master_csv/csv/loc_hierarchy_list.csv b/mosip_master/csv/loc_hierarchy_list.csv similarity index 100% rename from mosip_master_csv/csv/loc_hierarchy_list.csv rename to mosip_master/csv/loc_hierarchy_list.csv diff --git a/mosip_master_csv/csv/loc_holiday.csv b/mosip_master/csv/loc_holiday.csv similarity index 100% rename from mosip_master_csv/csv/loc_holiday.csv rename to mosip_master/csv/loc_holiday.csv diff --git a/mosip_master_csv/csv/location.csv b/mosip_master/csv/location.csv similarity index 100% rename from mosip_master_csv/csv/location.csv rename to mosip_master/csv/location.csv diff --git a/mosip_master_csv/csv/machine_master.csv b/mosip_master/csv/machine_master.csv similarity index 100% rename from mosip_master_csv/csv/machine_master.csv rename to mosip_master/csv/machine_master.csv diff --git a/mosip_master_csv/csv/machine_master_h.csv b/mosip_master/csv/machine_master_h.csv similarity index 100% rename from mosip_master_csv/csv/machine_master_h.csv rename to mosip_master/csv/machine_master_h.csv diff --git a/mosip_master_csv/csv/machine_spec.csv b/mosip_master/csv/machine_spec.csv similarity index 100% rename from mosip_master_csv/csv/machine_spec.csv rename to mosip_master/csv/machine_spec.csv diff --git a/mosip_master_csv/csv/machine_type.csv b/mosip_master/csv/machine_type.csv similarity index 100% rename from mosip_master_csv/csv/machine_type.csv rename to mosip_master/csv/machine_type.csv diff --git 
a/mosip_master_csv/csv/rcid_seq.csv b/mosip_master/csv/rcid_seq.csv similarity index 100% rename from mosip_master_csv/csv/rcid_seq.csv rename to mosip_master/csv/rcid_seq.csv diff --git a/mosip_master_csv/csv/reason_category.csv b/mosip_master/csv/reason_category.csv similarity index 100% rename from mosip_master_csv/csv/reason_category.csv rename to mosip_master/csv/reason_category.csv diff --git a/mosip_master_csv/csv/reason_list.csv b/mosip_master/csv/reason_list.csv similarity index 100% rename from mosip_master_csv/csv/reason_list.csv rename to mosip_master/csv/reason_list.csv diff --git a/mosip_master_csv/csv/reg_center_type.csv b/mosip_master/csv/reg_center_type.csv similarity index 100% rename from mosip_master_csv/csv/reg_center_type.csv rename to mosip_master/csv/reg_center_type.csv diff --git a/mosip_master_csv/csv/reg_exceptional_holiday.csv b/mosip_master/csv/reg_exceptional_holiday.csv similarity index 100% rename from mosip_master_csv/csv/reg_exceptional_holiday.csv rename to mosip_master/csv/reg_exceptional_holiday.csv diff --git a/mosip_master_csv/csv/reg_working_nonworking.csv b/mosip_master/csv/reg_working_nonworking.csv similarity index 100% rename from mosip_master_csv/csv/reg_working_nonworking.csv rename to mosip_master/csv/reg_working_nonworking.csv diff --git a/mosip_master_csv/csv/registration_center.csv b/mosip_master/csv/registration_center.csv similarity index 100% rename from mosip_master_csv/csv/registration_center.csv rename to mosip_master/csv/registration_center.csv diff --git a/mosip_master_csv/csv/registration_center_h.csv b/mosip_master/csv/registration_center_h.csv similarity index 100% rename from mosip_master_csv/csv/registration_center_h.csv rename to mosip_master/csv/registration_center_h.csv diff --git a/mosip_master_csv/csv/template.csv b/mosip_master/csv/template.csv similarity index 100% rename from mosip_master_csv/csv/template.csv rename to mosip_master/csv/template.csv diff --git 
a/mosip_master_csv/csv/template_type.csv b/mosip_master/csv/template_type.csv similarity index 100% rename from mosip_master_csv/csv/template_type.csv rename to mosip_master/csv/template_type.csv diff --git a/mosip_master_csv/csv/title.csv b/mosip_master/csv/title.csv similarity index 100% rename from mosip_master_csv/csv/title.csv rename to mosip_master/csv/title.csv diff --git a/mosip_master_csv/csv/ui_spec.csv b/mosip_master/csv/ui_spec.csv similarity index 100% rename from mosip_master_csv/csv/ui_spec.csv rename to mosip_master/csv/ui_spec.csv diff --git a/mosip_master_csv/csv/user_detail.csv b/mosip_master/csv/user_detail.csv similarity index 100% rename from mosip_master_csv/csv/user_detail.csv rename to mosip_master/csv/user_detail.csv diff --git a/mosip_master_csv/csv/user_detail_h.csv b/mosip_master/csv/user_detail_h.csv similarity index 100% rename from mosip_master_csv/csv/user_detail_h.csv rename to mosip_master/csv/user_detail_h.csv diff --git a/mosip_master_csv/csv/valid_document.csv b/mosip_master/csv/valid_document.csv similarity index 100% rename from mosip_master_csv/csv/valid_document.csv rename to mosip_master/csv/valid_document.csv diff --git a/mosip_master_csv/csv/zone.csv b/mosip_master/csv/zone.csv similarity index 100% rename from mosip_master_csv/csv/zone.csv rename to mosip_master/csv/zone.csv diff --git a/mosip_master_csv/csv/zone_user.csv b/mosip_master/csv/zone_user.csv similarity index 100% rename from mosip_master_csv/csv/zone_user.csv rename to mosip_master/csv/zone_user.csv diff --git a/mosip_master_csv/csv/zone_user_h.csv b/mosip_master/csv/zone_user_h.csv similarity index 100% rename from mosip_master_csv/csv/zone_user_h.csv rename to mosip_master/csv/zone_user_h.csv diff --git a/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/README.md b/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/README.md new file mode 100644 index 00000000..1ccccffa --- /dev/null +++ b/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/README.md @@ -0,0 +1,46 @@ +## 
Migrating country specific data from 1.1.5.5 to 1.2.0.1 version + +Prerequisites: +-> SQL migration must be successfully executed. +-> Go to upgrade.properties file and modify the property values as per the environment. + +Note: List of the commands executed during the data upgrade can be found in upgrade_commands.txt. One command per line. Commands using data-uploader.py script cannot be executed after it is successfully executed once. It should be commented for the next execution(upgrade.sh ignores the commented lines). + + +1. Migration of dynamic field table data. + Dynamic value was stored as jsonarray in version 1.1.5*, now in 1.2.0.1 we store it as json object. one entry for each language of the field. + + Script takes the backup of existing table and migrates dynamic field table data into new table created. + +2. UI Spec migration + + In 1.1.5* both Identity schema and UI spec was stored in identity_schema table. From 1.2.0 it is split into 2 different tables, identity_schema and ui_spec. As part of sql upgrade script data split is taken care. + + Here, we take care of migrating old UI spec to new UI spec. + Ref: https://docs.mosip.io/1.2.0/modules/registration-client/registration-client-ui-specifications + -> It is recommended to verify the validators and visibility expressions in migrated UI SPEC. + -> After this migration, one old ui spec will be divided into 3 different ui spec like "newProcess", "updateProcess" and "lostProcess". + -> AGEGROUP_CONFIG in the upgrade.properties should be updated based on the age-group values defined in the property name "mosip.regproc.packet.classifier.tagging.agegroup.ranges" in registration-default.properties file. + + + Refer below API documentation to define and publish UI spec + + https://mosip.github.io/documentation/1.2.0/kernel-masterdata-service.html#operation/defineUISpec + https://mosip.github.io/documentation/1.2.0/kernel-masterdata-service.html#operation/publishUISpec + +3. 
Template type and Template data change: + + New template types and templates were introduced in 1.2.0.1. All the new types and templates themselves are provided in the xlsx file in the same directory in english, arabic, french, kannada, hindi and tamil languages. + + 1. "id" column in the templates excel sheet is autogenerated before upload to server. + 2. Make sure to remove unsupported languages from the excel files before starting the migration. + 3. Cross check if all the language specific data is valid and correct w.r.t the language. Make the change as required in the template text (file_text column) if required. + + Note: We have introduced "label" and "value" in registration client acknowledgment and preview templates. Data in all the captured languages are slash separated and is provided to the template with "label" and "value" keys. So instead of "primaryLabel" and "primaryValue", "secondaryLabel" and "secondaryValue" use "label" and "value". To be backward compatible, we still provide support for "primaryLabel" and "primaryValue" in 1.2.0.1. + +4. Machine Type, Machine Specification & Zone User mapping: + + Resident service is introduced as new machine type and corresponding machine specification is added. + And most importantly, Resident service client is mapped to the top most zone in the zone hierarchy (country code). + + Note: In zone_user_delta.xlsx, resident service client is mapped to a dummy zone "MOR". Before execution of the upgrade.sh script, update the zone_code to appropriate value and save the changes. 
diff --git a/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/data-uploader.py b/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/data-uploader.py new file mode 100644 index 00000000..8f2ec576 --- /dev/null +++ b/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/data-uploader.py @@ -0,0 +1,162 @@ +# -*- coding: utf-8 -*- + +#!/usr/bin/python3 + + +## This script should be executed after DB upgrade and 1.2.0.* masterdata-service deployment + +from datetime import datetime, timezone, timedelta +import argparse +import requests +import json +import sys +import time +import psycopg2 +import openpyxl + +parser = argparse.ArgumentParser(description='This is CSV/xlsx file uploader script.invokes 1.2.0.1 bulk upload endpoints') +parser.add_argument("--domain", type=str, required=True, help="Server domain name, eg: api-internal.dev.mosip.net") +parser.add_argument("--username", type=str, required=True, help="User with GLOBAL_ADMIN & REGISTRATION_ADMIN role") +parser.add_argument("--password", type=str, required=True, help="User password") +parser.add_argument("--table", type=str, required=True, help="Database table name") +parser.add_argument("--operation", type=str, required=True, help="Database operation, eg: Insert or Update or Delete") +parser.add_argument("--file", type=str, required=True, help="Input file CSV or xlsx") +parser.add_argument("--autogen", choices=(1,0), default=0, type=int, required=False, help="Autogenerate value for id column") +parser.add_argument("--idcolumn", type=str, required=False, help="id column name, eg: A or B ...") +parser.add_argument("--sheetname", type=str, required=False, help="Sheet name to operate") +parser.add_argument("--dbusername", type=str, required=False, help="DB username") +parser.add_argument("--dbpassword", type=str, required=False, help="DB username") +parser.add_argument("--dbhost", type=str, required=False, help="DB hostname") +parser.add_argument("--dbport", type=str, required=False, help="DB port number") + +args = 
parser.parse_args() + +## Values to be updated as per the deployment +authURL='https://'+args.domain+'/v1/authmanager/authenticate/useridPwd' +uploadURL='https://'+args.domain+'/v1/admin/bulkupload' +uploadStatusURL='https://'+args.domain+'/v1/admin/bulkupload/transcation/' +username=args.username +password=args.password + +def getCurrentDateTime(): + dt_now = datetime.now(timezone.utc) + dt_now_str = dt_now.strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3] + return dt_now_str+'Z' + + +def get_seed_value(): + conn = psycopg2.connect(database="mosip_master", user = args.dbusername, password = args.dbpassword, host = args.dbhost, port = args.dbport) + cursor = conn.cursor() + cursor.execute("select id from master."+args.table+" order by id desc limit 20") + for row in cursor.fetchall(): + id_value = row[0] + if id_value is None: + seed_value = 1000 + break + if id_value.isdigit(): + seed_value = id_value + break; + + if seed_value == None: + seed_value = 1000 + return seed_value + + +def find_last_data_row(sheet): + max_row = sheet.max_row + + for row in range(max_row, 0, -1): + for cell in sheet[row]: + if cell.value is not None: + return row + +def fill_series(): + if args.sheetname == None: + print("Sheet name is required to fill series in id column.") + exit(1) + + if args.idcolumn == None: + print("id column name is required to fill series.") + exit(1) + + seed_value = get_seed_value() + + print("Sheet name: ",args.sheetname) + print("Id column to fill series: ", args.idcolumn) + print("Seed value: ", seed_value) + + workbook = openpyxl.load_workbook(args.file) + sheet = workbook[args.sheetname] + column = sheet[args.idcolumn] + + start_row = 2 + end_row = find_last_data_row(sheet) + + print("Start Row: ", start_row) + print("End Row: ", end_row) + + if(start_row is None and end_row is None): + print("Need a valid start_row and end_row!") + return + + for i, value in enumerate(range(start_row, end_row + 1), start=1): + column[i].value = int(seed_value) + value + + 
workbook.save(args.file) + workbook.close() + + + +def getAccessToken(): + auth_req_data = { + 'id': 'string', + 'metadata': {}, + 'request': { + 'appId': 'admin', + 'password': password, + 'userName': username + }, + 'requesttime': getCurrentDateTime(), + 'version': 'string' + } + authresponse=requests.post(authURL, json= auth_req_data) + print(json.dumps(authresponse.json())) + return authresponse.headers["authorization"] + + + +def uploadFile(): + if args.autogen == 1 : + fill_series() + + data = {'category': 'masterdata', 'operation': args.operation, 'tableName': args.table} + files = {'files': open(args.file, 'rb')} + uploadResponse = requests.post(uploadURL, data=data, files=files, headers=req_headers, verify=True) + uploadResponse_json = uploadResponse.json() + response = uploadResponse_json['response'] + print(json.dumps(uploadResponse_json)) + return response['transcationId'] + + +def getTransactionStatus(transactionId): + statusResponse = requests.get(uploadStatusURL+transactionId, headers=req_headers, verify=True) + statusResponse_json = statusResponse.json() + response = statusResponse_json['response'] + return response + + +req_headers={'Cookie' : 'Authorization='+getAccessToken()} +transactionId = uploadFile() +while True: + time.sleep(5) ## sleep for 5 seconds + status_response = getTransactionStatus(transactionId) + print(json.dumps(status_response)) + status = status_response["status"] + if status == "COMPLETED": + break + if status == "FAILED": + sys.exit("Transcation failed") + + + + diff --git a/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/machine_spec_delta.xlsx b/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/machine_spec_delta.xlsx new file mode 100644 index 00000000..c1edab57 Binary files /dev/null and b/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/machine_spec_delta.xlsx differ diff --git a/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/machine_type_delta.xlsx b/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/machine_type_delta.xlsx new file mode 
100644 index 00000000..f7082409 Binary files /dev/null and b/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/machine_type_delta.xlsx differ diff --git a/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/migration-dynamicfield.py b/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/migration-dynamicfield.py new file mode 100644 index 00000000..a946ec09 --- /dev/null +++ b/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/migration-dynamicfield.py @@ -0,0 +1,90 @@ +#!/usr/bin/python3 + +import psycopg2 +import json +import sys + +conn = psycopg2.connect(database="mosip_master", user = sys.argv[1], password = sys.argv[2], host = sys.argv[3], port = sys.argv[4]) + +print("Opened database successfully") + +cur = conn.cursor() + +#Backup existing dynamic_field table +cur.execute('ALTER TABLE master.dynamic_field RENAME TO dynamic_field_migr_bkp;') + +print("Renamed dynamic_field table to dynamic_field_migr_bkp") + +#Create dynamic_field table +cur.execute('''CREATE TABLE master.dynamic_field( + id character varying(36) NOT NULL, + name character varying(36) NOT NULL, + description character varying(256), + data_type character varying(16), + value_json character varying, + lang_code character varying(3) NOT NULL, + is_active boolean NOT NULL, + cr_by character varying(256) NOT NULL, + cr_dtimes timestamp NOT NULL, + upd_by character varying(256), + upd_dtimes timestamp, + is_deleted boolean DEFAULT FALSE, + del_dtimes timestamp, + CONSTRAINT pk_dynamic_id PRIMARY KEY (id));''') + +print("created table dynamic_field") + + +cur.execute('GRANT SELECT,INSERT,UPDATE,DELETE,TRUNCATE,REFERENCES ON master.dynamic_field TO masteruser;') +print("Applied grant on dynamic_field") + +#Query all the records from backup table +cur.execute('select * from master.dynamic_field_migr_bkp;') +rows = cur.fetchall() + +print("Data fetched from backup table") + +list_entities = [] + +#Iterate through each row and create new insert statements +for row in rows: + values = json.loads(row[4]) + for val in values: + 
vmap = {'code' : val['code'], 'value': val['value']} + list_entities.append(json.dumps({"name": row[1], "langCode" : val['langCode'], "value_json": json.dumps(vmap), "is_active": row[6]})) + + +#Query all the records from gender table +cur.execute('select * from master.gender;') +gender_rows = cur.fetchall() +for row in gender_rows: + vmap = {'code' : row[0], 'value': row[1]} + list_entities.append(json.dumps({"name": sys.argv[5], "langCode" : row[2],"value_json": json.dumps(vmap), "is_active": row[3]})) + + +#Query all the records from individual_type table +cur.execute('select * from master.individual_type;') +individual_type_rows = cur.fetchall() +for row in individual_type_rows: + vmap = {'code' : row[0], 'value': row[1]} + list_entities.append(json.dumps({"name": sys.argv[6], "langCode" : row[2],"value_json": json.dumps(vmap), "is_active": row[3]})) + + +id = 1000 +stmt = 'insert into dynamic_field values (%s,%s,%s,%s,%s,%s,%s,%s,now(),NULL,NULL,False,NULL);' +unique_entities = set(list_entities) +for entity_str in unique_entities: + id = id + 1 + entity = json.loads(entity_str) + status = False + if(entity['is_active'] == True): + status = True + #Execute the insert statement + cur.execute(stmt, (str(id), entity['name'], entity['name'], 'string', entity['value_json'], entity['langCode'], status, 'migration-script')) + + +# Commit and close connection +conn.commit() + +print("Closing the database connection") +conn.close() diff --git a/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/migration-ui_spec.py b/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/migration-ui_spec.py new file mode 100644 index 00000000..45697832 --- /dev/null +++ b/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/migration-ui_spec.py @@ -0,0 +1,814 @@ +# -*- coding: utf-8 -*- + +#!/usr/bin/python3 + + +## This script should be executed after DB upgrade and 1.2.0.* masterdata-service deployment + +from datetime import datetime, timezone, timedelta +import argparse +import requests +import json 
+import sys + +parser = argparse.ArgumentParser(description='This is UI spec migration script. Migrates 1.1.5.5 UI spec to 1.2.0 compatible SPEC and the same is published to the server. This script should be executed after DB upgrade and 1.2.0.* masterdata-service deployment.') +parser.add_argument("-d", "--domain", type=str, required=True, help="Server domain name, eg: dev.mosip.net") +parser.add_argument("-u", "--username", type=str, required=True, help="User with GLOBAL_ADMIN & REGISTRATION_ADMIN role") +parser.add_argument("-p", "--password", type=str, required=True, help="User password") +parser.add_argument("-pl", "--primaryLanguage", type=str, required=True, help="3 letter primary language code as used in 1.1.5.5") +parser.add_argument("-sl", "--secondaryLanguage", type=str, required=True, help="3 letter secondary language code as used in 1.1.5.5") +parser.add_argument("--identityMappingJsonUrl", type=str, required=True, help="URL to download identity_mapping.json") +parser.add_argument("--ageGroupConfig", type=str, required=True, help="Age group configuration") +parser.add_argument("--infantAgeGroup", type=str, required=True, help="Infant Age group name") +parser.add_argument("--allowedBioAttributes", type=str, required=True, help="Comma separated list of allowed biometric attributes") + +args = parser.parse_args() + + +## Values to be updated as per the deployment +authURL='https://'+args.domain+'/v1/authmanager/authenticate/useridPwd' +schemaURL='https://'+args.domain+'/v1/syncdata/latestidschema?schemaVersion=0' +uispecURL='https://'+args.domain+'/v1/masterdata/uispec' +uispecPublishURL='https://'+args.domain+'/v1/masterdata/uispec/publish' +primaryLang=args.primaryLanguage +secondaryLang=args.secondaryLanguage +username=args.username +password=args.password +agegroup_config=args.ageGroupConfig +infantAgeGroup = args.infantAgeGroup.strip() +allBioAttributes= args.allowedBioAttributes.strip().split(",") + +## values loaded from identity-mapping.json 
+individual_bio_field=None +auth_bio_field=None +guardian_bio_field=None +guardian_demo_fields=[] +ageGroupBasedModalities = {} +ageGroupRequiresGuardian = [] + +def getSupportedAgeGroups(): + agegroup_config_json=json.loads(agegroup_config) + for ageGroup in agegroup_config_json.keys(): + modalities = [] + while not modalities: + modalities = agegroup_config_json.get(ageGroup).get("bioAttributes") + ageGroupBasedModalities[ageGroup] = modalities + + requiresGuardianAuth = agegroup_config_json.get(ageGroup).get("isGuardianAuthRequired") + if(requiresGuardianAuth == True): + ageGroupRequiresGuardian.append(ageGroup) + + +def getConditionalBioAttributes(): + conditionalBioAttributes = [] + for ageGroup in ageGroupBasedModalities.keys(): + bioAttributes = ageGroupBasedModalities.get(ageGroup) + if(len(bioAttributes) < 13): + conditionalBioAttributes.append({ + "ageGroup": ageGroup, + "process": "ALL", + "validationExpr": " && ".join(bioAttributes), + "bioAttributes": bioAttributes + }) + return conditionalBioAttributes + + +def getGaurdianConditionalBioAttributes(): + conditionalBioAttributes = [] + for ageGroup in ageGroupBasedModalities.keys(): + if ageGroup in ageGroupRequiresGuardian: + conditionalBioAttributes.append({ + "ageGroup": ageGroup, + "process": "ALL", + "validationExpr": " || ".join(allBioAttributes), + "bioAttributes": allBioAttributes + }) + return conditionalBioAttributes + + +def getGaurdianFieldRequiredOn(): + exprs = [] + for ageGroup in ageGroupBasedModalities.keys(): + if ageGroup in ageGroupRequiresGuardian: + exprs.append("identity.get('ageGroup') == '"+ageGroup+"'") + return [{ "engine": "MVEL", "expr": " || ".join(exprs) }] + + +def getCurrentDateTime(): + dt_now = datetime.now(timezone.utc) + dt_now_str = dt_now.strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3] + return dt_now_str+'Z' + +def isValidBioFieldIds(values): + if individual_bio_field.get('value') in values and auth_bio_field.get('value') in values and guardian_bio_field.get('value') in 
values: + return True + else: + print(values) + return False + +def getGuardianDemographicFieldGroup(demographics): + guardian_group = None + for field in demographics: + if field['id'] in guardian_demo_fields: + guardian_group = field['group'] + break + + if guardian_group == None: + sys.exit("Kindly check the provided Guardian/Introducer demographic field Id (Any one).") + + return guardian_group + + +def getAccessToken(): + auth_req_data = { + 'id': 'string', + 'metadata': {}, + 'request': { + 'appId': 'admin', + 'password': password, + 'userName': username + }, + 'requesttime': getCurrentDateTime(), + 'version': 'string' + } + authresponse=requests.post(authURL, json= auth_req_data) + print(authresponse) + return authresponse.headers["authorization"] + + +def publish_spec(domain, spec_type, specjson): + print("identity schema id : " + identity_schema_id) + print("identity spec_type : " + spec_type) + spec = json.dumps(specjson) + spec = spec.replace("identity.?isChild", "identity.get('ageGroup') == '"+infantAgeGroup+"'") + spec = spec.replace("identity.isChild", "identity.get('ageGroup') == '"+infantAgeGroup+"'") + request_json = { + "id": "string", + "version": "string", + "requesttime": getCurrentDateTime(), + "metadata": {}, + "request": { + "identitySchemaId": identity_schema_id, + "domain": domain, + "type": spec_type, + "title": spec_type + " UI spec", + "description": spec_type + " UI spec", + "jsonspec": json.loads(spec) + } + } + spec_resp = requests.post(uispecURL, json=request_json, headers=req_headers) + spec_resp_json = spec_resp.json() + #print("UI spec POST response : " + json.dumps(spec_resp_json)) + + spec_resp_json_2 = spec_resp_json['response'] + spec_id = spec_resp_json_2['id'] + dt_now = datetime.now(timezone.utc) + dt_now = dt_now + timedelta(minutes=2) + dt_now_str = dt_now.strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3] + + print("UI spec publish request spec_id: " + spec_id) + + publish_spec_req = { + "id": "string", + "version": "string", + 
"requesttime": getCurrentDateTime(), + "metadata": {}, + "request": { + "id": spec_id, + "effectiveFrom": dt_now_str+'Z' + } + } + print("UI spec publish request : " + json.dumps(publish_spec_req)) + publish_resp = requests.put(uispecPublishURL, json=publish_spec_req, headers=req_headers) + publish_resp_json = publish_resp.json() + print("UI spec published : " + json.dumps(publish_resp_json)) + + +def getConsentFields(): + return [{ "id": "IDSchemaVersion", + "inputRequired": False, + "type": "number", + "minimum": 0, + "maximum": 0, + "description": "ID Schema Version", + "label": { + "eng": "IDSchemaVersion" + }, + "fieldType": "default", + "format": "none", + "validators": [], + "fieldCategory": "none", + "transliterate": False, + "required": True, + "requiredOn": [], + "subType": "IdSchemaVersion", + "exceptionPhotoRequired": False + }, { + "id": "consentText", + "inputRequired": True, + "type": "simpleType", + "minimum": 0, + "maximum": 0, + "description": "Consent", + "label": {}, + "controlType": "html", + "fieldType": "default", + "format": "none", + "validators": [], + "fieldCategory": "evidence", + "group": "consentText", + "transliterate": False, + "templateName": "reg-consent-template", + "required": True, + "requiredOn": [], + "subType": "consentText", + "exceptionPhotoRequired": False + }, { + "id": "consent", + "inputRequired": True, + "type": "string", + "minimum": 0, + "maximum": 0, + "description": "consent accepted", + "label": { + "ara": "الاسم الكامل الكامل الكامل", + "fra": "J'ai lu et j'accepte les termes et conditions pour partager mes PII", + "eng": "I have read and accept terms and conditions to share my PII" + }, + "controlType": "checkbox", + "fieldType": "default", + "format": "none", + "validators": [], + "fieldCategory": "evidence", + "group": "consent", + "transliterate": False, + "required": True, + "requiredOn": [], + "subType": "consent", + "exceptionPhotoRequired": False + }] + + +def buildNewRegistrationSpec(demographic_fields, 
document_fields, biometric_fields): + spec = { + "id": "NEW", + "order": 1, + "flow": "NEW", + "label": { + "eng": "New Registration", + "ara": "تسجيل جديد", + "fra": "Nouvelle inscription" + }, + "screens": [{ + "order": 1, + "name": "consentdet", + "label": { + "ara": "موافقة", + "fra": "Consentement", + "eng": "Consent" + }, + "caption": { + "ara": "موافقة", + "fra": "Consentement", + "eng": "Consent" + }, + "fields": getConsentFields(), + "preRegFetchRequired": False, + "additionalInfoRequestIdRequired": False, + "active": False + },{ + "order": 2, + "name": "DemographicsDetails", + "label": { + "ara": "التفاصيل الديموغرافية", + "fra": "Détails démographiques", + "eng": "Demographic Details" + }, + "caption": { + "ara": "التفاصيل الديموغرافية", + "fra": "Détails démographiques", + "eng": "Demographic Details" + }, + "fields": demographic_fields, + "preRegFetchRequired": True, + "additionalInfoRequestIdRequired": False, + "active": False + }, + { + "order": 3, + "name": "DocumentDetails", + "label": { + "ara": "تحميل الوثيقة", + "fra": "Des documents", + "eng": "Document Upload" + }, + "caption": { + "ara": "وثائق", + "fra": "Des documents", + "eng": "Documents" + }, + "fields": document_fields, + "preRegFetchRequired": False, + "additionalInfoRequestIdRequired": False, + "active": False + } + ], + "caption": { + "eng": "New Registration", + "ara": "تسجيل جديد", + "fra": "Nouvelle inscription" + }, + "icon": "NewReg.png", + "isActive": True, + } + + for field in biometric_fields: + if field['id'] == individual_bio_field.get('value'): + individualBioField = {key: value for key, value in field.items()} + individualBioField["conditionalBioAttributes"]=getConditionalBioAttributes() + individualBioField["required"] = True + individualBioField["requiredOn"] = [] + individualBioField["exceptionPhotoRequired"] = True + individualBioField["subType"] = "applicant" + + spec['screens'].append({ + "order": 4, + "name": "IndividualBiometricDetails", + "label": 
individualBioField["label"], + "caption": individualBioField["label"], + "fields": [individualBioField], + "preRegFetchRequired": False, + "additionalInfoRequestIdRequired": False, + "active": False + }) + + if field['id'] == guardian_bio_field.get('value'): + guardianBioField = {key: value for key, value in field.items()} + guardianBioField["conditionalBioAttributes"]=getGaurdianConditionalBioAttributes() + guardianBioField["required"] = False + guardianBioField["requiredOn"] = getGaurdianFieldRequiredOn() + guardianBioField["subType"] = "introducer" + + spec['screens'].append({ + "order": 5, + "name": "GaurdianBiometricDetails", + "label": guardianBioField["label"], + "caption": guardianBioField["label"], + "fields": [guardianBioField], + "preRegFetchRequired": False, + "additionalInfoRequestIdRequired": False, + "active": False + }) + return spec + + +def buildSettingsSpec(): + return [{ + "name": "scheduledjobs", + "description": { + "ara": "إعدادات الوظائف المجدولة", + "fra": "Paramètres des travaux planifiés", + "eng": "Scheduled Jobs Settings" + }, + "label": { + "ara": "إعدادات الوظائف المجدولة", + "fra": "Paramètres des travaux planifiés", + "eng": "Scheduled Jobs Settings" + }, + "fxml": "ScheduledJobsSettings.fxml", + "icon": "scheduledjobs.png", + "order": "1", + "shortcut-icon": "scheduledjobs-shortcut.png", + "access-control": ["REGISTRATION_SUPERVISOR"] +}, { + "name": "globalconfigs", + "description": { + "ara": "إعدادات التكوين العامة", + "fra": "Paramètres de configuration globale", + "eng": "Global Config Settings" + }, + "label": { + "ara": "إعدادات التكوين العامة", + "fra": "Paramètres de configuration globale", + "eng": "Global Config Settings" + }, + "fxml": "GlobalConfigSettings.fxml", + "icon": "globalconfigs.png", + "order": "2", + "shortcut-icon": "globalconfigs-shortcut.png", + "access-control": ["REGISTRATION_SUPERVISOR", "REGISTRATION_OFFICER"] +}, { + "name": "devices", + "description": { + "ara": "إعدادات الجهاز", + "fra": "Réglages 
de l'appareil", + "eng": "Device Settings" + }, + "label": { + "ara": "إعدادات الجهاز", + "fra": "Réglages de l'appareil", + "eng": "Device Settings" + }, + "fxml": "DeviceSettings.fxml", + "icon": "devices.png", + "order": "3", + "shortcut-icon": "devices-shortcut.png", + "access-control": ["REGISTRATION_SUPERVISOR", "REGISTRATION_OFFICER"] +}] + +def buildUpdateRegistrationSpec(demographic_fields, document_fields, biometric_fields, guardian_group_name): + spec = { + "id": "UPDATE", + "order": 2, + "flow": "UPDATE", + "label": { + "eng": "Update UIN", + "ara": "تحديث UIN", + "fra": "Mettre à jour l'UIN" + }, + "screens": [{ + "order": 1, + "name": "consentdet", + "label": { + "ara": "موافقة", + "fra": "Consentement", + "eng": "Consent" + }, + "caption": { + "ara": "موافقة", + "fra": "Consentement", + "eng": "Consent" + }, + "fields": getConsentFields(), + "preRegFetchRequired": False, + "additionalInfoRequestIdRequired": False, + "active": False + },{ + "order": 2, + "name": "DemographicsDetails", + "label": { + "ara": "التفاصيل الديموغرافية", + "fra": "Détails démographiques", + "eng": "Demographic Details" + }, + "caption": { + "ara": "التفاصيل الديموغرافية", + "fra": "Détails démographiques", + "eng": "Demographic Details" + }, + "fields": demographics, + "preRegFetchRequired": False, + "additionalInfoRequestIdRequired": False, + "active": False + }, + { + "order": 3, + "name": "DocumentDetails", + "label": { + "ara": "تحميل الوثيقة", + "fra": "Des documents", + "eng": "Document Upload" + }, + "caption": { + "ara": "وثائق", + "fra": "Des documents", + "eng": "Documents" + }, + "fields": documents, + "preRegFetchRequired": False, + "additionalInfoRequestIdRequired": False, + "active": False + }, + { + "order": 4, + "name": "BiometricDetails", + "label": { + "ara": "التفاصيل البيومترية", + "fra": "Détails biométriques", + "eng": "Biometric Details" + }, + "caption": { + "ara": "التفاصيل البيومترية", + "fra": "Détails biométriques", + "eng": "Biometric Details" + 
}, + "fields": biometrics, + "preRegFetchRequired": False, + "additionalInfoRequestIdRequired": False, + "active": False + } + ], + "autoSelectedGroups": ["Consent","Documents","Biometrics"], + "caption": { + "eng": "Update UIN", + "ara": "تحديث UIN", + "fra": "Mettre à jour l'UIN" + }, + "icon": "UINUpdate.png", + "isActive": True + } + + + for field in biometric_fields: + if field['id'] == individual_bio_field.get('value'): + individualBioField = {key: value for key, value in field.items()} + individualBioField["conditionalBioAttributes"]=getConditionalBioAttributes() + individualBioField["exceptionPhotoRequired"] = True + individualBioField["required"] = True + individualBioField["group"] = "Biometrics" + individualBioField["groupLabel"] = { + "ara": "القياسات الحيوية", + "fra": "Biométrie", + "eng": "Biometrics" + } + individualBioField["requiredOn"] = [{ + "engine": "MVEL", + "expr": "identity.updatableFieldGroups contains 'Biometrics'" + }] + individualBioField["subType"] = "applicant" + + spec['screens'].append({ + "order": 4, + "name": "IndividualBiometricDetails", + "label": individualBioField["label"], + "caption": individualBioField["label"], + "fields": [individualBioField], + "preRegFetchRequired": False, + "additionalInfoRequestIdRequired": False, + "active": False + }) + + if field['id'] == auth_bio_field.get('value'): + authBioField = {key: value for key, value in field.items()} + authBioField["conditionalBioAttributes"]=[{ + "ageGroup": "ALL", + "process": "ALL", + "validationExpr": " || ".join(allBioAttributes), + "bioAttributes": allBioAttributes + }] + authBioField["required"] = False + authBioField["group"] = "Biometrics" + authBioField["groupLabel"] = { + "ara": "القياسات الحيوية", + "fra": "Biométrie", + "eng": "Biometrics" + } + authBioField["requiredOn"] = [{ + "engine": "MVEL", + "expr": "!(identity.get('ageGroup') == '"+infantAgeGroup+"') && !(identity.updatableFieldGroups contains 'Biometrics')" + }] + authBioField["subType"] = 
"applicant-auth" + + spec['screens'].append({ + "order": 5, + "name": "AuthBiometricDetails", + "label": authBioField["label"], + "caption": authBioField["label"], + "fields": [authBioField], + "preRegFetchRequired": False, + "additionalInfoRequestIdRequired": False, + "active": False + }) + + if field['id'] == guardian_bio_field.get('value'): + guardianBioField = {key: value for key, value in field.items()} + guardianBioField["conditionalBioAttributes"]=[{ + "ageGroup": "ALL", + "process": "ALL", + "validationExpr": " || ".join(allBioAttributes), + "bioAttributes": allBioAttributes + }] + guardianBioField["group"] = "Biometrics" + guardianBioField["required"] = False + guardianBioField["requiredOn"] = [{ + "engine": "MVEL", + "expr": "identity.get('ageGroup') == '"+infantAgeGroup+"' || identity.updatableFieldGroups contains '"+guardian_group_name+"'" + }] + guardianBioField["subType"] = "introducer" + + spec['screens'].append({ + "order": 6, + "name": "GuardianBiometricDetails", + "label": guardianBioField["label"], + "caption": guardianBioField["label"], + "fields": [guardianBioField], + "preRegFetchRequired": False, + "additionalInfoRequestIdRequired": False, + "active": False + }) + return spec + + +def buildLostRegistrationSpec(demographic_fields, document_fields, biometric_fields): + spec = { + "id": "LOST", + "order": 3, + "flow": "LOST", + "label": { + "eng": "Lost UIN", + "ara": "فقدت UIN", + "fra": "UIN perdu" + }, + "screens": [{ + "order": 1, + "name": "consentdet", + "label": { + "ara": "موافقة", + "fra": "Consentement", + "eng": "Consent" + }, + "caption": { + "ara": "موافقة", + "fra": "Consentement", + "eng": "Consent" + }, + "fields": getConsentFields(), + "preRegFetchRequired": False, + "additionalInfoRequestIdRequired": False, + "active": False + },{ + "order": 2, + "name": "DemographicsDetails", + "label": { + "ara": "التفاصيل الديموغرافية", + "fra": "Détails démographiques", + "eng": "Demographic Details" + }, + "caption": { + "ara": "التفاصيل 
الديموغرافية", + "fra": "Détails démographiques", + "eng": "Demographic Details" + }, + "fields": demographics, + "preRegFetchRequired": False, + "additionalInfoRequestIdRequired": False, + "active": False + }, + { + "order": 3, + "name": "DocumentDetails", + "label": { + "ara": "تحميل الوثيقة", + "fra": "Des documents", + "eng": "Document Upload" + }, + "caption": { + "ara": "وثائق", + "fra": "Des documents", + "eng": "Documents" + }, + "fields": documents, + "preRegFetchRequired": False, + "additionalInfoRequestIdRequired": False, + "active": False + } + ], + "caption": { + "eng": "Lost UIN", + "ara": "فقدت UIN", + "fra": "UIN perdu" + }, + "icon": "LostUIN.png", + "isActive": True + } + + for field in biometric_fields: + if field['id'] == individual_bio_field.get('value'): + individualBioField = {key: value for key, value in field.items()} + individualBioField["conditionalBioAttributes"]=getConditionalBioAttributes() + individualBioField["required"] = True + individualBioField["requiredOn"] = [] + individualBioField["subType"] = "applicant" + + spec['screens'].append({ + "order": 4, + "name": "BiometricDetails", + "label": individualBioField["label"], + "caption": individualBioField["label"], + "fields": [individualBioField], + "preRegFetchRequired": False, + "additionalInfoRequestIdRequired": False, + "active": False + }) + + if field['id'] == guardian_bio_field.get('value'): + guardianBioField = {key: value for key, value in field.items()} + guardianBioField["conditionalBioAttributes"]=[{ + "ageGroup": "ALL", + "process": "ALL", + "validationExpr": " || ".join(allBioAttributes), + "bioAttributes": allBioAttributes + }] + guardianBioField["group"] = "Biometrics" + guardianBioField["required"] = False + guardianBioField["requiredOn"] = [{ + "engine": "MVEL", + "expr": "identity.get('ageGroup') == '"+infantAgeGroup+"'" + }] + guardianBioField["subType"] = "introducer" + + spec['screens'].append({ + "order": 5, + "name": "GuardianBiometricDetails", + "label": 
guardianBioField["label"], + "caption": guardianBioField["label"], + "fields": [guardianBioField], + "preRegFetchRequired": False, + "additionalInfoRequestIdRequired": False, + "active": False + }) + + for field in demographics: + field["required"] = False + + return spec + + + + +# invoke syncdata service with authtoken in headers +req_headers={'Cookie' : 'Authorization='+getAccessToken()} +get_schema_resp=requests.get(schemaURL, headers=req_headers) +print(get_schema_resp) +schema_resp_json=get_schema_resp.json() +schema_resp=schema_resp_json['response'] +identity_schema_id=schema_resp['id'] +cur_schema=schema_resp['schema'] +domain='registration-client' + + +demographics=[] +documents=[] +biometrics=[] + +# read response json and create UI-specs +for field in cur_schema: + if(field['inputRequired']): + #Add labels + labels=field['label'] + labels[primaryLang]=labels['primary'] + if(labels.get('secondary') != None): + labels[secondaryLang]=labels['secondary'] + field['label']=labels + + if field['type'] == 'documentType': + documents.append(field) + elif field['type'] == 'biometricsType': + biometrics.append(field) + else: + if field['fieldType'] == 'dynamic': + newSubType = field['id'] + field['subType'] = newSubType + demographics.append(field) + + +#set all the required field mappings +response = requests.get(args.identityMappingJsonUrl) +data = json.loads(response.text) +identity_mapping_json = data['identity'] +if(identity_mapping_json.get('individualBiometrics') != None): + individual_bio_field=identity_mapping_json.get('individualBiometrics') +if(identity_mapping_json.get('introducerBiometrics') != None): + guardian_bio_field=identity_mapping_json.get('introducerBiometrics') +if(identity_mapping_json.get('individualAuthBiometrics') != None): + auth_bio_field=identity_mapping_json.get('individualAuthBiometrics') + +if(identity_mapping_json.get('introducerName') != None): + guardian_demo_fields.append(identity_mapping_json.get('introducerName').get('value')) 
+if(identity_mapping_json.get('introducerUIN') != None): + guardian_demo_fields.append(identity_mapping_json.get('introducerUIN').get('value')) +if(identity_mapping_json.get('introducerVID') != None): + guardian_demo_fields.append(identity_mapping_json.get('introducerVID').get('value')) +if(identity_mapping_json.get('introducerRID') != None): + guardian_demo_fields.append(identity_mapping_json.get('introducerRID').get('value')) + + +guardian_group = getGuardianDemographicFieldGroup(demographics); + +## should take user input about biometric fields: +bioFieldIds = [] +for field in biometrics: + bioFieldIds.append(field['id']) + +isValid = isValidBioFieldIds(bioFieldIds) +if isValid == False: + sys.exit("Kindly check the biometics field Ids provided as input. Must be one in above valid values") + + +#Read ageGroup config and take the modalities input +getSupportedAgeGroups() + +#publish ui-spec with for new process +publish_spec(domain, 'newProcess', buildNewRegistrationSpec(demographics, documents, biometrics)) + + +#publish ui-spec with for update process +publish_spec(domain, 'updateProcess', buildUpdateRegistrationSpec(demographics, documents, biometrics, guardian_group)) + + +#publish ui-spec with for lost process +publish_spec(domain, 'lostProcess', buildLostRegistrationSpec(demographics, documents, biometrics)) + + +#publish ui-spec with for settings screens +publish_spec(domain, 'settings', buildSettingsSpec()) diff --git a/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/revoke-migration-dynamicfield.py b/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/revoke-migration-dynamicfield.py new file mode 100644 index 00000000..ebb8ccb8 --- /dev/null +++ b/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/revoke-migration-dynamicfield.py @@ -0,0 +1,25 @@ +#!/usr/bin/python3 + +import psycopg2 +import json +import sys + +conn = psycopg2.connect(database="mosip_master", user = sys.argv[1], password = sys.argv[2], host = sys.argv[3], port = sys.argv[4]) + +print("Opened database 
successfully") + +cur = conn.cursor() + +#DROP existing dynamic_field table +cur.execute('DROP TABLE IF EXISTS master.dynamic_field;') + +#rollback backup table +cur.execute('ALTER TABLE master.dynamic_field_migr_bkp RENAME TO dynamic_field;') + +print("Renamed dynamic_field_migr_bkp to dynamic_field") + +# Commit and close connection +conn.commit() + +print("Closing the database connection") +conn.close() diff --git a/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/rollback_commands.txt b/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/rollback_commands.txt new file mode 100644 index 00000000..8e23472b --- /dev/null +++ b/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/rollback_commands.txt @@ -0,0 +1 @@ +python3 revoke-migration-dynamicfield.py "$SU_USER" "$SU_USER_PWD" "$DB_SERVERIP" "$DB_PORT" diff --git a/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/template_delta.xlsx b/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/template_delta.xlsx new file mode 100644 index 00000000..ddbff135 Binary files /dev/null and b/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/template_delta.xlsx differ diff --git a/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/template_type_delta.xlsx b/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/template_type_delta.xlsx new file mode 100644 index 00000000..48a676ed Binary files /dev/null and b/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/template_type_delta.xlsx differ diff --git a/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/upgrade.properties b/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/upgrade.properties new file mode 100644 index 00000000..b3d06187 --- /dev/null +++ b/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/upgrade.properties @@ -0,0 +1,16 @@ +DB_SERVERIP=localhost +DB_PORT=30091 +SU_USER=dbuser +SU_USER_PWD= +ACTION=upgrade +UPGRADE_DOMAIN_NAME= +GLOBAL_ADMIN_USER= +GLOBAL_ADMIN_USER_PWD= +PRIMARY_LANGUAGE_CODE= +SECONDARY_LANGUAGE_CODE= +GENDER_DYNAMIC_FIELD_NAME=gender +INDIVIDUAL_TYPE_DYNAMIC_FIELD_NAME=residenceStatus 
+IDENTITY_MAPPING_JSON_URL=https://raw.githubusercontent.com/mosip/mosip-config/qa3-upgrade/identity-mapping.json +AGEGROUP_CONFIG={"CHILD":{"bioAttributes":["face"],"isGuardianAuthRequired":true},"ADULT":{"bioAttributes":["leftEye","rightEye","rightIndex","rightLittle","rightRing","rightMiddle","leftIndex","leftLittle","leftRing","leftMiddle","leftThumb","rightThumb","face"],"isGuardianAuthRequired":false},"SENIOR_CITIZEN":{"bioAttributes":["leftEye","rightEye","rightIndex","rightLittle","rightRing","rightMiddle","leftIndex","leftLittle","leftRing","leftMiddle","leftThumb","rightThumb","face"],"isGuardianAuthRequired":false}} +INFANT_AGEGROUP=CHILD +ALLOWED_BIO_ATTRIBUTES=leftEye,rightEye,rightIndex,rightLittle,rightRing,rightMiddle,leftIndex,leftLittle,leftRing,leftMiddle,leftThumb,rightThumb,face diff --git a/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/upgrade.sh b/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/upgrade.sh new file mode 100755 index 00000000..2277ea46 --- /dev/null +++ b/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/upgrade.sh @@ -0,0 +1,39 @@ +#!/bin/bash + +set -e +properties_file="$1" +echo `date "+%m/%d/%Y %H:%M:%S"` ": $properties_file" +if [ -f "$properties_file" ] +then + echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file \"$properties_file\" found." + while IFS='=' read -r key value + do + key=$(echo $key | tr '.' '_') + eval ${key}=\${value} + done < "$properties_file" +else + echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file not found, Pass property file name as argument." 
+fi + +echo "Action: $ACTION" + +line_number=0 +# Execute upgrade or rollback +if [ "$ACTION" == "upgrade" ]; then + while read command; do + let "line_number=line_number+1" + echo "==================== Executing Command : $line_number ===========================" + eval ${command} + done < upgrade_commands.txt + +elif [ "$ACTION" == "rollback" ]; then + while read command; do + let "line_number=line_number+1" + echo "==================== Executing Command : $line_number ===========================" + eval ${command} + done < rollback_commands.txt + +else + echo "Unknown action: $ACTION, must be 'upgrade' or 'rollback'." + exit 1 +fi \ No newline at end of file diff --git a/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/upgrade_commands.txt b/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/upgrade_commands.txt new file mode 100644 index 00000000..cbf37391 --- /dev/null +++ b/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/upgrade_commands.txt @@ -0,0 +1,7 @@ +python3 migration-ui_spec.py -d "$UPGRADE_DOMAIN_NAME" -u "$GLOBAL_ADMIN_USER" -p "$GLOBAL_ADMIN_USER_PWD" -pl "$PRIMARY_LANGUAGE_CODE" -sl "$SECONDARY_LANGUAGE_CODE" --identityMappingJsonUrl "$IDENTITY_MAPPING_JSON_URL" --ageGroupConfig "$AGEGROUP_CONFIG" --infantAgeGroup "$INFANT_AGEGROUP" --allowedBioAttributes "$ALLOWED_BIO_ATTRIBUTES" +python3 migration-dynamicfield.py "$SU_USER" "$SU_USER_PWD" "$DB_SERVERIP" "$DB_PORT" "$GENDER_DYNAMIC_FIELD_NAME" "$INDIVIDUAL_TYPE_DYNAMIC_FIELD_NAME" +python3 data-uploader.py --domain "$UPGRADE_DOMAIN_NAME" --username "$GLOBAL_ADMIN_USER" --password "$GLOBAL_ADMIN_USER_PWD" --table "template_type" --operation "Insert" --autogen 0 --file "template_type_delta.xlsx" +python3 data-uploader.py --domain "$UPGRADE_DOMAIN_NAME" --username "$GLOBAL_ADMIN_USER" --password "$GLOBAL_ADMIN_USER_PWD" --table "template" --operation "Insert" --dbusername "$SU_USER" --dbpassword "$SU_USER_PWD" --dbhost "$DB_SERVERIP" --dbport "$DB_PORT" --sheetname Sheet1 --idcolumn A --autogen 1 --file 
"template_delta.xlsx" +python3 data-uploader.py --domain "$UPGRADE_DOMAIN_NAME" --username "$GLOBAL_ADMIN_USER" --password "$GLOBAL_ADMIN_USER_PWD" --table "machine_type" --operation "Insert" --dbusername "$SU_USER" --dbpassword "$SU_USER_PWD" --dbhost "$DB_SERVERIP" --dbport "$DB_PORT" --sheetname machine_type --autogen 0 --file "machine_type_delta.xlsx" +python3 data-uploader.py --domain "$UPGRADE_DOMAIN_NAME" --username "$GLOBAL_ADMIN_USER" --password "$GLOBAL_ADMIN_USER_PWD" --table "machine_spec" --operation "Insert" --dbusername "$SU_USER" --dbpassword "$SU_USER_PWD" --dbhost "$DB_SERVERIP" --dbport "$DB_PORT" --sheetname machine_spec --autogen 0 --file "machine_spec_delta.xlsx" +python3 data-uploader.py --domain "$UPGRADE_DOMAIN_NAME" --username "$GLOBAL_ADMIN_USER" --password "$GLOBAL_ADMIN_USER_PWD" --table "zone_user" --operation "Insert" --dbusername "$SU_USER" --dbpassword "$SU_USER_PWD" --dbhost "$DB_SERVERIP" --dbport "$DB_PORT" --sheetname zone_user --autogen 0 --file "zone_user_delta.xlsx" \ No newline at end of file diff --git a/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/zone_user_delta.xlsx b/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/zone_user_delta.xlsx new file mode 100644 index 00000000..790918c1 Binary files /dev/null and b/mosip_master/data_upgrade/1.1.5.5_to_1.2.0.1/zone_user_delta.xlsx differ diff --git a/mosip_master/data_upgrade/1.2.0.1_to_1.3.0/README.md b/mosip_master/data_upgrade/1.2.0.1_to_1.3.0/README.md new file mode 100644 index 00000000..2dfbf50d --- /dev/null +++ b/mosip_master/data_upgrade/1.2.0.1_to_1.3.0/README.md @@ -0,0 +1,18 @@ +## Migrating country-specific data from 1.2.0.1 to 1.3.0 version + +Prerequisites: +-> SQL migration must be successfully executed. +-> Go to upgrade.properties file and modify the property values as per the environment. + +Note: List of the commands executed during the data upgrade can be found in upgrade_commands.txt. One command per line. 
Commands using data-uploader.py script cannot be executed after it is successfully executed once. It should be commented for the next execution(upgrade.sh ignores the commented lines). + + +1. Template type and Template data change: + + New template types and templates were introduced, also some of the template contents were modified. All the new types and templates itself are provided in the template_delta.xlsx and tempate_type_delta.xlsx file in the same directoy in english, arabic, french, kannada, hindi and tamil languages. + All the modified template_types and templates are listed in the template_updated.xlsx and template_type_updated.xlsx file. + + 1. "id" column in the template_delta.xlsx excel sheet is autogenerated before upload to server, as autogen is set to 1. + 2. Make sure to remove unsupported languages from the excel files before starting the migration. + 3. Cross check if all the language specific data is valid and correct w.r.t the language.Make the change as required in the template text (file_text column) if required. + 4. Update the "id" column in template_updated.xlsx with correct "id" value as present in your DB before starting the migration. 
diff --git a/mosip_master/data_upgrade/1.2.0.1_to_1.3.0/data-uploader.py b/mosip_master/data_upgrade/1.2.0.1_to_1.3.0/data-uploader.py new file mode 100644 index 00000000..8f2ec576 --- /dev/null +++ b/mosip_master/data_upgrade/1.2.0.1_to_1.3.0/data-uploader.py @@ -0,0 +1,162 @@ +# -*- coding: utf-8 -*- + +#!/usr/bin/python3 + + +## This script should be executed after DB upgrade and 1.2.0.* masterdata-service deployment + +from datetime import datetime, timezone, timedelta +import argparse +import requests +import json +import sys +import time +import psycopg2 +import openpyxl + +parser = argparse.ArgumentParser(description='This is CSV/xlsx file uploader script.invokes 1.2.0.1 bulk upload endpoints') +parser.add_argument("--domain", type=str, required=True, help="Server domain name, eg: api-internal.dev.mosip.net") +parser.add_argument("--username", type=str, required=True, help="User with GLOBAL_ADMIN & REGISTRATION_ADMIN role") +parser.add_argument("--password", type=str, required=True, help="User password") +parser.add_argument("--table", type=str, required=True, help="Database table name") +parser.add_argument("--operation", type=str, required=True, help="Database operation, eg: Insert or Update or Delete") +parser.add_argument("--file", type=str, required=True, help="Input file CSV or xlsx") +parser.add_argument("--autogen", choices=(1,0), default=0, type=int, required=False, help="Autogenerate value for id column") +parser.add_argument("--idcolumn", type=str, required=False, help="id column name, eg: A or B ...") +parser.add_argument("--sheetname", type=str, required=False, help="Sheet name to operate") +parser.add_argument("--dbusername", type=str, required=False, help="DB username") +parser.add_argument("--dbpassword", type=str, required=False, help="DB username") +parser.add_argument("--dbhost", type=str, required=False, help="DB hostname") +parser.add_argument("--dbport", type=str, required=False, help="DB port number") + +args = parser.parse_args() + 
+## Values to be updated as per the deployment +authURL='https://'+args.domain+'/v1/authmanager/authenticate/useridPwd' +uploadURL='https://'+args.domain+'/v1/admin/bulkupload' +uploadStatusURL='https://'+args.domain+'/v1/admin/bulkupload/transcation/' +username=args.username +password=args.password + +def getCurrentDateTime(): + dt_now = datetime.now(timezone.utc) + dt_now_str = dt_now.strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3] + return dt_now_str+'Z' + + +def get_seed_value(): + conn = psycopg2.connect(database="mosip_master", user = args.dbusername, password = args.dbpassword, host = args.dbhost, port = args.dbport) + cursor = conn.cursor() + cursor.execute("select id from master."+args.table+" order by id desc limit 20") + for row in cursor.fetchall(): + id_value = row[0] + if id_value is None: + seed_value = 1000 + break + if id_value.isdigit(): + seed_value = id_value + break; + + if seed_value == None: + seed_value = 1000 + return seed_value + + +def find_last_data_row(sheet): + max_row = sheet.max_row + + for row in range(max_row, 0, -1): + for cell in sheet[row]: + if cell.value is not None: + return row + +def fill_series(): + if args.sheetname == None: + print("Sheet name is required to fill series in id column.") + exit(1) + + if args.idcolumn == None: + print("id column name is required to fill series.") + exit(1) + + seed_value = get_seed_value() + + print("Sheet name: ",args.sheetname) + print("Id column to fill series: ", args.idcolumn) + print("Seed value: ", seed_value) + + workbook = openpyxl.load_workbook(args.file) + sheet = workbook[args.sheetname] + column = sheet[args.idcolumn] + + start_row = 2 + end_row = find_last_data_row(sheet) + + print("Start Row: ", start_row) + print("End Row: ", end_row) + + if(start_row is None and end_row is None): + print("Need a valid start_row and end_row!") + return + + for i, value in enumerate(range(start_row, end_row + 1), start=1): + column[i].value = int(seed_value) + value + + workbook.save(args.file) + 
workbook.close() + + + +def getAccessToken(): + auth_req_data = { + 'id': 'string', + 'metadata': {}, + 'request': { + 'appId': 'admin', + 'password': password, + 'userName': username + }, + 'requesttime': getCurrentDateTime(), + 'version': 'string' + } + authresponse=requests.post(authURL, json= auth_req_data) + print(json.dumps(authresponse.json())) + return authresponse.headers["authorization"] + + + +def uploadFile(): + if args.autogen == 1 : + fill_series() + + data = {'category': 'masterdata', 'operation': args.operation, 'tableName': args.table} + files = {'files': open(args.file, 'rb')} + uploadResponse = requests.post(uploadURL, data=data, files=files, headers=req_headers, verify=True) + uploadResponse_json = uploadResponse.json() + response = uploadResponse_json['response'] + print(json.dumps(uploadResponse_json)) + return response['transcationId'] + + +def getTransactionStatus(transactionId): + statusResponse = requests.get(uploadStatusURL+transactionId, headers=req_headers, verify=True) + statusResponse_json = statusResponse.json() + response = statusResponse_json['response'] + return response + + +req_headers={'Cookie' : 'Authorization='+getAccessToken()} +transactionId = uploadFile() +while True: + time.sleep(5) ## sleep for 5 seconds + status_response = getTransactionStatus(transactionId) + print(json.dumps(status_response)) + status = status_response["status"] + if status == "COMPLETED": + break + if status == "FAILED": + sys.exit("Transcation failed") + + + + diff --git a/mosip_master/data_upgrade/1.2.0.1_to_1.3.0/rollback_commands.txt b/mosip_master/data_upgrade/1.2.0.1_to_1.3.0/rollback_commands.txt new file mode 100644 index 00000000..e69de29b diff --git a/mosip_master/data_upgrade/1.2.0.1_to_1.3.0/template_delta.xlsx b/mosip_master/data_upgrade/1.2.0.1_to_1.3.0/template_delta.xlsx new file mode 100644 index 00000000..779f09e8 Binary files /dev/null and b/mosip_master/data_upgrade/1.2.0.1_to_1.3.0/template_delta.xlsx differ diff --git 
a/mosip_master/data_upgrade/1.2.0.1_to_1.3.0/template_type_delta.xlsx b/mosip_master/data_upgrade/1.2.0.1_to_1.3.0/template_type_delta.xlsx new file mode 100644 index 00000000..6ca013c0 Binary files /dev/null and b/mosip_master/data_upgrade/1.2.0.1_to_1.3.0/template_type_delta.xlsx differ diff --git a/mosip_master/data_upgrade/1.2.0.1_to_1.3.0/template_type_updated.xlsx b/mosip_master/data_upgrade/1.2.0.1_to_1.3.0/template_type_updated.xlsx new file mode 100644 index 00000000..b72d6eeb Binary files /dev/null and b/mosip_master/data_upgrade/1.2.0.1_to_1.3.0/template_type_updated.xlsx differ diff --git a/mosip_master/data_upgrade/1.2.0.1_to_1.3.0/template_updated.xlsx b/mosip_master/data_upgrade/1.2.0.1_to_1.3.0/template_updated.xlsx new file mode 100644 index 00000000..634994ee Binary files /dev/null and b/mosip_master/data_upgrade/1.2.0.1_to_1.3.0/template_updated.xlsx differ diff --git a/mosip_master/data_upgrade/1.2.0.1_to_1.3.0/upgrade.properties b/mosip_master/data_upgrade/1.2.0.1_to_1.3.0/upgrade.properties new file mode 100644 index 00000000..685755b2 --- /dev/null +++ b/mosip_master/data_upgrade/1.2.0.1_to_1.3.0/upgrade.properties @@ -0,0 +1,8 @@ +DB_SERVERIP=localhost +DB_PORT=30091 +SU_USER=dbuser +SU_USER_PWD= +ACTION=upgrade +UPGRADE_DOMAIN_NAME= +GLOBAL_ADMIN_USER= +GLOBAL_ADMIN_USER_PWD= \ No newline at end of file diff --git a/mosip_master/data_upgrade/1.2.0.1_to_1.3.0/upgrade.sh b/mosip_master/data_upgrade/1.2.0.1_to_1.3.0/upgrade.sh new file mode 100755 index 00000000..2277ea46 --- /dev/null +++ b/mosip_master/data_upgrade/1.2.0.1_to_1.3.0/upgrade.sh @@ -0,0 +1,39 @@ +#!/bin/bash + +set -e +properties_file="$1" +echo `date "+%m/%d/%Y %H:%M:%S"` ": $properties_file" +if [ -f "$properties_file" ] +then + echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file \"$properties_file\" found." + while IFS='=' read -r key value + do + key=$(echo $key | tr '.' 
'_') + eval ${key}=\${value} + done < "$properties_file" +else + echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file not found, Pass property file name as argument." +fi + +echo "Action: $ACTION" + +line_number=0 +# Execute upgrade or rollback +if [ "$ACTION" == "upgrade" ]; then + while read command; do + let "line_number=line_number+1" + echo "==================== Executing Command : $line_number ===========================" + eval ${command} + done < upgrade_commands.txt + +elif [ "$ACTION" == "rollback" ]; then + while read command; do + let "line_number=line_number+1" + echo "==================== Executing Command : $line_number ===========================" + eval ${command} + done < rollback_commands.txt + +else + echo "Unknown action: $ACTION, must be 'upgrade' or 'rollback'." + exit 1 +fi \ No newline at end of file diff --git a/mosip_master/data_upgrade/1.2.0.1_to_1.3.0/upgrade_commands.txt b/mosip_master/data_upgrade/1.2.0.1_to_1.3.0/upgrade_commands.txt new file mode 100644 index 00000000..013c01fe --- /dev/null +++ b/mosip_master/data_upgrade/1.2.0.1_to_1.3.0/upgrade_commands.txt @@ -0,0 +1,4 @@ +python3 data-uploader.py --domain "$UPGRADE_DOMAIN_NAME" --username "$GLOBAL_ADMIN_USER" --password "$GLOBAL_ADMIN_USER_PWD" --table "template_type" --operation "Insert" --autogen 0 --file "template_type_delta.xlsx" +python3 data-uploader.py --domain "$UPGRADE_DOMAIN_NAME" --username "$GLOBAL_ADMIN_USER" --password "$GLOBAL_ADMIN_USER_PWD" --table "template" --operation "Insert" --dbusername "$SU_USER" --dbpassword "$SU_USER_PWD" --dbhost "$DB_SERVERIP" --dbport "$DB_PORT" --sheetname Sheet1 --idcolumn A --autogen 1 --file "template_delta.xlsx" +python3 data-uploader.py --domain "$UPGRADE_DOMAIN_NAME" --username "$GLOBAL_ADMIN_USER" --password "$GLOBAL_ADMIN_USER_PWD" --table "template_type" --operation "Update" --autogen 0 --file "template_type_updated.xlsx" +python3 data-uploader.py --domain "$UPGRADE_DOMAIN_NAME" --username "$GLOBAL_ADMIN_USER" --password 
"$GLOBAL_ADMIN_USER_PWD" --table "template" --operation "Update" --dbusername "$SU_USER" --dbpassword "$SU_USER_PWD" --dbhost "$DB_SERVERIP" --dbport "$DB_PORT" --sheetname Sheet1 --idcolumn A --autogen 0 --file "template_updated.xlsx" \ No newline at end of file diff --git a/mosip_master/data_upgrade/README.md b/mosip_master/data_upgrade/README.md new file mode 100644 index 00000000..312cc485 --- /dev/null +++ b/mosip_master/data_upgrade/README.md @@ -0,0 +1,41 @@ +## Data migration Procedure + +All the masterdata DDL’s and platform specific tables DML’s will stay in the admin-services repository. + +Any language-dependent or country specific data change(new/update/deletion) will be in mosip_data repository. + +mosip_data → repository + + xlsx → all the Upto date DMLs + + data_upgrade → folder to hold only the delta + + _to_ → Folder to contain scripts and data files required for country-specific data migration. + + scripts to handle specific data change eg: change in UI spec + + Readme + + upgrade.sh + + upgrade.properties + + upgrade_commands.txt + + rollback_commands.txt + + +## How to run the migration script + +`bash upgrade.sh upgrade.properties` + +upgrade.sh file execute the list of commands one after the other listed in upgrade_commands.txt +Before executing the script, kindly update the upgrade.properties with valid values. + + + + + + + + diff --git a/mosip_master/xlsx/.~lock.zone.xlsx# b/mosip_master/xlsx/.~lock.zone.xlsx# deleted file mode 100644 index 5b98d9b8..00000000 --- a/mosip_master/xlsx/.~lock.zone.xlsx# +++ /dev/null @@ -1 +0,0 @@ -,techno-243,gagana26,28.04.2022 14:44,file:///home/techno-243/.config/libreoffice/4; \ No newline at end of file