From 3d53991ddc1952ed151eff969f66f84f825052ac Mon Sep 17 00:00:00 2001 From: Sada Date: Thu, 11 Feb 2021 21:52:13 +0530 Subject: [PATCH 001/130] Added archive scripts --- .../mosip_ida/mosip_archive_ida.ini | 18 ++ .../mosip_ida/mosip_archive_ida_table1.py | 107 +++++++++++ .../mosip_ida/mosip_archive_ida_table2.py | 107 +++++++++++ .../mosip_ida/mosip_archive_job_ida.sh | 16 ++ .../mosip_idrepo/mosip_archive_idrepo.ini | 20 ++ .../mosip_archive_idrepo_table1.py | 107 +++++++++++ .../mosip_archive_idrepo_table2.py | 107 +++++++++++ .../mosip_archive_idrepo_table3.py | 107 +++++++++++ .../mosip_idrepo/mosip_archive_job_idrepo.sh | 19 ++ .../mosip_prereg/mosip_archive_job_prereg.sh | 22 +++ .../mosip_prereg/mosip_archive_prereg.ini | 20 ++ .../mosip_archive_prereg_table1.py | 107 +++++++++++ .../mosip_archive_prereg_table2.py | 107 +++++++++++ .../mosip_archive_prereg_table3.py | 107 +++++++++++ .../mosip_archive_prereg_table4.py | 107 +++++++++++ .../mosip_regprc/mosip_archive_job_regprc.sh | 13 ++ .../mosip_regprc/mosip_archive_regprc.ini | 17 ++ .../mosip_archive_regprc_table1.py | 107 +++++++++++ data-archive/db_scripts/README.MD | 178 ++++++++++++++++++ .../ddl/archive-app_audit_log.sql | 75 ++++++++ ...archive-applicant_demographic_consumed.sql | 54 ++++++ .../archive-applicant_document_consumed.sql | 75 ++++++++ .../ddl/archive-auth_transaction.sql | 90 +++++++++ .../ddl/archive-otp_transaction.sql | 62 ++++++ .../ddl/archive-processed_prereg_list.sql | 41 ++++ .../ddl/archive-reg_appointment_consumed.sql | 58 ++++++ .../ddl/archive-reg_demo_dedupe_list.sql | 48 +++++ .../ddl/archive-reg_manual_verification.sql | 73 +++++++ ...archive-registered_authdevice_master_h.sql | 82 ++++++++ .../archive-registered_regdevice_master_h.sql | 82 ++++++++ .../ddl/archive-registration_transaction.sql | 72 +++++++ .../ddl/archive-uin_biometric_h.sql | 61 ++++++ .../ddl/archive-uin_document_h.sql | 67 +++++++ .../mosip_archive/ddl/archive-uin_h.sql | 70 +++++++ .../mosip_archive/ddl/archive-vid.sql | 65 +++++++ .../mosip_archive/mosip_archive_db.sql | 27 +++ .../mosip_archive/mosip_archive_db_deploy.sh | 112 +++++++++++ .../mosip_archive_ddl_deploy.sql | 19 ++ .../mosip_archive_deploy.properties | 15 ++ .../mosip_archive/mosip_archive_grants.sql | 48 +++++ .../mosip_archive/mosip_role_archiveuser.sql | 7 + .../mosip_archive/mosip_role_common.sql | 31 +++ 42 files changed, 2727 insertions(+) create mode 100644 data-archive/archive-jobs/mosip_ida/mosip_archive_ida.ini create mode 100644 data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table1.py create mode 100644 data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table2.py create mode 100644 data-archive/archive-jobs/mosip_ida/mosip_archive_job_ida.sh create mode 100644 data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo.ini create mode 100644 data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table1.py create mode 100644 data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table2.py create mode 100644 data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table3.py create mode 100644 data-archive/archive-jobs/mosip_idrepo/mosip_archive_job_idrepo.sh create mode 100644 data-archive/archive-jobs/mosip_prereg/mosip_archive_job_prereg.sh create mode 100644 data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg.ini create mode 100644 data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table1.py create mode 100644 data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table2.py create mode 100644 
data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table3.py create mode 100644 data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table4.py create mode 100644 data-archive/archive-jobs/mosip_regprc/mosip_archive_job_regprc.sh create mode 100644 data-archive/archive-jobs/mosip_regprc/mosip_archive_regprc.ini create mode 100644 data-archive/archive-jobs/mosip_regprc/mosip_archive_regprc_table1.py create mode 100644 data-archive/db_scripts/README.MD create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-app_audit_log.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-applicant_demographic_consumed.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-applicant_document_consumed.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-auth_transaction.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-otp_transaction.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-processed_prereg_list.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-reg_appointment_consumed.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-reg_demo_dedupe_list.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-reg_manual_verification.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-registered_authdevice_master_h.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-registered_regdevice_master_h.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-registration_transaction.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-uin_biometric_h.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-uin_document_h.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-uin_h.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-vid.sql create mode 100644 data-archive/db_scripts/mosip_archive/mosip_archive_db.sql create mode 100644 data-archive/db_scripts/mosip_archive/mosip_archive_db_deploy.sh create mode 100644 data-archive/db_scripts/mosip_archive/mosip_archive_ddl_deploy.sql create mode 100644 data-archive/db_scripts/mosip_archive/mosip_archive_deploy.properties create mode 100644 data-archive/db_scripts/mosip_archive/mosip_archive_grants.sql create mode 100644 data-archive/db_scripts/mosip_archive/mosip_role_archiveuser.sql create mode 100644 data-archive/db_scripts/mosip_archive/mosip_role_common.sql diff --git a/data-archive/archive-jobs/mosip_ida/mosip_archive_ida.ini b/data-archive/archive-jobs/mosip_ida/mosip_archive_ida.ini new file mode 100644 index 00000000..9fc5cfea --- /dev/null +++ b/data-archive/archive-jobs/mosip_ida/mosip_archive_ida.ini @@ -0,0 +1,18 @@ +[MOSIP-DB-SECTION] +source_db_serverip=13.233.223.29 +source_db_port=30090 +source_db_name=mosip_ida +source_schema_name=ida +source_db_uname=idacuser +source_db_pass=Mosip@dev123 +archive_table1=auth_transaction +archive_table2=otp_transaction + +archive_db_serverip=13.233.223.29 +archive_db_port=30090 +archive_db_name=mosip_archive +archive_schema_name=archive +archive_db_uname=archiveuser +archive_db_pass=Mosip@dev123 + +archive_older_than_days = 2 \ No newline at end of file diff --git a/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table1.py b/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table1.py new file mode 100644 index 00000000..5bef0734 --- /dev/null +++ 
b/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table1.py @@ -0,0 +1,107 @@ +#-- ------------------------------------------------------------------------------------------------- +#-- Job Name : ID Authentication DB Tables Archive +#-- DB Name : mosip_ida +#-- Table Names : auth_transaction +#-- Purpose : Job to Archive Data in ID Authentication DB for above mentioned tables +#-- Create By : Sadanandegowda DM +#-- Created Date : Dec-2020 +#-- +#-- Modified Date Modified By Comments / Remarks +#-- ------------------------------------------------------------------------------------------ +#-- +#-- ------------------------------------------------------------------------------------------ + +#!/usr/bin/python +# -*- coding: utf-8 -*- +import sys + +import configparser +import psycopg2 +import datetime + +from configparser import ConfigParser +from datetime import datetime + +def config(filename='mosip_archive_prereg.ini', section='MOSIP-DB-SECTION'): + parser = ConfigParser() + parser.read(filename) + dbparam = {} + if parser.has_section(section): + params = parser.items(section) + for param in params: + dbparam[param[0]] = param[1] + else: + raise Exception('Section {0} not found in the {1} file'.format(section, filename)) + + return dbparam + +def getValues(row): + finalValues ="" + for values in row: + finalValues = finalValues+"'"+str(values)+"'," + + finalValues = finalValues[0:-1] + return finalValues + +def dataArchive(): + sourseConn = None + archiveConn = None + try: + + dbparam = config() + + print('Connecting to the PostgreSQL database...') + sourseConn = psycopg2.connect(user=dbparam["source_db_uname"], + password=dbparam["source_db_pass"], + host=dbparam["source_db_serverip"], + port=dbparam["source_db_port"], + database=dbparam["source_db_name"]) + archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"], + password=dbparam["archive_db_pass"], + host=dbparam["archive_db_serverip"], + port=dbparam["archive_db_port"], + database=dbparam["archive_db_name"]) + + sourceCur = sourseConn.cursor() + archiveCur = archiveConn.cursor() + + tableName=dbparam["archive_table1"] + sschemaName = dbparam["source_schema_name"] + aschemaName = dbparam["archive_schema_name"] + oldDays = dbparam["archive_older_than_days"] + + print(tableName) + select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'" + sourceCur.execute(select_query) + rows = sourceCur.fetchall() + select_count = sourceCur.rowcount + print(select_count, ": Record selected for archive from ", tableName) + if select_count > 0: + for row in rows: + rowValues = getValues(row) + insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")" + archiveCur.execute(insert_query) + archiveConn.commit() + insert_count = archiveCur.rowcount + print(insert_count, ": Record inserted successfully ") + if insert_count > 0: + delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE id ='"+row[0]+"'" + sourceCur.execute(delete_query) + sourseConn.commit() + delete_count = sourceCur.rowcount + print(delete_count, ": Record deleted successfully") + + except (Exception, psycopg2.DatabaseError) as error: + print(error) + finally: + if sourseConn is not None: + sourceCur.close() + sourseConn.close() + print('Database sourse connection closed.') + if archiveConn is not None: + archiveCur.close() + archiveConn.close() + print('Database archive connection closed.') + +if __name__ == '__main__': + dataArchive() diff --git 
a/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table2.py b/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table2.py new file mode 100644 index 00000000..fab08c65 --- /dev/null +++ b/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table2.py @@ -0,0 +1,107 @@ +#-- ------------------------------------------------------------------------------------------------- +#-- Job Name : ID Authentication DB Tables Archive +#-- DB Name : mosip_ida +#-- Table Names : applicant_document_consumed +#-- Purpose : Job to Archive Data in ID Authentication DB for above mentioned tables +#-- Create By : Sadanandegowda DM +#-- Created Date : Dec-2020 +#-- +#-- Modified Date Modified By Comments / Remarks +#-- ------------------------------------------------------------------------------------------ +#-- +#-- ------------------------------------------------------------------------------------------ + +#!/usr/bin/python +# -*- coding: utf-8 -*- +import sys + +import configparser +import psycopg2 +import datetime + +from configparser import ConfigParser +from datetime import datetime + +def config(filename='mosip_archive_prereg.ini', section='MOSIP-DB-SECTION'): + parser = ConfigParser() + parser.read(filename) + dbparam = {} + if parser.has_section(section): + params = parser.items(section) + for param in params: + dbparam[param[0]] = param[1] + else: + raise Exception('Section {0} not found in the {1} file'.format(section, filename)) + + return dbparam + +def getValues(row): + finalValues ="" + for values in row: + finalValues = finalValues+"'"+str(values)+"'," + + finalValues = finalValues[0:-1] + return finalValues + +def dataArchive(): + sourseConn = None + archiveConn = None + try: + + dbparam = config() + + print('Connecting to the PostgreSQL database...') + sourseConn = psycopg2.connect(user=dbparam["source_db_uname"], + password=dbparam["source_db_pass"], + host=dbparam["source_db_serverip"], + port=dbparam["source_db_port"], + database=dbparam["source_db_name"]) + archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"], + password=dbparam["archive_db_pass"], + host=dbparam["archive_db_serverip"], + port=dbparam["archive_db_port"], + database=dbparam["archive_db_name"]) + + sourceCur = sourseConn.cursor() + archiveCur = archiveConn.cursor() + + tableName=dbparam["archive_table2"] + sschemaName = dbparam["source_schema_name"] + aschemaName = dbparam["archive_schema_name"] + oldDays = dbparam["archive_older_than_days"] + + print(tableName) + select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'" + sourceCur.execute(select_query) + rows = sourceCur.fetchall() + select_count = sourceCur.rowcount + print(select_count, ": Record selected for archive from ", tableName) + if select_count > 0: + for row in rows: + rowValues = getValues(row) + insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")" + archiveCur.execute(insert_query) + archiveConn.commit() + insert_count = archiveCur.rowcount + print(insert_count, ": Record inserted successfully ") + if insert_count > 0: + delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE id ='"+row[0]+"'" + sourceCur.execute(delete_query) + sourseConn.commit() + delete_count = sourceCur.rowcount + print(delete_count, ": Record deleted successfully") + + except (Exception, psycopg2.DatabaseError) as error: + print(error) + finally: + if sourseConn is not None: + sourceCur.close() + sourseConn.close() + print('Database sourse connection closed.') + if 
archiveConn is not None: + archiveCur.close() + archiveConn.close() + print('Database archive connection closed.') + +if __name__ == '__main__': + dataArchive() diff --git a/data-archive/archive-jobs/mosip_ida/mosip_archive_job_ida.sh b/data-archive/archive-jobs/mosip_ida/mosip_archive_job_ida.sh new file mode 100644 index 00000000..0e867724 --- /dev/null +++ b/data-archive/archive-jobs/mosip_ida/mosip_archive_job_ida.sh @@ -0,0 +1,16 @@ +### -- --------------------------------------------------------------------------------------------------------- +### -- Script Name : IDA Archive Job +### -- Deploy Module : IDA +### -- Purpose : To Archive IDA tables which are marked for archive. +### -- Create By : Sadanandegowda DM +### -- Created Date : Dec-2020 +### -- +### -- Modified Date Modified By Comments / Remarks +### -- ---------------------------------------------------------------------------------------- + +python mosip_archive_ida_table1.py & +sleep 5m + +python mosip_archive_ida_table2.py & + +#=============================================================================================== diff --git a/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo.ini b/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo.ini new file mode 100644 index 00000000..ec62c41d --- /dev/null +++ b/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo.ini @@ -0,0 +1,20 @@ +[MOSIP-DB-SECTION] +source_db_serverip=13.233.223.29 +source_db_port=30090 +source_db_name=mosip_idrepo +source_schema_name=idrepo +source_db_uname=idrepouser +source_db_pass=Mosip@dev123 +archive_table1=uin_h +archive_table2=uin_biometric_h +archive_table3=uin_document_h + + +archive_db_serverip=13.233.223.29 +archive_db_port=30090 +archive_db_name=mosip_archive +archive_schema_name=archive +archive_db_uname=archiveuser +archive_db_pass=Mosip@dev123 + +archive_older_than_days = 2 \ No newline at end of file diff --git a/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table1.py b/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table1.py new file mode 100644 index 00000000..0f51685b --- /dev/null +++ b/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table1.py @@ -0,0 +1,107 @@ +#-- ------------------------------------------------------------------------------------------------- +#-- Job Name : ID Repository DB Tables Archive +#-- DB Name : mosip_idrepo +#-- Table Names : uin_h +#-- Purpose : Job to Archive Data in ID Repository DB for above mentioned tables +#-- Create By : Sadanandegowda DM +#-- Created Date : Dec-2020 +#-- +#-- Modified Date Modified By Comments / Remarks +#-- ------------------------------------------------------------------------------------------ +#-- +#-- ------------------------------------------------------------------------------------------ + +#!/usr/bin/python +# -*- coding: utf-8 -*- +import sys + +import configparser +import psycopg2 +import datetime + +from configparser import ConfigParser +from datetime import datetime + +def config(filename='mosip_archive_prereg.ini', section='MOSIP-DB-SECTION'): + parser = ConfigParser() + parser.read(filename) + dbparam = {} + if parser.has_section(section): + params = parser.items(section) + for param in params: + dbparam[param[0]] = param[1] + else: + raise Exception('Section {0} not found in the {1} file'.format(section, filename)) + + return dbparam + +def getValues(row): + finalValues ="" + for values in row: + finalValues = finalValues+"'"+str(values)+"'," + + finalValues = finalValues[0:-1] + return 
finalValues + +def dataArchive(): + sourseConn = None + archiveConn = None + try: + + dbparam = config() + + print('Connecting to the PostgreSQL database...') + sourseConn = psycopg2.connect(user=dbparam["source_db_uname"], + password=dbparam["source_db_pass"], + host=dbparam["source_db_serverip"], + port=dbparam["source_db_port"], + database=dbparam["source_db_name"]) + archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"], + password=dbparam["archive_db_pass"], + host=dbparam["archive_db_serverip"], + port=dbparam["archive_db_port"], + database=dbparam["archive_db_name"]) + + sourceCur = sourseConn.cursor() + archiveCur = archiveConn.cursor() + + tableName=dbparam["archive_table1"] + sschemaName = dbparam["source_schema_name"] + aschemaName = dbparam["archive_schema_name"] + oldDays = dbparam["archive_older_than_days"] + + print(tableName) + select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'" + sourceCur.execute(select_query) + rows = sourceCur.fetchall() + select_count = sourceCur.rowcount + print(select_count, ": Record selected for archive from ", tableName) + if select_count > 0: + for row in rows: + rowValues = getValues(row) + insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")" + archiveCur.execute(insert_query) + archiveConn.commit() + insert_count = archiveCur.rowcount + print(insert_count, ": Record inserted successfully ") + if insert_count > 0: + delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE uin_ref_id ='"+row[0]+"'AND eff_dtimes='"+row[1]+"'" + sourceCur.execute(delete_query) + sourseConn.commit() + delete_count = sourceCur.rowcount + print(delete_count, ": Record deleted successfully") + + except (Exception, psycopg2.DatabaseError) as error: + print(error) + finally: + if sourseConn is not None: + sourceCur.close() + sourseConn.close() + print('Database sourse connection closed.') + if archiveConn is not None: + archiveCur.close() + archiveConn.close() + print('Database archive connection closed.') + +if __name__ == '__main__': + dataArchive() diff --git a/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table2.py b/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table2.py new file mode 100644 index 00000000..ba6f98f7 --- /dev/null +++ b/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table2.py @@ -0,0 +1,107 @@ +#-- ------------------------------------------------------------------------------------------------- +#-- Job Name : ID Repository DB Tables Archive +#-- DB Name : mosip_idrepo +#-- Table Names : uin_biometric_h +#-- Purpose : Job to Archive Data in ID Repository DB for above mentioned tables +#-- Create By : Sadanandegowda DM +#-- Created Date : Dec-2020 +#-- +#-- Modified Date Modified By Comments / Remarks +#-- ------------------------------------------------------------------------------------------ +#-- +#-- ------------------------------------------------------------------------------------------ + +#!/usr/bin/python +# -*- coding: utf-8 -*- +import sys + +import configparser +import psycopg2 +import datetime + +from configparser import ConfigParser +from datetime import datetime + +def config(filename='mosip_archive_prereg.ini', section='MOSIP-DB-SECTION'): + parser = ConfigParser() + parser.read(filename) + dbparam = {} + if parser.has_section(section): + params = parser.items(section) + for param in params: + dbparam[param[0]] = param[1] + else: + raise Exception('Section {0} not found in the {1} 
file'.format(section, filename)) + + return dbparam + +def getValues(row): + finalValues ="" + for values in row: + finalValues = finalValues+"'"+str(values)+"'," + + finalValues = finalValues[0:-1] + return finalValues + +def dataArchive(): + sourseConn = None + archiveConn = None + try: + + dbparam = config() + + print('Connecting to the PostgreSQL database...') + sourseConn = psycopg2.connect(user=dbparam["source_db_uname"], + password=dbparam["source_db_pass"], + host=dbparam["source_db_serverip"], + port=dbparam["source_db_port"], + database=dbparam["source_db_name"]) + archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"], + password=dbparam["archive_db_pass"], + host=dbparam["archive_db_serverip"], + port=dbparam["archive_db_port"], + database=dbparam["archive_db_name"]) + + sourceCur = sourseConn.cursor() + archiveCur = archiveConn.cursor() + + tableName=dbparam["archive_table2"] + sschemaName = dbparam["source_schema_name"] + aschemaName = dbparam["archive_schema_name"] + oldDays = dbparam["archive_older_than_days"] + + print(tableName) + select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'" + sourceCur.execute(select_query) + rows = sourceCur.fetchall() + select_count = sourceCur.rowcount + print(select_count, ": Record selected for archive from ", tableName) + if select_count > 0: + for row in rows: + rowValues = getValues(row) + insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")" + archiveCur.execute(insert_query) + archiveConn.commit() + insert_count = archiveCur.rowcount + print(insert_count, ": Record inserted successfully ") + if insert_count > 0: + delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE uin_ref_id ='"+row[0]+"'AND biometric_file_type='"+row[1]+"'AND eff_dtimes='"+row[2]+"'" + sourceCur.execute(delete_query) + sourseConn.commit() + delete_count = sourceCur.rowcount + print(delete_count, ": Record deleted successfully") + + except (Exception, psycopg2.DatabaseError) as error: + print(error) + finally: + if sourseConn is not None: + sourceCur.close() + sourseConn.close() + print('Database sourse connection closed.') + if archiveConn is not None: + archiveCur.close() + archiveConn.close() + print('Database archive connection closed.') + +if __name__ == '__main__': + dataArchive() diff --git a/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table3.py b/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table3.py new file mode 100644 index 00000000..e73ed331 --- /dev/null +++ b/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table3.py @@ -0,0 +1,107 @@ +#-- ------------------------------------------------------------------------------------------------- +#-- Job Name : ID Repository DB Tables Archive +#-- DB Name : mosip_idrepo +#-- Table Names : uin_document_h +#-- Purpose : Job to Archive Data in ID Repository DB for above mentioned tables +#-- Create By : Sadanandegowda DM +#-- Created Date : Dec-2020 +#-- +#-- Modified Date Modified By Comments / Remarks +#-- ------------------------------------------------------------------------------------------ +#-- +#-- ------------------------------------------------------------------------------------------ + +#!/usr/bin/python +# -*- coding: utf-8 -*- +import sys + +import configparser +import psycopg2 +import datetime + +from configparser import ConfigParser +from datetime import datetime + +def config(filename='mosip_archive_prereg.ini', section='MOSIP-DB-SECTION'): + parser 
= ConfigParser() + parser.read(filename) + dbparam = {} + if parser.has_section(section): + params = parser.items(section) + for param in params: + dbparam[param[0]] = param[1] + else: + raise Exception('Section {0} not found in the {1} file'.format(section, filename)) + + return dbparam + +def getValues(row): + finalValues ="" + for values in row: + finalValues = finalValues+"'"+str(values)+"'," + + finalValues = finalValues[0:-1] + return finalValues + +def dataArchive(): + sourseConn = None + archiveConn = None + try: + + dbparam = config() + + print('Connecting to the PostgreSQL database...') + sourseConn = psycopg2.connect(user=dbparam["source_db_uname"], + password=dbparam["source_db_pass"], + host=dbparam["source_db_serverip"], + port=dbparam["source_db_port"], + database=dbparam["source_db_name"]) + archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"], + password=dbparam["archive_db_pass"], + host=dbparam["archive_db_serverip"], + port=dbparam["archive_db_port"], + database=dbparam["archive_db_name"]) + + sourceCur = sourseConn.cursor() + archiveCur = archiveConn.cursor() + + tableName=dbparam["archive_table3"] + sschemaName = dbparam["source_schema_name"] + aschemaName = dbparam["archive_schema_name"] + oldDays = dbparam["archive_older_than_days"] + + print(tableName) + select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'" + sourceCur.execute(select_query) + rows = sourceCur.fetchall() + select_count = sourceCur.rowcount + print(select_count, ": Record selected for archive from ", tableName) + if select_count > 0: + for row in rows: + rowValues = getValues(row) + insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")" + archiveCur.execute(insert_query) + archiveConn.commit() + insert_count = archiveCur.rowcount + print(insert_count, ": Record inserted successfully ") + if insert_count > 0: + delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE uin_ref_id ='"+row[0]+"'AND doccat_code='"+row[1]+"'AND eff_dtimes='"+row[3]+"'" + sourceCur.execute(delete_query) + sourseConn.commit() + delete_count = sourceCur.rowcount + print(delete_count, ": Record deleted successfully") + + except (Exception, psycopg2.DatabaseError) as error: + print(error) + finally: + if sourseConn is not None: + sourceCur.close() + sourseConn.close() + print('Database sourse connection closed.') + if archiveConn is not None: + archiveCur.close() + archiveConn.close() + print('Database archive connection closed.') + +if __name__ == '__main__': + dataArchive() diff --git a/data-archive/archive-jobs/mosip_idrepo/mosip_archive_job_idrepo.sh b/data-archive/archive-jobs/mosip_idrepo/mosip_archive_job_idrepo.sh new file mode 100644 index 00000000..53433df9 --- /dev/null +++ b/data-archive/archive-jobs/mosip_idrepo/mosip_archive_job_idrepo.sh @@ -0,0 +1,19 @@ +### -- --------------------------------------------------------------------------------------------------------- +### -- Script Name : ID Repository Archive Job +### -- Deploy Module : Pre registration +### -- Purpose : To Archive ID Repository tables which are marked for archive. 
+### -- Create By : Sadanandegowda DM +### -- Created Date : Dec-2020 +### -- +### -- Modified Date Modified By Comments / Remarks +### -- ---------------------------------------------------------------------------------------- + +python mosip_archive_idrepo_table1.py & +sleep 5m + +python mosip_archive_idrepo_table2.py & +sleep 5m + +python mosip_archive_idrepo_table3.py & + +#=============================================================================================== diff --git a/data-archive/archive-jobs/mosip_prereg/mosip_archive_job_prereg.sh b/data-archive/archive-jobs/mosip_prereg/mosip_archive_job_prereg.sh new file mode 100644 index 00000000..31b35123 --- /dev/null +++ b/data-archive/archive-jobs/mosip_prereg/mosip_archive_job_prereg.sh @@ -0,0 +1,22 @@ +### -- --------------------------------------------------------------------------------------------------------- +### -- Script Name : Pre Registration Archive Job +### -- Deploy Module : Pre registration +### -- Purpose : To Archive Pre Registration tables which are marked for archive. +### -- Create By : Sadanandegowda DM +### -- Created Date : Dec-2020 +### -- +### -- Modified Date Modified By Comments / Remarks +### -- ---------------------------------------------------------------------------------------- + +python mosip_archive_prereg_table1.py & +sleep 5m + +python mosip_archive_prereg_table2.py & +sleep 5m + +python mosip_archive_prereg_table3.py & +sleep 5m + +python mosip_archive_prereg_table4.py & + +#=============================================================================================== diff --git a/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg.ini b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg.ini new file mode 100644 index 00000000..58cf5b8a --- /dev/null +++ b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg.ini @@ -0,0 +1,20 @@ +[MOSIP-DB-SECTION] +source_db_serverip=13.233.223.29 +source_db_port=30090 +source_db_name=mosip_prereg +source_schema_name=prereg +source_db_uname=prereguser +source_db_pass=Mosip@dev123 +archive_table1=applicant_demographic_consumed +archive_table2=applicant_document_consumed +archive_table3=reg_appointment_consumed +archive_table4=processed_prereg_list + +archive_db_serverip=13.233.223.29 +archive_db_port=30090 +archive_db_name=mosip_archive +archive_schema_name=archive +archive_db_uname=archiveuser +archive_db_pass=Mosip@dev123 + +archive_older_than_days = 2 \ No newline at end of file diff --git a/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table1.py b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table1.py new file mode 100644 index 00000000..ebf1bbe7 --- /dev/null +++ b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table1.py @@ -0,0 +1,107 @@ +#-- ------------------------------------------------------------------------------------------------- +#-- Job Name : Pre Registration DB Tables Archive +#-- DB Name : mosip_prereg +#-- Table Names : applicant_demographic_consumed +#-- Purpose : Job to Archive Data in pre registration DB for above mentioned tables +#-- Create By : Sadanandegowda DM +#-- Created Date : Dec-2020 +#-- +#-- Modified Date Modified By Comments / Remarks +#-- ------------------------------------------------------------------------------------------ +#-- +#-- ------------------------------------------------------------------------------------------ + +#!/usr/bin/python +# -*- coding: utf-8 -*- +import sys + +import configparser +import psycopg2 +import datetime + +from 
configparser import ConfigParser +from datetime import datetime + +def config(filename='mosip_archive_prereg.ini', section='MOSIP-DB-SECTION'): + parser = ConfigParser() + parser.read(filename) + dbparam = {} + if parser.has_section(section): + params = parser.items(section) + for param in params: + dbparam[param[0]] = param[1] + else: + raise Exception('Section {0} not found in the {1} file'.format(section, filename)) + + return dbparam + +def getValues(row): + finalValues ="" + for values in row: + finalValues = finalValues+"'"+str(values)+"'," + + finalValues = finalValues[0:-1] + return finalValues + +def dataArchive(): + sourseConn = None + archiveConn = None + try: + + dbparam = config() + + print('Connecting to the PostgreSQL database...') + sourseConn = psycopg2.connect(user=dbparam["source_db_uname"], + password=dbparam["source_db_pass"], + host=dbparam["source_db_serverip"], + port=dbparam["source_db_port"], + database=dbparam["source_db_name"]) + archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"], + password=dbparam["archive_db_pass"], + host=dbparam["archive_db_serverip"], + port=dbparam["archive_db_port"], + database=dbparam["archive_db_name"]) + + sourceCur = sourseConn.cursor() + archiveCur = archiveConn.cursor() + + tableName=dbparam["archive_table1"] + sschemaName = dbparam["source_schema_name"] + aschemaName = dbparam["archive_schema_name"] + oldDays = dbparam["archive_older_than_days"] + + print(tableName) + select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'" + sourceCur.execute(select_query) + rows = sourceCur.fetchall() + select_count = sourceCur.rowcount + print(select_count, ": Record selected for archive from ", tableName) + if select_count > 0: + for row in rows: + rowValues = getValues(row) + insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")" + archiveCur.execute(insert_query) + archiveConn.commit() + insert_count = archiveCur.rowcount + print(insert_count, ": Record inserted successfully ") + if insert_count > 0: + delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE prereg_id ='"+row[0]+"'" + sourceCur.execute(delete_query) + sourseConn.commit() + delete_count = sourceCur.rowcount + print(delete_count, ": Record deleted successfully") + + except (Exception, psycopg2.DatabaseError) as error: + print(error) + finally: + if sourseConn is not None: + sourceCur.close() + sourseConn.close() + print('Database sourse connection closed.') + if archiveConn is not None: + archiveCur.close() + archiveConn.close() + print('Database archive connection closed.') + +if __name__ == '__main__': + dataArchive() diff --git a/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table2.py b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table2.py new file mode 100644 index 00000000..db2fca1e --- /dev/null +++ b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table2.py @@ -0,0 +1,107 @@ +#-- ------------------------------------------------------------------------------------------------- +#-- Job Name : Pre Registration DB Tables Archive +#-- DB Name : mosip_prereg +#-- Table Names : applicant_document_consumed +#-- Purpose : Job to Archive Data in pre registration DB for above mentioned tables +#-- Create By : Sadanandegowda DM +#-- Created Date : Dec-2020 +#-- +#-- Modified Date Modified By Comments / Remarks +#-- ------------------------------------------------------------------------------------------ +#-- +#-- 
------------------------------------------------------------------------------------------ + +#!/usr/bin/python +# -*- coding: utf-8 -*- +import sys + +import configparser +import psycopg2 +import datetime + +from configparser import ConfigParser +from datetime import datetime + +def config(filename='mosip_archive_prereg.ini', section='MOSIP-DB-SECTION'): + parser = ConfigParser() + parser.read(filename) + dbparam = {} + if parser.has_section(section): + params = parser.items(section) + for param in params: + dbparam[param[0]] = param[1] + else: + raise Exception('Section {0} not found in the {1} file'.format(section, filename)) + + return dbparam + +def getValues(row): + finalValues ="" + for values in row: + finalValues = finalValues+"'"+str(values)+"'," + + finalValues = finalValues[0:-1] + return finalValues + +def dataArchive(): + sourseConn = None + archiveConn = None + try: + + dbparam = config() + + print('Connecting to the PostgreSQL database...') + sourseConn = psycopg2.connect(user=dbparam["source_db_uname"], + password=dbparam["source_db_pass"], + host=dbparam["source_db_serverip"], + port=dbparam["source_db_port"], + database=dbparam["source_db_name"]) + archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"], + password=dbparam["archive_db_pass"], + host=dbparam["archive_db_serverip"], + port=dbparam["archive_db_port"], + database=dbparam["archive_db_name"]) + + sourceCur = sourseConn.cursor() + archiveCur = archiveConn.cursor() + + tableName=dbparam["archive_table2"] + sschemaName = dbparam["source_schema_name"] + aschemaName = dbparam["archive_schema_name"] + oldDays = dbparam["archive_older_than_days"] + + print(tableName) + select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'" + sourceCur.execute(select_query) + rows = sourceCur.fetchall() + select_count = sourceCur.rowcount + print(select_count, ": Record selected for archive from ", tableName) + if select_count > 0: + for row in rows: + rowValues = getValues(row) + insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")" + archiveCur.execute(insert_query) + archiveConn.commit() + insert_count = archiveCur.rowcount + print(insert_count, ": Record inserted successfully ") + if insert_count > 0: + delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE id ='"+row[0]+"'" + sourceCur.execute(delete_query) + sourseConn.commit() + delete_count = sourceCur.rowcount + print(delete_count, ": Record deleted successfully") + + except (Exception, psycopg2.DatabaseError) as error: + print(error) + finally: + if sourseConn is not None: + sourceCur.close() + sourseConn.close() + print('Database sourse connection closed.') + if archiveConn is not None: + archiveCur.close() + archiveConn.close() + print('Database archive connection closed.') + +if __name__ == '__main__': + dataArchive() diff --git a/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table3.py b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table3.py new file mode 100644 index 00000000..d1422594 --- /dev/null +++ b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table3.py @@ -0,0 +1,107 @@ +#-- ------------------------------------------------------------------------------------------------- +#-- Job Name : Pre Registration DB Tables Archive +#-- DB Name : mosip_prereg +#-- Table Names : applicant_appointment_consumed +#-- Purpose : Job to Archive Data in pre registration DB for above mentioned tables +#-- Create By : Sadanandegowda DM 
+#-- Created Date : Dec-2020 +#-- +#-- Modified Date Modified By Comments / Remarks +#-- ------------------------------------------------------------------------------------------ +#-- +#-- ------------------------------------------------------------------------------------------ + +#!/usr/bin/python +# -*- coding: utf-8 -*- +import sys + +import configparser +import psycopg2 +import datetime + +from configparser import ConfigParser +from datetime import datetime + +def config(filename='mosip_archive_prereg.ini', section='MOSIP-DB-SECTION'): + parser = ConfigParser() + parser.read(filename) + dbparam = {} + if parser.has_section(section): + params = parser.items(section) + for param in params: + dbparam[param[0]] = param[1] + else: + raise Exception('Section {0} not found in the {1} file'.format(section, filename)) + + return dbparam + +def getValues(row): + finalValues ="" + for values in row: + finalValues = finalValues+"'"+str(values)+"'," + + finalValues = finalValues[0:-1] + return finalValues + +def dataArchive(): + sourseConn = None + archiveConn = None + try: + + dbparam = config() + + print('Connecting to the PostgreSQL database...') + sourseConn = psycopg2.connect(user=dbparam["source_db_uname"], + password=dbparam["source_db_pass"], + host=dbparam["source_db_serverip"], + port=dbparam["source_db_port"], + database=dbparam["source_db_name"]) + archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"], + password=dbparam["archive_db_pass"], + host=dbparam["archive_db_serverip"], + port=dbparam["archive_db_port"], + database=dbparam["archive_db_name"]) + + sourceCur = sourseConn.cursor() + archiveCur = archiveConn.cursor() + + tableName=dbparam["archive_table3"] + sschemaName = dbparam["source_schema_name"] + aschemaName = dbparam["archive_schema_name"] + oldDays = dbparam["archive_older_than_days"] + + print(tableName) + select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'" + sourceCur.execute(select_query) + rows = sourceCur.fetchall() + select_count = sourceCur.rowcount + print(select_count, ": Record selected for archive from ", tableName) + if select_count > 0: + for row in rows: + rowValues = getValues(row) + insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")" + archiveCur.execute(insert_query) + archiveConn.commit() + insert_count = archiveCur.rowcount + print(insert_count, ": Record inserted successfully ") + if insert_count > 0: + delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE id ='"+row[0]+"'" + sourceCur.execute(delete_query) + sourseConn.commit() + delete_count = sourceCur.rowcount + print(delete_count, ": Record deleted successfully") + + except (Exception, psycopg2.DatabaseError) as error: + print(error) + finally: + if sourseConn is not None: + sourceCur.close() + sourseConn.close() + print('Database sourse connection closed.') + if archiveConn is not None: + archiveCur.close() + archiveConn.close() + print('Database archive connection closed.') + +if __name__ == '__main__': + dataArchive() diff --git a/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table4.py b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table4.py new file mode 100644 index 00000000..ebf1bbe7 --- /dev/null +++ b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table4.py @@ -0,0 +1,107 @@ +#-- ------------------------------------------------------------------------------------------------- +#-- Job Name : Pre Registration DB Tables Archive +#-- DB 
Name : mosip_prereg
+#-- Table Names : processed_prereg_list
+#-- Purpose : Job to Archive Data in pre registration DB for above mentioned tables
+#-- Create By : Sadanandegowda DM
+#-- Created Date : Dec-2020
+#--
+#-- Modified Date Modified By Comments / Remarks
+#-- ------------------------------------------------------------------------------------------
+#--
+#-- ------------------------------------------------------------------------------------------
+
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+import sys
+
+import configparser
+import psycopg2
+import datetime
+
+from configparser import ConfigParser
+from datetime import datetime
+
+def config(filename='mosip_archive_prereg.ini', section='MOSIP-DB-SECTION'):
+    parser = ConfigParser()
+    parser.read(filename)
+    dbparam = {}
+    if parser.has_section(section):
+        params = parser.items(section)
+        for param in params:
+            dbparam[param[0]] = param[1]
+    else:
+        raise Exception('Section {0} not found in the {1} file'.format(section, filename))
+
+    return dbparam
+
+def getValues(row):
+    finalValues = ""
+    for values in row:
+        finalValues = finalValues+"'"+str(values)+"',"
+
+    finalValues = finalValues[0:-1]
+    return finalValues
+
+def dataArchive():
+    sourseConn = None
+    archiveConn = None
+    try:
+
+        dbparam = config()
+
+        print('Connecting to the PostgreSQL database...')
+        sourseConn = psycopg2.connect(user=dbparam["source_db_uname"],
+                                      password=dbparam["source_db_pass"],
+                                      host=dbparam["source_db_serverip"],
+                                      port=dbparam["source_db_port"],
+                                      database=dbparam["source_db_name"])
+        archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"],
+                                       password=dbparam["archive_db_pass"],
+                                       host=dbparam["archive_db_serverip"],
+                                       port=dbparam["archive_db_port"],
+                                       database=dbparam["archive_db_name"])
+
+        sourceCur = sourseConn.cursor()
+        archiveCur = archiveConn.cursor()
+
+        tableName=dbparam["archive_table4"]
+        sschemaName = dbparam["source_schema_name"]
+        aschemaName = dbparam["archive_schema_name"]
+        oldDays = dbparam["archive_older_than_days"]
+
+        print(tableName)
+        select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'"
+        sourceCur.execute(select_query)
+        rows = sourceCur.fetchall()
+        select_count = sourceCur.rowcount
+        print(select_count, ": Record selected for archive from ", tableName)
+        if select_count > 0:
+            for row in rows:
+                rowValues = getValues(row)
+                insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")"
+                archiveCur.execute(insert_query)
+                archiveConn.commit()
+                insert_count = archiveCur.rowcount
+                print(insert_count, ": Record inserted successfully ")
+                if insert_count > 0:
+                    delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE prereg_id ='"+row[0]+"'"
+                    sourceCur.execute(delete_query)
+                    sourseConn.commit()
+                    delete_count = sourceCur.rowcount
+                    print(delete_count, ": Record deleted successfully")
+
+    except (Exception, psycopg2.DatabaseError) as error:
+        print(error)
+    finally:
+        if sourseConn is not None:
+            sourceCur.close()
+            sourseConn.close()
+            print('Database sourse connection closed.')
+        if archiveConn is not None:
+            archiveCur.close()
+            archiveConn.close()
+            print('Database archive connection closed.')
+
+if __name__ == '__main__':
+    dataArchive()
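The per-table jobs above assemble SQL by string concatenation, which breaks on values containing quotes and leaves the jobs open to SQL injection; casting every column through str() also risks corrupting timestamp and binary columns. Below is a hardened sketch of the same select-insert-delete loop, not the job's actual code: it assumes psycopg2 and the same ini layout as the scripts above, and the `archive_rows` helper name is illustrative.

```python
# Hardened sketch of the move-then-delete loop used by the jobs above.
# psycopg2's sql module composes schema/table/column identifiers safely,
# and %s placeholders bind values without manual quoting.
import psycopg2
from psycopg2 import sql

def archive_rows(source_conn, archive_conn, src_schema, dst_schema,
                 table, key_column, older_than_days):
    with source_conn.cursor() as src, archive_conn.cursor() as dst:
        # Select rows older than the configured retention window.
        src.execute(
            sql.SQL("SELECT * FROM {}.{} WHERE cr_dtimes < NOW() - %s::interval")
               .format(sql.Identifier(src_schema), sql.Identifier(table)),
            ('%d days' % int(older_than_days),))
        rows = src.fetchall()
        for row in rows:
            # One placeholder per column; values are bound, never concatenated.
            placeholders = sql.SQL(', ').join(sql.Placeholder() * len(row))
            dst.execute(
                sql.SQL("INSERT INTO {}.{} VALUES ({})")
                   .format(sql.Identifier(dst_schema), sql.Identifier(table),
                           placeholders),
                row)
            archive_conn.commit()
            # Delete the archived row from the source by its key column.
            src.execute(
                sql.SQL("DELETE FROM {}.{} WHERE {} = %s")
                   .format(sql.Identifier(src_schema), sql.Identifier(table),
                           sql.Identifier(key_column)),
                (row[0],))
            source_conn.commit()
        return len(rows)
```

With this shape, each per-table script would reduce to a single call such as `archive_rows(sourseConn, archiveConn, sschemaName, aschemaName, tableName, 'prereg_id', oldDays)`; tables with composite keys, like the idrepo history tables, would pass the full key column list instead of a single column.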
diff --git a/data-archive/archive-jobs/mosip_regprc/mosip_archive_job_regprc.sh b/data-archive/archive-jobs/mosip_regprc/mosip_archive_job_regprc.sh
new file mode 100644
index 00000000..cfa6acbf
--- /dev/null
+++ b/data-archive/archive-jobs/mosip_regprc/mosip_archive_job_regprc.sh
@@ -0,0 +1,13 @@
+### -- ---------------------------------------------------------------------------------------------------------
+### -- Script Name : Registration Processor Archive Job
+### -- Deploy Module : Registration Processor
+### -- Purpose : To Archive Registration Processor tables which are marked for archive.
+### -- Create By : Sadanandegowda DM
+### -- Created Date : Dec-2020
+### --
+### -- Modified Date Modified By Comments / Remarks
+### -- ----------------------------------------------------------------------------------------
+
+python mosip_archive_regprc_table1.py &
+
+#===============================================================================================
diff --git a/data-archive/archive-jobs/mosip_regprc/mosip_archive_regprc.ini b/data-archive/archive-jobs/mosip_regprc/mosip_archive_regprc.ini
new file mode 100644
index 00000000..ff68deda
--- /dev/null
+++ b/data-archive/archive-jobs/mosip_regprc/mosip_archive_regprc.ini
@@ -0,0 +1,17 @@
+[MOSIP-DB-SECTION]
+source_db_serverip=13.233.223.29
+source_db_port=30090
+source_db_name=mosip_regprc
+source_schema_name=regprc
+source_db_uname=regprcuser
+source_db_pass=Mosip@dev123
+archive_table1=registration_transaction
+
+archive_db_serverip=13.233.223.29
+archive_db_port=30090
+archive_db_name=mosip_archive
+archive_schema_name=archive
+archive_db_uname=archiveuser
+archive_db_pass=Mosip@dev123
+
+archive_older_than_days = 2
\ No newline at end of file
diff --git a/data-archive/archive-jobs/mosip_regprc/mosip_archive_regprc_table1.py b/data-archive/archive-jobs/mosip_regprc/mosip_archive_regprc_table1.py
new file mode 100644
index 00000000..a58aabb2
--- /dev/null
+++ b/data-archive/archive-jobs/mosip_regprc/mosip_archive_regprc_table1.py
@@ -0,0 +1,107 @@
+#-- -------------------------------------------------------------------------------------------------
+#-- Job Name : Registration Processor DB Tables Archive
+#-- DB Name : mosip_regprc
+#-- Table Names : registration_transaction
+#-- Purpose : Job to Archive Data in registration processor DB for above mentioned tables
+#-- Create By : Sadanandegowda DM
+#-- Created Date : Dec-2020
+#--
+#-- Modified Date Modified By Comments / Remarks
+#-- ------------------------------------------------------------------------------------------
+#--
+#-- ------------------------------------------------------------------------------------------
+
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+import sys
+
+import configparser
+import psycopg2
+import datetime
+
+from configparser import ConfigParser
+from datetime import datetime
+
+def config(filename='mosip_archive_regprc.ini', section='MOSIP-DB-SECTION'):
+    parser = ConfigParser()
+    parser.read(filename)
+    dbparam = {}
+    if parser.has_section(section):
+        params = parser.items(section)
+        for param in params:
+            dbparam[param[0]] = param[1]
+    else:
+        raise Exception('Section {0} not found in the {1} file'.format(section, filename))
+
+    return dbparam
+
+def getValues(row):
+    finalValues = ""
+    for values in row:
+        finalValues = finalValues+"'"+str(values)+"',"
+
+    finalValues = finalValues[0:-1]
+    return finalValues
+
+def dataArchive():
+    sourseConn = None
+    archiveConn = None
+    try:
+
+        dbparam = config()
+
+        print('Connecting to the PostgreSQL database...')
+        sourseConn = psycopg2.connect(user=dbparam["source_db_uname"],
+                                      password=dbparam["source_db_pass"],
+                                      host=dbparam["source_db_serverip"],
+                                      port=dbparam["source_db_port"],
+                                      database=dbparam["source_db_name"])
+        archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"],
+                                       password=dbparam["archive_db_pass"],
+                                       host=dbparam["archive_db_serverip"],
+                                       port=dbparam["archive_db_port"],
+                                       database=dbparam["archive_db_name"])
+
+        sourceCur = sourseConn.cursor()
+        archiveCur = archiveConn.cursor()
+
+        tableName=dbparam["archive_table1"]
+        sschemaName = dbparam["source_schema_name"]
+        aschemaName = dbparam["archive_schema_name"]
+        oldDays = dbparam["archive_older_than_days"]
+
+        print(tableName)
+        select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'"
+        sourceCur.execute(select_query)
+        rows = sourceCur.fetchall()
+        select_count = sourceCur.rowcount
+        print(select_count, ": Record selected for archive from ", tableName)
+        if select_count > 0:
+            for row in rows:
+                rowValues = getValues(row)
+                insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")"
+                archiveCur.execute(insert_query)
+                archiveConn.commit()
+                insert_count = archiveCur.rowcount
+                print(insert_count, ": Record inserted successfully ")
+                if insert_count > 0:
+                    delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE id ='"+row[0]+"'"
+                    sourceCur.execute(delete_query)
+                    sourseConn.commit()
+                    delete_count = sourceCur.rowcount
+                    print(delete_count, ": Record deleted successfully")
+
+    except (Exception, psycopg2.DatabaseError) as error:
+        print(error)
+    finally:
+        if sourseConn is not None:
+            sourceCur.close()
+            sourseConn.close()
+            print('Database sourse connection closed.')
+        if archiveConn is not None:
+            archiveCur.close()
+            archiveConn.close()
+            print('Database archive connection closed.')
+
+if __name__ == '__main__':
+    dataArchive()
diff --git a/data-archive/db_scripts/README.MD b/data-archive/db_scripts/README.MD
new file mode 100644
index 00000000..4d2ff543
--- /dev/null
+++ b/data-archive/db_scripts/README.MD
@@ -0,0 +1,178 @@
+## MOSIP Commons module databases (**mosip_master, mosip_kernel, mosip_idrepo, mosip_idmap, mosip_iam, mosip_audit**): scripts inventory and deployment guidelines for PostgreSQL
+
+#### The details below give clear information on the complete database script structure, along with instructions for deploying the database scripts.
+
+## Prerequisites
+
+* DB server and access details.
+
+* The Postgres client (psql) must be installed on the deployment servers.
+
+* Copy the latest database scripts (DDL, DML, .sh, etc.) from the git repository onto the DB deployment server.
+
+* Update the necessary details in the properties file against the relevant variables being used (details listed below).
+
+* Database objects related to MOSIP modules are placed in the **mosip_base_directory**>>db_scripts>>mosip_<module_name> folder in the git repository.
+
+**Example:** the Commons module script folder is /**mosip_base_directory**>>db_scripts>>mosip_kernel, where all the database scripts related to kernel are available.
+
+* Create a log file directory on the DB deployment server before updating the properties file. Follow the step below to create it:
+
+   bash-4.2$ mkdir /mosip_base_directory/
+
+* If the log files should be placed under a directory other than the one mentioned above, create that directory and specify its path in the properties file.
+
+* Pull the DB deployment scripts from the Git repository to the deployment server and start deploying, OR
+
+* If you are pulling them to a local system from the Git repository and pushing them to the deployment server using WinSCP, modify the following encoding settings in WinSCP before pushing the files to the deployment server --> Open WinSCP --> Options --> Preferences --> Transfer --> Edit --> In the "Transfer mode" section --> select "Text" --> Click Ok --> Click Ok
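A quick connectivity check against the target server can save a failed deployment run later. The sketch below is illustrative only: it assumes psycopg2 is available on the deployment server, and the host, port, and credentials are placeholders for the values that will go into the properties file.

```python
# Connectivity probe (sketch): all values below are placeholders for the
# server details the properties file will carry (DB_SERVERIP, DB_PORT, ...).
import psycopg2

conn = psycopg2.connect(user='postgres',           # SU_USER
                        password='<su_password>',  # SU_USER_PWD (placeholder)
                        host='10.0.0.1',           # DB_SERVERIP
                        port='5433',               # DB_PORT
                        database='postgres')       # DEFAULT_DB_NAME
with conn.cursor() as cur:
    cur.execute('SELECT version()')
    print(cur.fetchone()[0])  # prints the PostgreSQL version string on success
conn.close()
```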
+## Each database folder has the following files / folders
+
+* **ddl folder:** Contains all the data definition language (DDL) scripts that create or alter the database objects of this module.
+
+* **dml folder:** Contains the scripts (insert/update/delete) that create the seed data / metadata needed to run this module.
+
+* **mosip_<db_name>_db.sql:** The database creation script of this module.
+
+* **mosip_<db_name>_grants.sql:** The privilege / grant scripts assigned to database users / roles to access database objects.
+
+* **mosip_role_<db_user>.sql:** The creation script for the role that the application uses to perform DML operations.
+
+* **mosip_role_common.sql:** The creation script for the common roles needed to manage the database.
+
+* **mosip_<db_name>_ddl_deploy.sql:** A wrapper script used to **deploy the DDL scripts available in the ddl folder**. It also sets the script run sequence so that dependencies across the DB objects being created are honoured.
+
+* **mosip_<db_name>_dml_deploy.sql:** A wrapper script used to **deploy the DML scripts available in the dml folder**. It also sets the script run sequence to manage dependencies across DB objects.
+
+* **mosip_<db_name>_db_deploy.sh:** The deployment shell script present in each database folder.
+
+* **mosip_<db_name>_deploy.properties:** The properties file present in each database folder.
+
+* **mosip_commons_db_deployment.sh:** The .sh file present in the /home/madmin/database directory, which deploys all Commons databases in a single command execution.
+
+**Note:** Not all modules have DML scripts. Make the necessary changes to the DML variables in the properties file for the modules where DML exists.
+
+**Note:** Nothing needs to change in the shell script unless it is genuinely causing a problem or a further implementation is being introduced.
+
+Once sourcing of the database files is complete, follow the DB deployment process below after modifying the properties file according to the requirement.
+
+## Deployment can be performed in two ways based on the requirement:
+1) DB deployment for all Commons module databases
+2) DB deployment for a single database or selected databases
+
+### Properties file variable details and description: the properties file has to be updated with the required details before proceeding with the deployment steps for each database.
+
+**DB_SERVERIP:** The destination DB server IP (ex: 10.0.0.1) where the deployment is targeted.
+
+**DB_PORT:** The port on which the postgres server accepts connections. Ex: 5433
+
+**SU_USER:** The postgres superuser name used to connect to the postgres database, i.e. postgres.
+
+**SU_USER_PWD:** The password for the postgres superuser.
+
+**DEFAULT_DB_NAME:** The default database name used to connect to the respective postgres server, ex: postgres.
+
+**MOSIP_DB_NAME:** The MOSIP database name for which the deployment is scheduled.
+
+**SYSADMIN_USER:** The mosip_common_role that acts as the superuser for the remaining actions performed by the shell script.
+
+**SYSADMIN_PWD:** The credential for SYSADMIN_USER.
+
+**DBADMIN_PWD:** The credential for DBADMIN_USER.
+
+**APPADMIN_PWD:** The credential for APPADMIN_USER.
+
+**DBUSER_PWD:** The credential for the database user.
+
+**BASE_PATH:** The path of the DB scripts kept on the deployment server.
+
+**LOG_PATH:** The path where the deployment log file will be created.
+
+**COMMON_ROLE_FILENAME:** The common roles creation filename, ex: mosip_role_common.sql
+
+**APP_ROLE_FILENAME:** The DB-specific user role creation filename, ex: mosip_role_databaseuser.sql
+
+**DB_CREATION_FILENAME:** The DB-specific creation script name, ex: mosip_database_db.sql
+
+**ACCESS_GRANT_FILENAME:** The filename of the access provisioning script for the users created above, ex: mosip_<db_name>_grants.sql
+
+**DDL_FILENAME:** The DDL script filename, ex: mosip_<db_name>_ddl_deploy.sql
+
+**DML_FLAG:** A flag variable set to 0 or 1 depending on whether any DML exists for the particular DB: flag=0 means no DML, flag=1 means DML exists.
+
+**DML_FILENAME:** The DML script filename, used only if flag=1; otherwise it is empty or null, ex: mosip_<db_name>_dml_deploy.sql
+
+**Note - Make sure there is a single empty line at the end of the .properties file content and no spaces at the beginning and end of the parameter values.**
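As a pre-flight step, the properties file can be checked against the variable list above before invoking the deploy script. This is a sketch only: the `check_properties` helper is illustrative and assumes shell-style KEY=VALUE lines.

```python
# Pre-flight check (sketch): confirm a deploy .properties file defines every
# variable described above. DML_FILENAME may legitimately be empty when DML_FLAG=0.
REQUIRED = ['DB_SERVERIP', 'DB_PORT', 'SU_USER', 'SU_USER_PWD', 'DEFAULT_DB_NAME',
            'MOSIP_DB_NAME', 'SYSADMIN_USER', 'SYSADMIN_PWD', 'DBADMIN_PWD',
            'APPADMIN_PWD', 'DBUSER_PWD', 'BASE_PATH', 'LOG_PATH',
            'COMMON_ROLE_FILENAME', 'APP_ROLE_FILENAME', 'DB_CREATION_FILENAME',
            'ACCESS_GRANT_FILENAME', 'DDL_FILENAME', 'DML_FLAG', 'DML_FILENAME']

def check_properties(path):
    values = {}
    with open(path) as f:
        for line in f:
            line = line.strip()
            # Parse shell-style KEY=VALUE lines, skipping comments and blanks.
            if line and not line.startswith('#') and '=' in line:
                key, value = line.split('=', 1)
                values[key.strip()] = value.strip()
    missing = [k for k in REQUIRED if not values.get(k)]
    if values.get('DML_FLAG') == '0' and 'DML_FILENAME' in missing:
        missing.remove('DML_FILENAME')  # no DML for this DB, filename may be empty
    print('Missing or empty:', ', '.join(missing) if missing else 'none')

check_properties('mosip_archive_deploy.properties')
```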
+## DB deployment for all Commons module databases with single-click deployment
+
+**Step 1** -> Make the required changes in all the respective database properties files **(mosip_<db_name>_deploy.properties)** in the respective database directories. The path of the properties file and the variable list remain the same as explained above. Once the properties files are ready, access the directory where the deployment script is kept.
+
+**Step 2** -> To deploy all Commons module databases, run the **"mosip_commons_db_deployment.sh"** script available in the /database directory. To access the **"mosip_commons_db_deployment.sh"** script, follow the commands below:
+
+ **Enter:-bash-4.2$** cd /home/madmin/database/
+
+ **Enter:-bash-4.2$** bash mosip_commons_db_deployment.sh
+
+**Step 3** -> Observe the Post Deployment Validation steps below.
+
+**No modification is required in any of the .sql files in the database folder. If a modification is required, please reach out to the database team and have it made.**
+
+## DB deployment for a single database or selected databases
+
+**Step 1** -> Update the properties (.properties) file with the required parameter values for the single or selected databases.
+
+All these .sh and properties files are kept in each database directory. Follow the steps below:
+
+**Step 2** -> Log in to the deployment server/VM.
+
+**Step 3** -> Check the pwd (present working directory). Make sure you are inside the right database folder/directory to run the deployment for that specific database.
+
+**Enter:-bash-4.2$** pwd
+
+This should be the path if the deployment is for the database named **mosip_<schema_name>**: /home/madmin/database/mosip_<schema_name>
+
+**Step 4** -> Move all the necessary files from the local directory to the deployment server directory under the respective databases.
+ +**Step 5** -> After prior modifications to the properties file, run the below deployment shell script as given: + +**Enter:-bash-4.2$** bash mosip__db_deploy.sh mosip__deploy.properties + +**Step 6** -> Please observe Post Deployment Validation steps below + +**No modification required to be done on any of the <>.sql files in the database folder. If it is required to be modified then please reach out to database team and have it modified.** + +### Post Deployment Validation + +**Note:** If you encounter the following messages then please recheck the details(ip address, port number, database name, password) entered in the properties file, the message would be as follows: + +. + + + + + +**Key points during or after the script execution:** + + * Properties file found message + + * Server status + + * Accessing the right path for DB deploy + + * Creates respective roles + + * Check for any active connections + + * Creates roles, creating Database, schemas, granting access, creating respective tables. + + * Loading data or DML operations valid only for those DB's which carries DML actions. + + * End of sourcing or deployment process. + +**Post deployment process, look out for database deployment log file which captures all stages of deployment. Log file path is defined in the properties file of the databases.** + +**During all the above stages please watch out for any errors which will be capture in the log file.** + +Kindly ignore **NOTICE** or **SKIPPING** messages. As these messages states that particular action is already in place hence sql script ignore performing again. + +### Post deployment process, look out for each database deployment log files which captures all stages of deployment. Log file path is defined in the properties file of the respective databases. 
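+### Sample properties file
+
+For reference, below is a minimal sketch of what a filled-in deploy properties file could look like. All values shown (IP address, port, passwords, paths and file names) are illustrative placeholders, not actual deployment values; replace each of them according to your environment.
+
+```properties
+# Sample mosip_<dbname>_deploy.properties -- all values below are placeholders
+DB_SERVERIP=10.0.0.1
+DB_PORT=5433
+SU_USER=postgres
+SU_USER_PWD=<superuser_password>
+DEFAULT_DB_NAME=postgres
+MOSIP_DB_NAME=mosip_archive
+SYSADMIN_USER=sysadmin
+SYSADMIN_PWD=<sysadmin_password>
+DBADMIN_PWD=<dbadmin_password>
+APPADMIN_PWD=<appadmin_password>
+DBUSER_PWD=<dbuser_password>
+BASE_PATH=/home/madmin/database/
+LOG_PATH=/home/madmin/logs/
+COMMON_ROLE_FILENAME=mosip_role_common.sql
+APP_ROLE_FILENAME=mosip_role_<dbuser>.sql
+DB_CREATION_FILENAME=mosip_<dbname>_db.sql
+ACCESS_GRANT_FILENAME=mosip_<dbname>_grants.sql
+DDL_FILENAME=mosip_<dbname>_ddl_deploy.sql
+# DML_FLAG=0 means this DB has no dml scripts, so DML_FILENAME stays empty
+DML_FLAG=0
+DML_FILENAME=
+```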
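+### Sample post-deployment checks
+
+As a quick post-deployment sanity check, commands along the following lines could be used to confirm that the schema and tables exist and to inspect the deployment log. The host, port, user and log file name below are assumptions taken from the sample properties file above, not fixed values.
+
+   **Enter:-bash-4.2$** psql -h 10.0.0.1 -p 5433 -U sysadmin -d mosip_archive -c '\dt archive.*'
+
+   **Enter:-bash-4.2$** tail -n 50 /home/madmin/logs/mosip_archive_db_deploy.log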
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-app_audit_log.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-app_audit_log.sql
new file mode 100644
index 00000000..001596ae
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-app_audit_log.sql
@@ -0,0 +1,75 @@
+-- -------------------------------------------------------------------------------------------------
+-- Database Name: mosip_archive
+-- Table Name : archive.app_audit_log
+-- Purpose : Application Audit Log : To track application related audit details for analysing, auditing and reporting purposes
+-- Create By : Sadanandegowda
+-- Created Date : Dec-2020
+--
+-- Modified Date Modified By Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+--
+-- ------------------------------------------------------------------------------------------
+
+-- object: archive.app_audit_log | type: TABLE --
+-- DROP TABLE IF EXISTS archive.app_audit_log CASCADE;
+CREATE TABLE archive.app_audit_log(
+ log_id character varying(64) NOT NULL,
+ log_dtimes timestamp NOT NULL,
+ log_desc character varying(2048),
+ event_id character varying(64) NOT NULL,
+ event_type character varying(64) NOT NULL,
+ event_name character varying(128) NOT NULL,
+ action_dtimes timestamp NOT NULL,
+ host_name character varying(128) NOT NULL,
+ host_ip character varying(16) NOT NULL,
+ session_user_id character varying(256) NOT NULL,
+ session_user_name character varying(128),
+ app_id character varying(64) NOT NULL,
+ app_name character varying(128) NOT NULL,
+ module_id character varying(64),
+ module_name character varying(128),
+ ref_id character varying(64),
+ ref_id_type character varying(64),
+ cr_by character varying(256) NOT NULL,
+ CONSTRAINT pk_audlog_log_id PRIMARY KEY (log_id)
+
+);
+-- ddl-end --
+COMMENT ON TABLE archive.app_audit_log IS 'Application Audit Log : To track application related audit details for analysing, auditing and reporting purposes';
+-- ddl-end --
+COMMENT ON COLUMN archive.app_audit_log.log_id IS 'Log Id: Unique audit log id for each audit event log entry across the system.';
+-- ddl-end --
+COMMENT ON COLUMN archive.app_audit_log.log_dtimes IS 'Log DateTimestamp: Audit Log Datetimestamp';
+-- ddl-end --
+COMMENT ON COLUMN archive.app_audit_log.log_desc IS 'Log Description: Detailed description of the audit event';
+-- ddl-end --
+COMMENT ON COLUMN archive.app_audit_log.event_id IS 'Event Id: ID of the event that triggered the audit action';
+-- ddl-end --
+COMMENT ON COLUMN archive.app_audit_log.event_type IS 'Event Type: Type of event that triggered the audit log, like, SYSTEM, USER, APPLICATION, BATCH etc.';
+-- ddl-end --
+COMMENT ON COLUMN archive.app_audit_log.event_name IS 'Event Name: Event Name of the Event Id captured';
+-- ddl-end --
+COMMENT ON COLUMN archive.app_audit_log.action_dtimes IS 'Action DateTimestamp: Timestamp when the application action happened.';
+-- ddl-end --
+COMMENT ON COLUMN archive.app_audit_log.host_name IS 'Host Name: Host Name of the Host ID captured, if any.';
+-- ddl-end --
+COMMENT ON COLUMN archive.app_audit_log.host_ip IS 'Host Ip: Machine or device host Ip address of the audit action event that happened / was triggered';
+-- ddl-end --
+COMMENT ON COLUMN archive.app_audit_log.session_user_id IS 'Session user Id: Active User ID of the person who is logged in to the system and performing any action that triggered the audit log.';
+-- ddl-end --
+COMMENT ON COLUMN archive.app_audit_log.session_user_name IS 'Session user Name: User Name of the Session User ID.';
+-- ddl-end --
+COMMENT ON COLUMN archive.app_audit_log.app_id IS 'Application Id: Application Id of the audit action that happened and was logged.';
+-- ddl-end --
+COMMENT ON COLUMN archive.app_audit_log.app_name IS 'Application Name: Application Name';
+-- ddl-end --
+COMMENT ON COLUMN archive.app_audit_log.module_id IS 'Module Id: Application Module ID that triggered the audit log.';
+-- ddl-end --
+COMMENT ON COLUMN archive.app_audit_log.module_name IS 'Module Name: Application Module Name of the Module ID captured.';
+-- ddl-end --
+COMMENT ON COLUMN archive.app_audit_log.ref_id IS 'Reference Id: Reference ID for any cross reference purpose relevant for audit tracking, user id, app id, app or module id, etc.';
+-- ddl-end --
+COMMENT ON COLUMN archive.app_audit_log.ref_id_type IS 'Reference Id Type: Type of reference id entered';
+-- ddl-end --
+COMMENT ON COLUMN archive.app_audit_log.cr_by IS 'Created By : ID or name of the user who create / insert record';
+-- ddl-end --
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-applicant_demographic_consumed.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-applicant_demographic_consumed.sql
new file mode 100644
index 00000000..04a6d805
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-applicant_demographic_consumed.sql
@@ -0,0 +1,54 @@
+-- -------------------------------------------------------------------------------------------------
+-- Database Name: mosip_archive
+-- Table Name : archive.applicant_demographic_consumed
+-- Purpose : Applicant Demographic Consumed: Stores demographic details of an applicant that was consumed.
+-- Create By : Sadanandegowda
+-- Created Date : Dec-2020
+--
+-- Modified Date Modified By Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+--
+-- ------------------------------------------------------------------------------------------
+-- object: archive.applicant_demographic_consumed | type: TABLE --
+-- DROP TABLE IF EXISTS archive.applicant_demographic_consumed CASCADE;
+CREATE TABLE archive.applicant_demographic_consumed(
+ prereg_id character varying(36) NOT NULL,
+ demog_detail bytea NOT NULL,
+ demog_detail_hash character varying(64) NOT NULL,
+ encrypted_dtimes timestamp NOT NULL,
+ status_code character varying(36) NOT NULL,
+ lang_code character varying(3) NOT NULL,
+ cr_appuser_id character varying(256) NOT NULL,
+ cr_by character varying(256) NOT NULL,
+ cr_dtimes timestamp NOT NULL,
+ upd_by character varying(256),
+ upd_dtimes timestamp,
+ CONSTRAINT pk_appldemc_prereg_id PRIMARY KEY (prereg_id)
+
+);
+-- ddl-end --
+COMMENT ON TABLE archive.applicant_demographic_consumed IS 'Applicant Demographic Consumed: Stores demographic details of an applicant that was consumed.';
+-- ddl-end --
+COMMENT ON COLUMN archive.applicant_demographic_consumed.prereg_id IS 'Pre Registration ID: Unique Id generated for an individual during the pre-registration process which will be referenced during registration process at a registration center.';
+-- ddl-end --
+COMMENT ON COLUMN archive.applicant_demographic_consumed.demog_detail IS 'Demographic Detail: Demographic details of an individual, stored in json format.';
+-- ddl-end --
+COMMENT ON COLUMN archive.applicant_demographic_consumed.demog_detail_hash IS 'Demographic Detail Hash: Hash value of the demographic details stored in json format in a separate column. This will be used to make sure that nobody has tampered with the data.';
+-- ddl-end --
+COMMENT ON COLUMN archive.applicant_demographic_consumed.encrypted_dtimes IS 'Encrypted Date Time: Date and time when the data was encrypted. This will also be used to get the key for decrypting the data.';
+-- ddl-end --
+COMMENT ON COLUMN archive.applicant_demographic_consumed.status_code IS 'Status Code: Status of the pre-registration application. The application can be in draft / pending state or submitted state';
+-- ddl-end --
+COMMENT ON COLUMN archive.applicant_demographic_consumed.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language.';
+-- ddl-end --
+COMMENT ON COLUMN archive.applicant_demographic_consumed.cr_appuser_id IS 'Application Created User Id: User ID of the individual who is submitting the pre-registration application. It can be for self or for others like family members.';
+-- ddl-end --
+COMMENT ON COLUMN archive.applicant_demographic_consumed.cr_by IS 'Created By : ID or name of the user who create / insert record.';
+-- ddl-end --
+COMMENT ON COLUMN archive.applicant_demographic_consumed.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+-- ddl-end --
+COMMENT ON COLUMN archive.applicant_demographic_consumed.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+-- ddl-end --
+COMMENT ON COLUMN archive.applicant_demographic_consumed.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+-- ddl-end --
+
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-applicant_document_consumed.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-applicant_document_consumed.sql
new file mode 100644
index 00000000..696e7ee0
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-applicant_document_consumed.sql
@@ -0,0 +1,75 @@
+-- -------------------------------------------------------------------------------------------------
+-- Database Name: mosip_archive
+-- Table Name : archive.applicant_document_consumed
+-- Purpose : Applicant Document Consumed: Documents that are uploaded as part of pre-registration process which were consumed are maintained here.
+-- Create By : Sadanandegowda
+-- Created Date : Dec-2020
+--
+-- Modified Date Modified By Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+--
+-- ------------------------------------------------------------------------------------------
+-- object: archive.applicant_document_consumed | type: TABLE --
+-- DROP TABLE IF EXISTS archive.applicant_document_consumed CASCADE;
+CREATE TABLE archive.applicant_document_consumed(
+ id character varying(36) NOT NULL,
+ prereg_id character varying(36) NOT NULL,
+ doc_name character varying(128) NOT NULL,
+ doc_cat_code character varying(36) NOT NULL,
+ doc_typ_code character varying(36) NOT NULL,
+ doc_file_format character varying(36) NOT NULL,
+ doc_id character varying(128) NOT NULL,
+ doc_hash character varying(64) NOT NULL,
+ encrypted_dtimes timestamp NOT NULL,
+ status_code character varying(36) NOT NULL,
+ lang_code character varying(3) NOT NULL,
+ cr_by character varying(256),
+ cr_dtimes timestamp,
+ upd_by character varying(256),
+ upd_dtimes timestamp,
+ CONSTRAINT pk_appldocc_prereg_id PRIMARY KEY (id)
+
+);
+-- indexes section -------------------------------------------------
+create unique index idx_appldocc_prereg_id on archive.applicant_document_consumed (prereg_id, doc_cat_code, doc_typ_code) ;
+
+-- ddl-end --
+COMMENT ON TABLE archive.applicant_document_consumed IS 'Applicant Document Consumed: Documents that are uploaded as part of pre-registration process which were consumed are maintained here. ';
+-- ddl-end --
+COMMENT ON COLUMN archive.applicant_document_consumed.id IS 'Id: Unique id generated for the documents being uploaded as part of pre-registration process.';
+-- ddl-end --
+COMMENT ON COLUMN archive.applicant_document_consumed.prereg_id IS 'Pre Registration Id: Id of the pre-registration application for which the documents are being uploaded.';
+-- ddl-end --
+COMMENT ON COLUMN archive.applicant_document_consumed.doc_name IS 'Document Name: Name of the document that is uploaded';
+-- ddl-end --
+COMMENT ON COLUMN archive.applicant_document_consumed.doc_cat_code IS 'Document Category Code: Document category code under which the document is being uploaded. Refers to master.document_category.code';
+-- ddl-end --
+COMMENT ON COLUMN archive.applicant_document_consumed.doc_typ_code IS 'Document Type Code: Document type code under which the document is being uploaded. Refers to master.document_type.code';
+-- ddl-end --
+COMMENT ON COLUMN archive.applicant_document_consumed.doc_file_format IS 'Document File Format: Format in which the document is being uploaded. Refers to master.document_file_format.code';
+-- ddl-end --
+COMMENT ON COLUMN archive.applicant_document_consumed.doc_id IS 'Document Id: ID of the document being uploaded';
+-- ddl-end --
+COMMENT ON COLUMN archive.applicant_document_consumed.doc_hash IS 'Document Hash: Hash value of the document being uploaded in document store. This will be used to make sure that nobody has tampered with the document stored in a separate store. ';
+-- ddl-end --
+COMMENT ON COLUMN archive.applicant_document_consumed.encrypted_dtimes IS 'Encrypted Date Time: Date and time when the document was encrypted before uploading it on document store. This will also be used to get the key for decrypting the data.';
+-- ddl-end --
+COMMENT ON COLUMN archive.applicant_document_consumed.status_code IS 'Status Code: Status of the document that is being uploaded.';
+-- ddl-end --
+COMMENT ON COLUMN archive.applicant_document_consumed.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language.';
+-- ddl-end --
+COMMENT ON COLUMN archive.applicant_document_consumed.cr_by IS 'Created By : ID or name of the user who create / insert record.';
+-- ddl-end --
+COMMENT ON COLUMN archive.applicant_document_consumed.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+-- ddl-end --
+COMMENT ON COLUMN archive.applicant_document_consumed.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+-- ddl-end --
+COMMENT ON COLUMN archive.applicant_document_consumed.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+-- ddl-end --
+
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-auth_transaction.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-auth_transaction.sql
new file mode 100644
index 00000000..24200af5
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-auth_transaction.sql
@@ -0,0 +1,90 @@
+-- -------------------------------------------------------------------------------------------------
+-- Database Name: mosip_archive
+-- Table Name : archive.auth_transaction
+-- Purpose : Authentication Transaction : To track all authentication transactions steps / stages in the process
+-- Create By : Sadanandegowda
+-- Created Date : Dec-2020
+--
+-- Modified Date Modified By Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+--
+-- ------------------------------------------------------------------------------------------
+
+-- object: archive.auth_transaction | type: TABLE --
+-- DROP TABLE IF EXISTS archive.auth_transaction CASCADE;
+CREATE TABLE archive.auth_transaction(
+ id character varying(36) NOT NULL,
+ request_dtimes timestamp NOT NULL,
+ response_dtimes timestamp NOT NULL,
+ request_trn_id character varying(64),
+ auth_type_code character varying(36) NOT NULL,
+ status_code character varying(36) NOT NULL,
+ status_comment character varying(1024),
+ lang_code character varying(3) NOT NULL,
+ ref_id_type character varying(36),
+ ref_id character varying(64),
+ token_id character varying(128) NOT NULL,
+ requested_entity_type character varying(64),
+ requested_entity_id character varying(36),
+ requested_entity_name character varying(128),
+ static_tkn_id character varying(64),
+ request_signature character varying,
+ response_signature character varying,
+ cr_by character varying(256) NOT NULL,
+ cr_dtimes timestamp NOT NULL,
+ upd_by character varying(256),
+ upd_dtimes timestamp,
+ is_deleted boolean,
+ del_dtimes timestamp,
+ CONSTRAINT pk_authtrn_id PRIMARY KEY (id)
+
+);
+-- ddl-end --
+COMMENT ON TABLE archive.auth_transaction IS 'Authentication Transaction : To track all authentication transactions steps / stages in the process flow.';
+-- ddl-end --
+COMMENT ON COLUMN archive.auth_transaction.id IS 'ID: This is unique transaction id assigned for each authentication transaction';
+-- ddl-end --
+COMMENT ON COLUMN archive.auth_transaction.request_dtimes IS 'Request Datetimestamp : Timestamp of Authentication request received from client system.';
+-- ddl-end --
+COMMENT ON COLUMN archive.auth_transaction.response_dtimes IS 'Response Datetimestamp : Date timestamp of response sent back to client system for the authentication request. ';
+-- ddl-end --
+COMMENT ON COLUMN archive.auth_transaction.request_trn_id IS 'Request Transaction Id : Unique Authentication request transaction id assigned for each request received from client system.';
+-- ddl-end --
+COMMENT ON COLUMN archive.auth_transaction.auth_type_code IS 'Authentication Type Code : Type of authentication for the specific transaction, for ex., OTP, BIO, DEMO, etc';
+-- ddl-end --
+COMMENT ON COLUMN archive.auth_transaction.status_code IS 'Status Code : Current Status code of the transaction in a process flow.';
+-- ddl-end --
+COMMENT ON COLUMN archive.auth_transaction.status_comment IS 'Status Comment : Description for the status entered/updated by user or system assigned for the specific transaction.';
+-- ddl-end --
+COMMENT ON COLUMN archive.auth_transaction.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language.';
+-- ddl-end --
+COMMENT ON COLUMN archive.auth_transaction.ref_id_type IS 'Reference Id Type: Type of reference id entered in reference id column for ex., USER, VIRTUALID, UIN, PREREG, etc.';
+-- ddl-end --
+COMMENT ON COLUMN archive.auth_transaction.ref_id IS 'Reference Id: Reference ID for any cross reference purpose relevant for tracking, for ex., user id, uin, vid, prereg id, rid etc.';
+-- ddl-end --
+COMMENT ON COLUMN archive.auth_transaction.token_id IS 'Token ID : Token ID generated in reference with UIN/VID';
+-- ddl-end --
+COMMENT ON COLUMN archive.auth_transaction.requested_entity_type IS 'Requested Entity Type: Type of entity through which the authentication request was initiated. It can be from a partner, internal authentication, etc.';
+-- ddl-end --
+COMMENT ON COLUMN archive.auth_transaction.requested_entity_id IS 'Requested Entity Id: ID of the entity through which the authentication request was initiated.';
+-- ddl-end --
+COMMENT ON COLUMN archive.auth_transaction.requested_entity_name IS 'Requested Entity Name: Name of the entity through which the authentication request was initiated.';
+-- ddl-end --
+COMMENT ON COLUMN archive.auth_transaction.static_tkn_id IS 'Static Token Id : This is a static token id assigned for each authentication request. Static token id is a combination of TSPID + UIN generated for any TSP or Individuals and sent back in response. End user can use this id while authenticating themselves.';
+-- ddl-end --
+COMMENT ON COLUMN archive.auth_transaction.request_signature IS 'Request Signature: Request body information stored along with its signature';
+-- ddl-end --
+COMMENT ON COLUMN archive.auth_transaction.response_signature IS 'Response Signature: Response body stored along with its signature';
+-- ddl-end --
+COMMENT ON COLUMN archive.auth_transaction.cr_by IS 'Created By : ID or name of the user who create / insert record';
+-- ddl-end --
+COMMENT ON COLUMN archive.auth_transaction.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+-- ddl-end --
+COMMENT ON COLUMN archive.auth_transaction.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+-- ddl-end --
+COMMENT ON COLUMN archive.auth_transaction.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+-- ddl-end --
+COMMENT ON COLUMN archive.auth_transaction.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
+-- ddl-end --
+COMMENT ON COLUMN archive.auth_transaction.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
+-- ddl-end --
\ No newline at end of file
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-otp_transaction.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-otp_transaction.sql
new file mode 100644
index 00000000..7034abf6
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-otp_transaction.sql
@@ -0,0 +1,62 @@
+-- -------------------------------------------------------------------------------------------------
+-- Database Name: mosip_archive
+-- Table Name : archive.otp_transaction
+-- Purpose : OTP Transaction: All OTP related data and validation details are maintained here for ID Authentication
+-- Create By : Sadanandegowda
+-- Created Date : Dec-2020
+--
+-- Modified Date Modified By Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+--
+-- ------------------------------------------------------------------------------------------
+-- object: archive.otp_transaction | type: TABLE --
+-- DROP TABLE IF EXISTS archive.otp_transaction CASCADE;
+CREATE TABLE archive.otp_transaction(
+ id character varying(36) NOT NULL,
+ ref_id character varying(64) NOT NULL,
+ otp_hash character varying(512) NOT NULL,
+ generated_dtimes timestamp,
+ expiry_dtimes timestamp,
+ validation_retry_count smallint,
+ status_code character varying(36),
+ lang_code character varying(3),
+ cr_by character varying(256) NOT NULL,
+ cr_dtimes timestamp NOT NULL,
+ upd_by character varying(256),
+ upd_dtimes timestamp,
+ is_deleted boolean,
+ del_dtimes timestamp,
+ CONSTRAINT pk_otpt_id PRIMARY KEY (id)
+
+);
+-- ddl-end --
+COMMENT ON TABLE archive.otp_transaction IS 'OTP Transaction: All OTP related data and validation details are maintained here for ID Authentication module.';
+-- ddl-end --
+COMMENT ON COLUMN archive.otp_transaction.id IS 'ID: Key alias id is a unique identifier (UUID) used as an alias of the encryption key stored in keystore like HSM (hardware security module).';
+-- ddl-end --
+COMMENT ON COLUMN archive.otp_transaction.ref_id IS 'Reference ID: Reference ID is a reference information received from OTP requester which can be used while validating the OTP.';
+-- ddl-end --
+COMMENT ON COLUMN archive.otp_transaction.otp_hash IS 'OTP Hash: Hash of id, ref_id and otp which is generated based on the configuration setup and sent to the requester application / module.';
+-- ddl-end --
+COMMENT ON COLUMN archive.otp_transaction.generated_dtimes IS 'Generated Date Time: Date and Time when the OTP was generated';
+-- ddl-end --
+COMMENT ON COLUMN archive.otp_transaction.expiry_dtimes IS 'Expiry Date Time: Date Time when the OTP will be expired';
+-- ddl-end --
+COMMENT ON COLUMN archive.otp_transaction.validation_retry_count IS 'Validation Retry Count: Validation retry counts of this OTP request. If the validation retry crosses the threshold limit, then the OTP will be de-activated.';
+-- ddl-end --
+COMMENT ON COLUMN archive.otp_transaction.status_code IS 'Status Code: Status of the OTP, whether it is active or expired.';
+-- ddl-end --
+COMMENT ON COLUMN archive.otp_transaction.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language.';
+-- ddl-end --
+COMMENT ON COLUMN archive.otp_transaction.cr_by IS 'Created By : ID or name of the user who create / insert record';
+-- ddl-end --
+COMMENT ON COLUMN archive.otp_transaction.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+-- ddl-end --
+COMMENT ON COLUMN archive.otp_transaction.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+-- ddl-end --
+COMMENT ON COLUMN archive.otp_transaction.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+-- ddl-end --
+COMMENT ON COLUMN archive.otp_transaction.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
+-- ddl-end --
+COMMENT ON COLUMN archive.otp_transaction.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
+-- ddl-end --
\ No newline at end of file
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-processed_prereg_list.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-processed_prereg_list.sql
new file mode 100644
index 00000000..fecc7d03
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-processed_prereg_list.sql
@@ -0,0 +1,41 @@
+-- -------------------------------------------------------------------------------------------------
+-- Database Name: mosip_archive
+-- Table Name : archive.processed_prereg_list
+-- Purpose : Table to store all the pre-registration list received from registration processor within pre-registration module
+-- Create By : Sadanandegowda
+-- Created Date : Dec-2020
+--
+-- Modified Date Modified By Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+--
+-- ------------------------------------------------------------------------------------------
+-- object: archive.processed_prereg_list | type: TABLE --
+-- DROP TABLE IF EXISTS archive.processed_prereg_list CASCADE;
+CREATE TABLE archive.processed_prereg_list(
+ prereg_id character varying(36) NOT NULL,
+ first_received_dtimes timestamp NOT NULL,
+ status_code character varying(36) NOT NULL,
+ status_comments character varying(1024),
+ prereg_trn_id character varying(36),
+ lang_code character varying(3) NOT NULL,
+ cr_by character varying(256) NOT NULL,
+ cr_dtimes timestamp NOT NULL,
+ upd_by character varying(256),
+ upd_dtimes timestamp,
+ is_deleted boolean,
+ del_dtimes timestamp,
+ CONSTRAINT pprlst_pk PRIMARY KEY (prereg_id)
+
+);
+-- ddl-end --
+COMMENT ON TABLE archive.processed_prereg_list IS 'Table to store all the pre-registration list received from registration processor within pre-registration module';
+-- ddl-end --
+COMMENT ON COLUMN archive.processed_prereg_list.prereg_id IS 'Pre-registration id that was consumed by registration processor to generate UIN';
+-- ddl-end --
+COMMENT ON COLUMN archive.processed_prereg_list.first_received_dtimes IS 'Datetime when the pre-registration id was first received';
+-- ddl-end --
+COMMENT ON COLUMN archive.processed_prereg_list.status_code IS 'Status of the pre-registration status update into actual tables';
+-- ddl-end --
+COMMENT ON COLUMN archive.processed_prereg_list.status_comments IS 'Status comments of the pre-registration status update into actual tables';
+-- ddl-end --
+
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-reg_appointment_consumed.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-reg_appointment_consumed.sql
new file mode 100644
index 00000000..630f4e46
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-reg_appointment_consumed.sql
@@ -0,0 +1,58 @@
+-- -------------------------------------------------------------------------------------------------
+-- Database Name: mosip_archive
+-- Table Name : archive.reg_appointment_consumed
+-- Purpose : Registration Appointment Consumed: Stores all the appointment requests booked by an individual at a registration center that are consumed
+-- Create By : Sadanandegowda
+-- Created Date : Dec-2020
+--
+-- Modified Date Modified By Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+--
+-- ------------------------------------------------------------------------------------------
+-- object: archive.reg_appointment_consumed | type: TABLE --
+-- DROP TABLE IF EXISTS archive.reg_appointment_consumed CASCADE;
+CREATE TABLE archive.reg_appointment_consumed(
+ id character varying(36) NOT NULL,
+ regcntr_id character varying(10) NOT NULL,
+ prereg_id character varying(36) NOT NULL,
+ booking_dtimes timestamp NOT NULL,
+ appointment_date date,
+ slot_from_time time,
+ slot_to_time time,
+ lang_code character varying(3) NOT NULL,
+ cr_by character varying(256) NOT NULL,
+ cr_dtimes timestamp NOT NULL,
+ upd_by character varying(256),
+ upd_dtimes timestamp,
+ CONSTRAINT pk_rappmntc_id PRIMARY KEY (id),
+ CONSTRAINT uk_rappmntc_id UNIQUE (prereg_id)
+
+);
+-- ddl-end --
+COMMENT ON TABLE archive.reg_appointment_consumed IS 'Registration Appointment Consumed: Stores all the appointment requests booked by an individual at a registration center that are consumed. ';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_appointment_consumed.id IS 'ID: Unique id generated for the registration appointment booking.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_appointment_consumed.regcntr_id IS 'Registration Center ID: Id of the Registration Center where the appointment is taken. Refers to master.registration_center.id';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_appointment_consumed.prereg_id IS 'Pre-Registration Id: Pre-registration id for which registration appointment is taken.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_appointment_consumed.booking_dtimes IS 'Booking Date Time: Date and Time when the appointment booking is done.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_appointment_consumed.appointment_date IS 'Appointment Date: Date for which an individual has taken an appointment for registration at a registration center';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_appointment_consumed.slot_from_time IS 'Slot From Time: Start time of the appointment slot.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_appointment_consumed.slot_to_time IS 'Slot To Time: End time of the appointment slot.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_appointment_consumed.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_appointment_consumed.cr_by IS 'Created By : ID or name of the user who create / insert record.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_appointment_consumed.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_appointment_consumed.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_appointment_consumed.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+-- ddl-end --
+
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-reg_demo_dedupe_list.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-reg_demo_dedupe_list.sql
new file mode 100644
index 00000000..b2612231
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-reg_demo_dedupe_list.sql
@@ -0,0 +1,48 @@
+-- -------------------------------------------------------------------------------------------------
+-- Database Name: mosip_archive
+-- Table Name : archive.reg_demo_dedupe_list
+-- Purpose : Registration Demographic Deduplication List: List of matched UIN / RIDs, as part of demographic data.
+-- Create By : Sadanandegowda
+-- Created Date : Dec-2020
+--
+-- Modified Date Modified By Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+--
+-- ------------------------------------------------------------------------------------------
+
+-- object: archive.reg_demo_dedupe_list | type: TABLE --
+-- DROP TABLE IF EXISTS archive.reg_demo_dedupe_list CASCADE;
+CREATE TABLE archive.reg_demo_dedupe_list(
+ regtrn_id character varying(36) NOT NULL,
+ matched_reg_id character varying(39) NOT NULL,
+ reg_id character varying(39) NOT NULL,
+ cr_by character varying(256) NOT NULL,
+ cr_dtimes timestamp NOT NULL,
+ upd_by character varying(256),
+ upd_dtimes timestamp,
+ is_deleted boolean,
+ del_dtimes timestamp,
+ CONSTRAINT pk_regded PRIMARY KEY (matched_reg_id,regtrn_id)
+
+);
+-- ddl-end --
+COMMENT ON TABLE archive.reg_demo_dedupe_list IS 'Registration Demographic Deduplication List: List of matched UIN / RIDs, as part of demographic data.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_demo_dedupe_list.regtrn_id IS 'Registration Transaction ID: ID of the demo dedupe transaction, Refers to archive.registration_transaction.id';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_demo_dedupe_list.matched_reg_id IS 'Matched Registration ID: Registration ID of the individual matching with the host registration id. It can be RID or any other id related to an individual.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_demo_dedupe_list.reg_id IS 'Registration ID: Registration ID for which the matches are found as part of the demographic dedupe process.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_demo_dedupe_list.cr_by IS 'Created By : ID or name of the user who create / insert record.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_demo_dedupe_list.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_demo_dedupe_list.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_demo_dedupe_list.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_demo_dedupe_list.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_demo_dedupe_list.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
+-- ddl-end --
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-reg_manual_verification.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-reg_manual_verification.sql
new file mode 100644
index 00000000..69593e58
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-reg_manual_verification.sql
@@ -0,0 +1,73 @@
+-- -------------------------------------------------------------------------------------------------
+-- Database Name: mosip_archive
+-- Table Name : archive.reg_manual_verification
+-- Purpose : Manual Verification: Stores all the registration requests which go through the manual verification process; a registration can be assigned to single/multiple manual verifiers as part of the verification process
+-- Create By : Sadanandegowda
+-- Created Date : Dec-2020
+--
+-- Modified Date Modified By Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+--
+-- ------------------------------------------------------------------------------------------
+
+-- object: archive.reg_manual_verification | type: TABLE --
+-- DROP TABLE IF EXISTS archive.reg_manual_verification CASCADE;
+CREATE TABLE archive.reg_manual_verification(
+ reg_id character varying(39) NOT NULL,
+ matched_ref_id character varying(39) NOT NULL,
+ matched_ref_type character varying(36) NOT NULL,
+ mv_usr_id character varying(256),
+ matched_score numeric(6,3),
+ status_code character varying(36),
+ reason_code character varying(36),
+ status_comment character varying(256),
+ trntyp_code character varying(36),
+ lang_code character varying(3) NOT NULL,
+ is_active boolean NOT NULL,
+ cr_by character varying(256) NOT NULL,
+ cr_dtimes timestamp NOT NULL,
+ upd_by character varying(256),
+ upd_dtimes timestamp,
+ is_deleted boolean,
+ del_dtimes timestamp,
+ CONSTRAINT pk_rmnlver_id PRIMARY KEY (reg_id,matched_ref_id,matched_ref_type)
+
+);
+-- ddl-end --
+COMMENT ON TABLE archive.reg_manual_verification IS 'Manual Verification: Stores all the registration requests which go through the manual verification process; a registration can be assigned to single/multiple manual verifiers as part of the verification process';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.reg_id IS 'Registration ID: ID of the registration request';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.matched_ref_id IS 'Matched Reference ID: Reference ID of the matched registrations. This id can be RID';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.matched_ref_type IS 'Matched Reference ID Type: Type of the Reference ID';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.mv_usr_id IS 'Manual Verifier ID: User ID of the manual verifier';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.matched_score IS 'Matched Score: Matched score as part of the deduplication process. This will be the combined score of multiple ABIS applications';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.status_code IS 'Status Code : Status of the manual verification';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.reason_code IS 'Reason Code : Reason code provided by the manual verifier for approving or rejecting the registration request as part of the verification process';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.status_comment IS 'Status Comment: Comments captured as part of manual verification process';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.trntyp_code IS 'Transaction Type Code : Code of the transaction type';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.is_active IS 'IS_Active : Flag to mark whether the record is Active or In-active';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.cr_by IS 'Created By : ID or name of the user who create / insert record.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
+-- ddl-end --
+
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-registered_authdevice_master_h.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-registered_authdevice_master_h.sql
new file mode 100644
index 00000000..a50a5f00
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-registered_authdevice_master_h.sql
@@ -0,0 +1,82 @@
+-- -------------------------------------------------------------------------------------------------
+-- Database Name: mosip_archive
+-- Table Name : archive.registered_authdevice_master_h
+-- Purpose : Registered Device History : History of changes of any MOSIP device registration will be stored in history table to track any changes for future validations.
+-- Create By : Sadanandegowda
+-- Created Date : Dec-2020
+--
+-- Modified Date Modified By Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+--
+-- ------------------------------------------------------------------------------------------
+
+-- object: archive.registered_authdevice_master_h | type: TABLE --
+-- DROP TABLE IF EXISTS archive.registered_authdevice_master_h CASCADE;
+CREATE TABLE archive.registered_authdevice_master_h(
+ code character varying(36) NOT NULL,
+ status_code character varying(64),
+ device_id character varying(256) NOT NULL,
+ device_sub_id character varying(1024),
+ digital_id character varying(1024) NOT NULL,
+ serial_number character varying(64) NOT NULL,
+ device_detail_id character varying(36) NOT NULL,
+ purpose character varying(64) NOT NULL,
+ firmware character varying(128),
+ expiry_date timestamp,
+ certification_level character varying(3),
+ foundational_trust_provider_id character varying(36),
+ hotlisted boolean,
+ is_active boolean NOT NULL,
+ cr_by character varying(256) NOT NULL,
+ cr_dtimes timestamp NOT NULL,
+ upd_by character varying(256),
+ upd_dtimes timestamp,
+ is_deleted boolean,
+ del_dtimes timestamp,
+ eff_dtimes timestamp NOT NULL,
+ CONSTRAINT pk_authdevicemh_code PRIMARY KEY (code,eff_dtimes)
+
+);
+-- ddl-end --
+COMMENT ON TABLE archive.registered_authdevice_master_h IS 'Registered Device History : History of changes of any MOSIP device registration will be stored in history table to track any changes for future validations.';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.code IS 'Registered Device Code : Unique ID generated / assigned for a device which is registered in the MOSIP system for the purpose';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.status_code IS 'Status Code : Status of the registered devices, The status code can be Registered, De-Registered or Retired/Revoked.';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.device_id IS 'Device ID: Device ID is the unique id provided by device provider for each device';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.device_sub_id IS 'Device Sub ID: Sub ID of the devices, Each device can have an array of sub IDs.';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.digital_id IS 'Digital ID: Digital ID received as a Json value containing values like serial number of the device, make, model, type, provider details, etc.';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.serial_number IS 'Serial Number : Serial number of the device, This will be the Unique ID of the device by the provider';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.device_detail_id IS 'Device Detail ID';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.purpose IS 'Purpose : Purpose of these devices in the MOSIP system. ex. Registrations, Authentication, eKYC...etc';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.firmware IS 'Firmware: Firmware used in devices';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.expiry_date IS 'Expiry Date: expiry date of the device';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.certification_level IS 'Certification Level: Certification level for the device, This can be L0 or L1 devices';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.foundational_trust_provider_id IS 'Foundational Trust Provider ID: Foundational trust provider ID, This will be soft referenced from master.foundational_trust_provider.id. Required only for L1 devices.';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.is_active IS 'IS_Active : Flag to mark whether the record is Active or In-active';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.cr_by IS 'Created By : ID or name of the user who create / insert record';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.eff_dtimes IS 'Effective Date Timestamp : This to track master record whenever there is an INSERT/UPDATE/DELETE ( soft delete ). The current record is effective from this date-time.';
+-- ddl-end --
\ No newline at end of file
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-registered_regdevice_master_h.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-registered_regdevice_master_h.sql
new file mode 100644
index 00000000..96b89b95
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-registered_regdevice_master_h.sql
@@ -0,0 +1,82 @@
+-- -------------------------------------------------------------------------------------------------
+-- Database Name: mosip_archive
+-- Table Name : archive.registered_regdevice_master_h
+-- Purpose : Registered Device History : History of changes of any MOSIP device registration will be stored in history table to track any changes for future validations.
+-- Create By : Sadanandegowda
+-- Created Date : Dec-2020
+--
+-- Modified Date Modified By Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+--
+-- ------------------------------------------------------------------------------------------
+
+-- object: archive.registered_regdevice_master_h | type: TABLE --
+-- DROP TABLE IF EXISTS archive.registered_regdevice_master_h CASCADE;
+CREATE TABLE archive.registered_regdevice_master_h(
+ code character varying(36) NOT NULL,
+ status_code character varying(64),
+ device_id character varying(256) NOT NULL,
+ device_sub_id character varying(1024),
+ digital_id character varying(1024) NOT NULL,
+ serial_number character varying(64) NOT NULL,
+ device_detail_id character varying(36) NOT NULL,
+ purpose character varying(64) NOT NULL,
+ firmware character varying(128),
+ expiry_date timestamp,
+ certification_level character varying(3),
+ foundational_trust_provider_id character varying(36),
+ hotlisted boolean,
+ is_active boolean NOT NULL,
+ cr_by character varying(256) NOT NULL,
+ cr_dtimes timestamp NOT NULL,
+ upd_by character varying(256),
+ upd_dtimes timestamp,
+ is_deleted boolean,
+ del_dtimes timestamp,
+ eff_dtimes timestamp NOT NULL,
+ CONSTRAINT pk_regdevicemh_code PRIMARY KEY (code,eff_dtimes)
+
+);
+-- ddl-end --
+COMMENT ON TABLE archive.registered_regdevice_master_h IS 'Registered Device History : History of changes of any MOSIP device registration will be stored in history table to track any changes for future validations.';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_regdevice_master_h.code IS 'Registered Device Code : Unique ID generated / assigned for a device which is registered in the MOSIP system for the purpose';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_regdevice_master_h.status_code IS 'Status Code : Status of the registered devices, The status code can be Registered, De-Registered or Retired/Revoked.';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_regdevice_master_h.device_id IS 'Device ID: Device ID is the unique id provided by device provider for each device';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_regdevice_master_h.device_sub_id IS 'Device Sub ID: Sub ID of the devices, Each device can have an array of sub IDs.';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_regdevice_master_h.digital_id IS 'Digital ID: Digital ID received as a Json value containing values like serial number of the device, make, model, type, provider details, etc.';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_regdevice_master_h.serial_number IS 'Serial Number : Serial number of the device, This will be the Unique ID of the device by the
provider'; +-- ddl-end -- +COMMENT ON COLUMN archive.registered_regdevice_master_h.device_detail_id IS 'Device Detail ID'; +-- ddl-end -- +COMMENT ON COLUMN archive.registered_regdevice_master_h.purpose IS 'Purpose : Purpose of these devices in the MOSIP system. ex. Registrations, Authentication, eKYC...etc'; +-- ddl-end -- +COMMENT ON COLUMN archive.registered_regdevice_master_h.firmware IS 'Firmware: Firmware used in devices'; +-- ddl-end -- +COMMENT ON COLUMN archive.registered_regdevice_master_h.expiry_date IS 'Expiry Date: expiry date of the device'; +-- ddl-end -- +COMMENT ON COLUMN archive.registered_regdevice_master_h.certification_level IS 'Certification Level: Certification level for the device, This can be L0 or L1 devices'; +-- ddl-end -- +COMMENT ON COLUMN archive.registered_regdevice_master_h.foundational_trust_provider_id IS 'Foundational Trust Provider ID: Foundational trust provider ID, This will be soft referenced from master.foundational_trust_provider.id. Required only for L1 devices.'; +-- ddl-end -- +COMMENT ON COLUMN archive.registered_regdevice_master_h.is_active IS 'IS_Active : Flag to mark whether the record is Active or In-active'; +-- ddl-end -- +COMMENT ON COLUMN archive.registered_regdevice_master_h.cr_by IS 'Created By : ID or name of the user who create / insert record'; +-- ddl-end -- +COMMENT ON COLUMN archive.registered_regdevice_master_h.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; +-- ddl-end -- +COMMENT ON COLUMN archive.registered_regdevice_master_h.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; +-- ddl-end -- +COMMENT ON COLUMN archive.registered_regdevice_master_h.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; +-- ddl-end -- +COMMENT ON COLUMN archive.registered_regdevice_master_h.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.'; +-- ddl-end -- +COMMENT ON COLUMN archive.registered_regdevice_master_h.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE'; +-- ddl-end -- +COMMENT ON COLUMN archive.registered_regdevice_master_h.eff_dtimes IS 'Effective Date Timestamp : This to track master record whenever there is an INSERT/UPDATE/DELETE ( soft delete ). 
The current record is effective from this date-time.'; +-- ddl-end -- \ No newline at end of file diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-registration_transaction.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-registration_transaction.sql new file mode 100644 index 00000000..16ab0bf8 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-registration_transaction.sql @@ -0,0 +1,72 @@ +-- ------------------------------------------------------------------------------------------------- +-- Database Name: mosip_archive +-- Table Name : archive.registration_transaction +-- Purpose : Registration Transaction: Registration Processor Transaction table is to store ALL Registration Processor packet processing/process transaction details for ID issuance +-- Create By : Sadanandegowda +-- Created Date : Dec-2020 +-- +-- Modified Date Modified By Comments / Remarks +-- ------------------------------------------------------------------------------------------ +-- +-- ------------------------------------------------------------------------------------------ + +-- object: archive.registration_transaction | type: TABLE -- +-- DROP TABLE IF EXISTS archive.registration_transaction CASCADE; +CREATE TABLE archive.registration_transaction( + id character varying(36) NOT NULL, + reg_id character varying(39) NOT NULL, + trn_type_code character varying(64) NOT NULL, + remarks character varying(256), + parent_regtrn_id character varying(36), + ref_id character varying(64), + ref_id_type character varying(64), + status_code character varying(36) NOT NULL, + sub_status_code character varying(36) NOT NULL, + lang_code character varying(3) NOT NULL, + status_comment character varying(256), + cr_by character varying(256) NOT NULL, + cr_dtimes timestamp NOT NULL, + upd_by character varying(256), + upd_dtimes timestamp, + is_deleted boolean, + del_dtimes timestamp, + CONSTRAINT pk_regtrn_id PRIMARY KEY (id) + +); +-- ddl-end -- +COMMENT ON TABLE archive.registration_transaction IS 'Registration Transaction: Registration Processor Transaction table is to store ALL Registration Processor packet processing/process transaction details for ID issuance'; +-- ddl-end -- +COMMENT ON COLUMN archive.registration_transaction.id IS 'ID: Transaction id of the transactions that were recorded in registration module/application'; +-- ddl-end -- +COMMENT ON COLUMN archive.registration_transaction.reg_id IS 'Registration ID: Registration id for which these transactions are carried out at the registration client application.'; +-- ddl-end -- +COMMENT ON COLUMN archive.registration_transaction.trn_type_code IS 'Transaction Type Code: Type of transaction being processed. Refers to reg.transaction_type.code'; +-- ddl-end -- +COMMENT ON COLUMN archive.registration_transaction.remarks IS 'Transaction Remarks: Current remarks/comments of the transaction'; +-- ddl-end -- +COMMENT ON COLUMN archive.registration_transaction.parent_regtrn_id IS 'Parent Registration ID: Parent transaction id that has triggered this transaction (if any). Refers to reg.registration_transaction.id'; +-- ddl-end -- +COMMENT ON COLUMN archive.registration_transaction.ref_id IS 'Reference ID: Reference id for the transaction if any'; +-- ddl-end -- +COMMENT ON COLUMN archive.registration_transaction.ref_id_type IS 'reference ID Type: reference ID type of the transaction if any'; +-- ddl-end -- +COMMENT ON COLUMN archive.registration_transaction.status_code IS 'Status Code: Current status of the transaction. 
Refers to code field of master.status_list table.'; +-- ddl-end -- +COMMENT ON COLUMN archive.registration_transaction.sub_status_code IS 'Sub Status Code: Current sub status of the registration transaction. Refers to code field of master.status_list table.'; +-- ddl-end -- +COMMENT ON COLUMN archive.registration_transaction.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language.'; +-- ddl-end -- +COMMENT ON COLUMN archive.registration_transaction.status_comment IS 'Status Comment: Comments provided by the actor during the transaction processing.'; +-- ddl-end -- +COMMENT ON COLUMN archive.registration_transaction.cr_by IS 'Created By : ID or name of the user who create / insert record.'; +-- ddl-end -- +COMMENT ON COLUMN archive.registration_transaction.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; +-- ddl-end -- +COMMENT ON COLUMN archive.registration_transaction.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; +-- ddl-end -- +COMMENT ON COLUMN archive.registration_transaction.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; +-- ddl-end -- +COMMENT ON COLUMN archive.registration_transaction.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.'; +-- ddl-end -- +COMMENT ON COLUMN archive.registration_transaction.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE'; +-- ddl-end -- diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-uin_biometric_h.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-uin_biometric_h.sql new file mode 100644 index 00000000..a51f868e --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-uin_biometric_h.sql @@ -0,0 +1,61 @@ +-- ------------------------------------------------------------------------------------------------- +-- Database Name: mosip_archive +-- Table Name : archive. 
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-uin_biometric_h.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-uin_biometric_h.sql
new file mode 100644
index 00000000..a51f868e
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-uin_biometric_h.sql
@@ -0,0 +1,61 @@
+-- -------------------------------------------------------------------------------------------------
+-- Database Name: mosip_archive
+-- Table Name : archive.uin_biometric_h
+-- Purpose : UIN Biometric History : To track changes to the base table record whenever there is an INSERT/UPDATE/DELETE ( soft delete ). Effective DateTimestamp is used for identifying the latest or point in time information.
+-- Create By : Sadanandegowda
+-- Created Date : Dec-2020
+--
+-- Modified Date Modified By Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+--
+-- ------------------------------------------------------------------------------------------
+
+-- object: archive.uin_biometric_h | type: TABLE --
+-- DROP TABLE IF EXISTS archive.uin_biometric_h CASCADE;
+CREATE TABLE archive.uin_biometric_h(
+	uin_ref_id character varying(36) NOT NULL,
+	biometric_file_type character varying(36) NOT NULL,
+	eff_dtimes timestamp NOT NULL,
+	bio_file_id character varying(128) NOT NULL,
+	biometric_file_name character varying(128) NOT NULL,
+	biometric_file_hash character varying(64) NOT NULL,
+	lang_code character varying(3) NOT NULL,
+	cr_by character varying(256) NOT NULL,
+	cr_dtimes timestamp NOT NULL,
+	upd_by character varying(256),
+	upd_dtimes timestamp,
+	is_deleted boolean,
+	del_dtimes timestamp,
+	CONSTRAINT pk_uinbh PRIMARY KEY (uin_ref_id,biometric_file_type,eff_dtimes),
+	CONSTRAINT uk_uinbh UNIQUE (uin_ref_id,bio_file_id,eff_dtimes)
+
+);
+-- ddl-end --
+COMMENT ON TABLE archive.uin_biometric_h IS 'UIN Biometric History : This to track changes to base table record whenever there is an INSERT/UPDATE/DELETE ( soft delete ), Effective DateTimestamp is used for identifying latest or point in time information. Refer base table description for details. ';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_biometric_h.uin_ref_id IS 'UIN Reference ID: System generated id mapped to a UIN used for references in the system. UIN reference ID is also used as folder/bucket in DFS (HDFS/CEPH) to store documents and biometric CBEFF file. refers to idrepo.uin.uin_ref_id';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_biometric_h.biometric_file_type IS 'Biometric File Type: Type of the biometric file stored in DFS (HDFS/CEPH). File type can be individual biometric file or parent /guardian biometric file.';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_biometric_h.eff_dtimes IS 'Effective Datetimestamp : This to track base table record changes whenever there is an INSERT/UPDATE/DELETE ( soft delete ). The current record is effective from this date-time till next change occurs.';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_biometric_h.bio_file_id IS 'Biometric File ID: ID of the biometric CBEFF file that is stored in filesystem storage like HDFS/CEPH. If File ID Is not available then name of the file itself can be used as file ID.';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_biometric_h.biometric_file_name IS 'Biometric File Name: Name of the biometric CBEFF file that is stored in filesystem storage like HDFS/CEPH.';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_biometric_h.biometric_file_hash IS 'Biometric File Hash: Hash value of the Biometric CBEFF file which is stored in DFS (HDFS/CEPH) storage. While reading the file, hash value of the file is verified with this hash value to ensure file validity.';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_biometric_h.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language. 
'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_biometric_h.cr_by IS 'Created By : ID or name of the user who create / insert record'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_biometric_h.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_biometric_h.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_biometric_h.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_biometric_h.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_biometric_h.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE'; +-- ddl-end -- diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-uin_document_h.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-uin_document_h.sql new file mode 100644 index 00000000..234855dc --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-uin_document_h.sql @@ -0,0 +1,67 @@ +-- ------------------------------------------------------------------------------------------------- +-- Database Name: mosip_archive +-- Table Name : archive.uin_document_h +-- Purpose : UIN Document History : This to track changes to base table record whenever there is an INSERT/UPDATE/DELETE ( soft delete ), Effective DateTimestamp is used for identifying latest or point in time information. Refer base table description for details +-- Create By : Sadanandegowda +-- Created Date : Dec-2020 +-- +-- Modified Date Modified By Comments / Remarks +-- ------------------------------------------------------------------------------------------ +-- +-- ------------------------------------------------------------------------------------------ + +-- object: archive.uin_document_h | type: TABLE -- +-- DROP TABLE IF EXISTS archive.uin_document_h CASCADE; +CREATE TABLE archive.uin_document_h( + uin_ref_id character varying(36) NOT NULL, + doccat_code character varying(36) NOT NULL, + doctyp_code character varying(64) NOT NULL, + eff_dtimes timestamp NOT NULL, + doc_id character varying(128) NOT NULL, + doc_name character varying(128) NOT NULL, + docfmt_code character varying(36) NOT NULL, + doc_hash character varying(64) NOT NULL, + lang_code character varying(3) NOT NULL, + cr_by character varying(256) NOT NULL, + cr_dtimes timestamp NOT NULL, + upd_by character varying(256), + upd_dtimes timestamp, + is_deleted boolean, + del_dtimes timestamp, + CONSTRAINT pk_uindh PRIMARY KEY (uin_ref_id,doccat_code,eff_dtimes), + CONSTRAINT uk_uindh UNIQUE (uin_ref_id,doc_id,eff_dtimes) + +); +-- ddl-end -- +COMMENT ON TABLE archive.uin_document_h IS 'UIN Document History : This to track changes to base table record whenever there is an INSERT/UPDATE/DELETE ( soft delete ), Effective DateTimestamp is used for identifying latest or point in time information. Refer base table description for details. '; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.uin_ref_id IS 'UIN Reference ID: System generated id mapped to a UIN used for references in the system. UIN reference ID is also used as folder/bucket in DFS (HDFS/CEPH) to store documents and biometric CBEFF file. 
refers to idrepo.uin.uin_ref_id'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.doccat_code IS 'Document Category Code: Category code under which document is uploaded during the registration process for ex., POA, POI, etc. Refers to master.doc_category.code'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.doctyp_code IS 'Document Type Code: Document type under which document is uploaded during the registration process for ex., passport, driving license, etc. Refers to master.doc_type.code.'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.eff_dtimes IS 'Effective Datetimestamp : This to track base table record changes whenever there is an INSERT/UPDATE/DELETE ( soft delete ). The current record is effective from this date-time till next change occurs.'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.doc_id IS 'Document ID: ID of the document that is stored in filesystem storage like HDFS/CEPH. If document ID Is not available then name of the file itself can be used as document ID.'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.doc_name IS 'Document Name: Name of the document that is stored in filesystem storage like HDFS/CEPH.'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.docfmt_code IS 'Document Format Code: Document format code of the document that is uploaded during the registration process for ex., PDF, JPG etc. Refers to master.doc_file_format.code'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.doc_hash IS 'Document Hash: Hash value of the document which is stored in DFS (HDFS/CEPH) storage. While reading the document, hash value of the document is verified with this hash value to ensure document validity.'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language. 
'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.cr_by IS 'Created By : ID or name of the user who create / insert record'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE'; +-- ddl-end -- diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-uin_h.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-uin_h.sql new file mode 100644 index 00000000..cdc4a435 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-uin_h.sql @@ -0,0 +1,70 @@ +-- ------------------------------------------------------------------------------------------------- +-- Database Name: mosip_archive +-- Table Name : archive.uin_h +-- Purpose : UIN History : This to track changes to base table record whenever there is an INSERT/UPDATE/DELETE ( soft delete ), Effective DateTimestamp is used for identifying latest or point in time information. Refer base table description for details. +-- Create By : Sadanandegowda +-- Created Date : Dec-2020 +-- +-- Modified Date Modified By Comments / Remarks +-- ------------------------------------------------------------------------------------------ +-- +-- ------------------------------------------------------------------------------------------ + +-- object: archive.uin_h | type: TABLE -- +-- DROP TABLE IF EXISTS archive.uin_h CASCADE; +CREATE TABLE archive.uin_h( + uin_ref_id character varying(36) NOT NULL, + eff_dtimes timestamp NOT NULL, + uin character varying(500) NOT NULL, + uin_hash character varying(128) NOT NULL, + uin_data bytea NOT NULL, + uin_data_hash character varying(64) NOT NULL, + reg_id character varying(39) NOT NULL, + bio_ref_id character varying(128), + status_code character varying(32) NOT NULL, + lang_code character varying(3) NOT NULL, + cr_by character varying(256) NOT NULL, + cr_dtimes timestamp NOT NULL, + upd_by character varying(256), + upd_dtimes timestamp, + is_deleted boolean, + del_dtimes timestamp, + CONSTRAINT pk_uinh PRIMARY KEY (uin_ref_id,eff_dtimes), + CONSTRAINT uk_uinh UNIQUE (uin,eff_dtimes) + +); +-- ddl-end -- +COMMENT ON TABLE archive.uin_h IS 'UIN History : This to track changes to base table record whenever there is an INSERT/UPDATE/DELETE ( soft delete ), Effective DateTimestamp is used for identifying latest or point in time information. Refer base table description for details. '; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_h.uin_ref_id IS 'UIN Reference ID: System generated id mapped to a UIN used for references in the system. UIN reference ID is also used as folder/bucket in DFS (HDFS/CEPH) to store documents and biometric CBEFF file.'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_h.eff_dtimes IS 'Effective Datetimestamp : This to track base table record changes whenever there is an INSERT/UPDATE/DELETE ( soft delete ). 
The current record is effective from this date-time till next change occurs.';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_h.uin IS 'Unique Identification Number : Unique identification number assigned to individual.';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_h.uin_hash IS 'Unique Identification Number Hash: Hash value of Unique identification number assigned to individual.';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_h.uin_data IS 'UIN Data: Information of an individual stored in JSON file as per ID definition defined by the country in the system';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_h.uin_data_hash IS 'UIN Data Hash: Hash value of the UIN data which is stored in uin_data field. While reading the JSON file, hash value of the file is verified with this hash value to ensure file validity.';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_h.reg_id IS 'Registration ID: Latest registration ID through which individual information got processed and registered';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_h.bio_ref_id IS 'Biometric Reference Id: Biometric reference id generated which will be used as a reference id in ABIS systems';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_h.status_code IS 'Status Code: Current Status code of the UIN. Refers to master.status_list.code';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_h.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language. ';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_h.cr_by IS 'Created By : ID or name of the user who create / insert record';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_h.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_h.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_h.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_h.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_h.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
+-- ddl-end --
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-vid.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-vid.sql
new file mode 100644
index 00000000..a71d48bb
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-vid.sql
@@ -0,0 +1,65 @@
+-- -------------------------------------------------------------------------------------------------
+-- Database Name: mosip_archive
+-- Table Name : archive.vid
+-- Purpose : VID : To store the generated list of Virtual IDs mapped to a UIN that can be used for authentication.
+-- Create By : Sadanandegowda
+-- Created Date : Dec-2020
+--
+-- Modified Date Modified By Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+--
+-- ------------------------------------------------------------------------------------------
+
+-- object: archive.vid | type: TABLE --
+-- DROP TABLE IF EXISTS archive.vid CASCADE;
+CREATE TABLE archive.vid(
+	id character varying(36) NOT NULL,
+	vid character varying(36) NOT NULL,
+	uin_hash character varying(128) NOT NULL,
+	uin character varying(500) NOT NULL,
+	vidtyp_code character varying(36) NOT NULL,
+	generated_dtimes timestamp NOT NULL,
+	expiry_dtimes timestamp,
+	status_code character varying(32) NOT NULL,
+	cr_by character varying(256) NOT NULL,
+	cr_dtimes timestamp NOT NULL,
+	upd_by character varying(256),
+	upd_dtimes timestamp,
+	is_deleted boolean,
+	del_dtimes timestamp,
+	CONSTRAINT pk_vid PRIMARY KEY (id),
+	CONSTRAINT uk_vid UNIQUE (vid),
+	CONSTRAINT uk_vid_uinhash UNIQUE (uin_hash,vidtyp_code,generated_dtimes)
+
+);
+-- ddl-end --
+COMMENT ON TABLE archive.vid IS 'VID: To store generated list of Virtual IDs mapped to a UIN that can be used for Authentication. UIN of an individual should be secure, not to be disclosed publicly, so as part of security, VIDs are introduced. VIDs are timebound, can be changed, etc.';
+-- ddl-end --
+COMMENT ON COLUMN archive.vid.id IS 'ID: Unique id generated by the system for each of the virtual id generated';
+-- ddl-end --
+COMMENT ON COLUMN archive.vid.vid IS 'Virtual ID: Virtual Identification Number assigned to an individual. This virtual id can be used for individual authentication instead of using UIN';
+-- ddl-end --
+COMMENT ON COLUMN archive.vid.uin_hash IS 'UIN Hash: Unique Identification Number Hash: Hash value of Unique identification number assigned to individual.';
+-- ddl-end --
+COMMENT ON COLUMN archive.vid.uin IS 'UIN: Unique Identification Number : Unique identification number assigned to an individual, which is mapped to the VID in idmap.';
+-- ddl-end --
+COMMENT ON COLUMN archive.vid.vidtyp_code IS 'Virtual ID Type: Type of a VID. An individual can have multiple VIDs which will be used for multiple purposes. VID type can be perpetual ID, timebound ID, etc.';
+-- ddl-end --
+COMMENT ON COLUMN archive.vid.generated_dtimes IS 'Generated Date and Time: Date and timestamp when the Virtual ID was generated.';
+-- ddl-end --
+COMMENT ON COLUMN archive.vid.expiry_dtimes IS 'Expiry Date and Time: Expiry Date and Time of the Virtual ID';
+-- ddl-end --
+COMMENT ON COLUMN archive.vid.status_code IS 'Status Code: Current Status code of the Virtual ID. Refers to master.status_list.code';
+-- ddl-end --
+COMMENT ON COLUMN archive.vid.cr_by IS 'Created By : ID or name of the user who create / insert record';
+-- ddl-end --
+COMMENT ON COLUMN archive.vid.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+-- ddl-end --
+COMMENT ON COLUMN archive.vid.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+-- ddl-end --
+COMMENT ON COLUMN archive.vid.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+-- ddl-end --
+COMMENT ON COLUMN archive.vid.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
+-- ddl-end --
+COMMENT ON COLUMN archive.vid.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
+-- ddl-end --
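The _h history tables resolve point-in-time lookups through eff_dtimes: the record effective at a given instant is the newest one at or before it. An illustrative sketch (the id and timestamp are example values, not part of the DDL):

-- Sketch: state of one UIN as of a given instant.
SELECT * FROM archive.uin_h
WHERE uin_ref_id = 'some-uin-ref-id'
  AND eff_dtimes <= '2020-12-01 00:00:00'
ORDER BY eff_dtimes DESC
LIMIT 1;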
diff --git a/data-archive/db_scripts/mosip_archive/mosip_archive_db.sql b/data-archive/db_scripts/mosip_archive/mosip_archive_db.sql
new file mode 100644
index 00000000..23bc63e7
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/mosip_archive_db.sql
@@ -0,0 +1,27 @@
+DROP DATABASE IF EXISTS mosip_archive;
+CREATE DATABASE mosip_archive
+	ENCODING = 'UTF8'
+	LC_COLLATE = 'en_US.UTF-8'
+	LC_CTYPE = 'en_US.UTF-8'
+	TABLESPACE = pg_default
+	OWNER = sysadmin
+	TEMPLATE = template0;
+-- ddl-end --
+COMMENT ON DATABASE mosip_archive IS 'Database to store all archive data. Data is archived from multiple tables of each module.';
+-- ddl-end --
+
+\c mosip_archive sysadmin
+
+-- object: archive | type: SCHEMA --
+DROP SCHEMA IF EXISTS archive CASCADE;
+CREATE SCHEMA archive;
+-- ddl-end --
+ALTER SCHEMA archive OWNER TO sysadmin;
+-- ddl-end --
+
+ALTER DATABASE mosip_archive SET search_path TO archive,pg_catalog,public;
+-- ddl-end --
+
+-- REVOKE CONNECT ON DATABASE mosip_archive FROM PUBLIC;
+-- REVOKE ALL ON SCHEMA archive FROM PUBLIC;
+-- REVOKE ALL ON ALL TABLES IN SCHEMA archive FROM PUBLIC;
diff --git a/data-archive/db_scripts/mosip_archive/mosip_archive_db_deploy.sh b/data-archive/db_scripts/mosip_archive/mosip_archive_db_deploy.sh
new file mode 100644
index 00000000..03cb90b6
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/mosip_archive_db_deploy.sh
@@ -0,0 +1,112 @@
+### -- ---------------------------------------------------------------------------------------------------------
+### -- Script Name : ARCHIVE DB Artifacts deploy
+### -- Deploy Module : MOSIP ARCHIVE Database
+### -- Purpose : To deploy MOSIP ARCHIVE database artifacts.
+### -- Create By : Sadanandegowda DM
+### -- Created Date : Dec-2020
+### --
+### -- Modified Date Modified By Comments / Remarks
+### -- -----------------------------------------------------------------------------------------------------------
+
+######### Properties file #############
+set -e
+properties_file="$1"
+echo `date "+%m/%d/%Y %H:%M:%S"` ": $properties_file"
+#properties_file="./app.properties"
+if [ -f "$properties_file" ]
+then
+	echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file \"$properties_file\" found."
+	# Property keys use dots; convert them to underscores so they become valid shell variable names.
+	while IFS='=' read -r key value
+	do
+		key=$(echo $key | tr '.' '_')
+		eval ${key}=\${value}
+	done < "$properties_file"
+else
+	echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file not found. Pass the property file name as an argument."
+	exit 1
+fi +echo `date "+%m/%d/%Y %H:%M:%S"` ": ------------------ Database server and service status check for ${MOSIP_DB_NAME}------------------------" +##############################################LOG FILE CREATION############################################################# + +today=`date '+%d%m%Y_%H%M%S'`; +LOG="${LOG_PATH}${MOSIP_DB_NAME}-${today}.log" +touch $LOG + + +SERVICE=$(PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "select count(1) from pg_roles where rolname IN('sysadmin')";exit; > /dev/null) + +if [ "$SERVICE" -eq 0 ] || [ "$SERVICE" -eq 1 ] +then +echo `date "+%m/%d/%Y %H:%M:%S"` ": Postgres database server and service is up and running" | tee -a $LOG 2>&1 +else +echo `date "+%m/%d/%Y %H:%M:%S"` ": Postgres database server or service is not running" | tee -a $LOG 2>&1 +fi + +echo `date "+%m/%d/%Y %H:%M:%S"` ": ----------------------------------------------------------------------------------------" + +echo `date "+%m/%d/%Y %H:%M:%S"` ": Started sourcing the $MOSIP_DB_NAME Database scripts" | tee -a $LOG 2>&1 +echo `date "+%m/%d/%Y %H:%M:%S"` ": Database scripts are sourcing from :$BASEPATH" | tee -a $LOG 2>&1 + +#========================================DB Deployment process begins on ARCHIVE DB SERVER====================================== + +echo `date "+%m/%d/%Y %H:%M:%S"` ": Database deployment on $MOSIP_DB_NAME database is started...." | tee -a $LOG 2>&1 +cd /$BASEPATH/$MOSIP_DB_NAME/ +VALUE=$(PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "select count(1) from pg_roles where rolname IN('sysadmin','appadmin','dbadmin')";exit; >> $LOG 2>&1) + echo `date "+%m/%d/%Y %H:%M:%S"` ": Checking for existing users.... 
Count of existing users:"$VALUE | tee -a $LOG 2>&1
+# Create the common roles whenever any of sysadmin/dbadmin/appadmin is missing.
+if [ "$VALUE" -lt 3 ]
+then
+	echo `date "+%m/%d/%Y %H:%M:%S"` ": Creating database users" | tee -a $LOG 2>&1
+	PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f $COMMON_ROLE_FILENAME -v sysadminpwd=\'$SYSADMIN_PWD\' -v dbadminpwd=\'$DBADMIN_PWD\' -v appadminpwd=\'$APPADMIN_PWD\' >> $LOG 2>&1
+else
+	echo `date "+%m/%d/%Y %H:%M:%S"` ": Database users already exist" | tee -a $LOG 2>&1
+fi
+
+CONN=$(PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "SELECT count(pg_terminate_backend(pg_stat_activity.pid)) FROM pg_stat_activity WHERE datname = '$MOSIP_DB_NAME' AND pid <> pg_backend_pid()";exit; >> $LOG 2>&1)
+
+if [ "$CONN" -eq 0 ]
+then
+	echo `date "+%m/%d/%Y %H:%M:%S"` ": No active database connections exist on ${MOSIP_DB_NAME}" | tee -a $LOG 2>&1
+else
+	echo `date "+%m/%d/%Y %H:%M:%S"` ": Active connections existed on the database server and have been terminated for DB deployment." | tee -a $LOG 2>&1
+fi
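+### -- Usage sketch (assumed invocation, not part of the original header):
+### --   ./mosip_archive_db_deploy.sh mosip_archive_deploy.properties
+### -- Every psql call in this script is driven by that properties file, so DB_SERVERIP,
+### -- DB_PORT and the *_PWD variables must be populated before the script is run.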
+MASTERCONN=$(PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "select count(1) from pg_roles where rolname IN('archiveuser')";exit; >> $LOG 2>&1)
+
+if [ "$MASTERCONN" -eq 0 ]
+then
+	echo `date "+%m/%d/%Y %H:%M:%S"` ": Creating Archive database user" | tee -a $LOG 2>&1
+	PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f $APP_ROLE_FILENAME -v dbuserpwd=\'$DBUSER_PWD\' >> $LOG 2>&1
+else
+	echo `date "+%m/%d/%Y %H:%M:%S"` ": Archive database user already exists" | tee -a $LOG 2>&1
+fi
+PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f $DB_CREATION_FILENAME >> $LOG 2>&1
+PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f $ACCESS_GRANT_FILENAME >> $LOG 2>&1
+PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f $DDL_FILENAME >> $LOG 2>&1
+
+
+if [ "$DML_FLAG" -eq 1 ]
+then
+	echo `date "+%m/%d/%Y %H:%M:%S"` ": Deploying DML for ${MOSIP_DB_NAME} database" | tee -a $LOG 2>&1
+	PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -a -b -f $DML_FILENAME >> $LOG 2>&1
+else
+	echo `date "+%m/%d/%Y %H:%M:%S"` ": No DML deployment is required for ${MOSIP_DB_NAME}" | tee -a $LOG 2>&1
+fi
+
+if [ $(grep -c ERROR $LOG) -ne 0 ]
+then
+	echo `date "+%m/%d/%Y %H:%M:%S"` ": Database deployment is completed with ERRORS. Please check the logs for more information" | tee -a $LOG 2>&1
+	echo `date "+%m/%d/%Y %H:%M:%S"` ": END of MOSIP database deployment" | tee -a $LOG 2>&1
+else
+	echo `date "+%m/%d/%Y %H:%M:%S"` ": Database deployment completed successfully. Please check the logs for more information" | tee -a $LOG 2>&1
+	echo `date "+%m/%d/%Y %H:%M:%S"` ": END of MOSIP \"${MOSIP_DB_NAME}\" database deployment" | tee -a $LOG 2>&1
+fi
+
+echo "******************************************"`date "+%m/%d/%Y %H:%M:%S"` "*****************************************************" >> $LOG 2>&1
+
+#========================================DB Deployment process completes on ARCHIVE DB SERVER======================================
diff --git a/data-archive/db_scripts/mosip_archive/mosip_archive_ddl_deploy.sql b/data-archive/db_scripts/mosip_archive/mosip_archive_ddl_deploy.sql
new file mode 100644
index 00000000..54319ffc
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/mosip_archive_ddl_deploy.sql
@@ -0,0 +1,19 @@
+\c mosip_archive sysadmin
+
+\ir ddl/archive-app_audit_log.sql
+\ir ddl/archive-applicant_demographic_consumed.sql
+\ir ddl/archive-applicant_document_consumed.sql
+\ir ddl/archive-auth_transaction.sql
+\ir ddl/archive-otp_transaction.sql
+\ir ddl/archive-processed_prereg_list.sql
+\ir ddl/archive-reg_appointment_consumed.sql
+\ir ddl/archive-reg_demo_dedupe_list.sql
+\ir ddl/archive-reg_manual_verification.sql
+\ir ddl/archive-registered_authdevice_master_h.sql
+\ir ddl/archive-registered_regdevice_master_h.sql
+\ir ddl/archive-registration_transaction.sql
+\ir ddl/archive-uin_biometric_h.sql
+\ir ddl/archive-uin_document_h.sql
+\ir ddl/archive-uin_h.sql
+\ir ddl/archive-vid.sql
+
diff --git a/data-archive/db_scripts/mosip_archive/mosip_archive_deploy.properties b/data-archive/db_scripts/mosip_archive/mosip_archive_deploy.properties
new file mode 100644
index
00000000..cb0b3559 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/mosip_archive_deploy.properties @@ -0,0 +1,15 @@ +DB_SERVERIP= +DB_PORT= +SU_USER=postgres +DEFAULT_DB_NAME=postgres +MOSIP_DB_NAME=mosip_archive +SYSADMIN_USER=sysadmin +BASEPATH=/home/madmin/database/ +LOG_PATH=/home/madmin/logs/ +COMMON_ROLE_FILENAME=mosip_role_common.sql +APP_ROLE_FILENAME=mosip_role_archiveuser.sql +DB_CREATION_FILENAME=mosip_archive_db.sql +ACCESS_GRANT_FILENAME=mosip_archive_grants.sql +DDL_FILENAME=mosip_archive_ddl_deploy.sql +DML_FLAG=0 +DML_FILENAME=mosip_archive_dml_deploy.sql diff --git a/data-archive/db_scripts/mosip_archive/mosip_archive_grants.sql b/data-archive/db_scripts/mosip_archive/mosip_archive_grants.sql new file mode 100644 index 00000000..52ec69a4 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/mosip_archive_grants.sql @@ -0,0 +1,48 @@ +\c mosip_archive sysadmin + + +-- object: grant_b0ae4f0dce | type: PERMISSION -- +GRANT CREATE,CONNECT,TEMPORARY + ON DATABASE mosip_archive + TO sysadmin; +-- ddl-end -- + +-- object: grant_99dd1cb062 | type: PERMISSION -- +GRANT CREATE,CONNECT,TEMPORARY + ON DATABASE mosip_archive + TO appadmin; +-- ddl-end -- + +-- object: grant_18180691b7 | type: PERMISSION -- +GRANT CONNECT + ON DATABASE mosip_archive + TO archiveuser; +-- ddl-end -- + +-- object: grant_3543fb6cf7 | type: PERMISSION -- +GRANT CREATE,USAGE + ON SCHEMA archive + TO sysadmin; +-- ddl-end -- + +-- object: grant_8e1a2559ed | type: PERMISSION -- +GRANT USAGE + ON SCHEMA archive + TO archiveuser; +-- ddl-end -- + +-- object: grant_8e1a2559ed | type: PERMISSION -- +GRANT SELECT,INSERT,UPDATE,DELETE,TRUNCATE,REFERENCES + ON ALL TABLES IN SCHEMA archive + TO archiveuser; +-- ddl-end -- + +ALTER DEFAULT PRIVILEGES IN SCHEMA archive + GRANT SELECT,INSERT,UPDATE,DELETE,REFERENCES ON TABLES TO archiveuser; + + +-- object: grant_78ed2da4ee | type: PERMISSION -- +GRANT SELECT,INSERT,UPDATE,DELETE,TRUNCATE,REFERENCES + ON ALL TABLES IN SCHEMA archive + TO appadmin; +-- ddl-end -- diff --git a/data-archive/db_scripts/mosip_archive/mosip_role_archiveuser.sql b/data-archive/db_scripts/mosip_archive/mosip_role_archiveuser.sql new file mode 100644 index 00000000..35c502ca --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/mosip_role_archiveuser.sql @@ -0,0 +1,7 @@ +-- object: archiveuser | type: ROLE -- +-- DROP ROLE IF EXISTS archiveuser; +CREATE ROLE archiveuser WITH + INHERIT + LOGIN + PASSWORD :dbuserpwd; +-- ddl-end -- diff --git a/data-archive/db_scripts/mosip_archive/mosip_role_common.sql b/data-archive/db_scripts/mosip_archive/mosip_role_common.sql new file mode 100644 index 00000000..4e4c083c --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/mosip_role_common.sql @@ -0,0 +1,31 @@ +-- object: sysadmin | type: ROLE -- +--DROP ROLE IF EXISTS sysadmin; +CREATE ROLE sysadmin WITH + SUPERUSER + CREATEDB + CREATEROLE + INHERIT + LOGIN + REPLICATION + PASSWORD :sysadminpwd; +-- ddl-end -- + +-- object: dbadmin | type: ROLE -- +--DROP ROLE IF EXISTS dbadmin; +CREATE ROLE dbadmin WITH + CREATEDB + CREATEROLE + INHERIT + LOGIN + REPLICATION + PASSWORD :dbadminpwd; +-- ddl-end -- + +-- object: appadmin | type: ROLE -- +--DROP ROLE IF EXISTS appadmin; +CREATE ROLE appadmin WITH + INHERIT + LOGIN + PASSWORD :appadminpwd; +-- ddl-end -- + From 8c9a836181c978aa6a1fd1c97df9ddbb5e248b42 Mon Sep 17 00:00:00 2001 From: Sada Date: Thu, 11 Feb 2021 21:52:13 +0530 Subject: [PATCH 002/130] Added archive scripts --- .../mosip_ida/mosip_archive_ida.ini | 18 ++ 
.../mosip_ida/mosip_archive_ida_table1.py | 107 +++++++++++ .../mosip_ida/mosip_archive_ida_table2.py | 107 +++++++++++ .../mosip_ida/mosip_archive_job_ida.sh | 16 ++ .../mosip_idrepo/mosip_archive_idrepo.ini | 20 ++ .../mosip_archive_idrepo_table1.py | 107 +++++++++++ .../mosip_archive_idrepo_table2.py | 107 +++++++++++ .../mosip_archive_idrepo_table3.py | 107 +++++++++++ .../mosip_idrepo/mosip_archive_job_idrepo.sh | 19 ++ .../mosip_prereg/mosip_archive_job_prereg.sh | 22 +++ .../mosip_prereg/mosip_archive_prereg.ini | 20 ++ .../mosip_archive_prereg_table1.py | 107 +++++++++++ .../mosip_archive_prereg_table2.py | 107 +++++++++++ .../mosip_archive_prereg_table3.py | 107 +++++++++++ .../mosip_archive_prereg_table4.py | 107 +++++++++++ .../mosip_regprc/mosip_archive_job_regprc.sh | 13 ++ .../mosip_regprc/mosip_archive_regprc.ini | 17 ++ .../mosip_archive_regprc_table1.py | 107 +++++++++++ data-archive/db_scripts/README.MD | 178 ++++++++++++++++++ .../ddl/archive-app_audit_log.sql | 75 ++++++++ ...archive-applicant_demographic_consumed.sql | 54 ++++++ .../archive-applicant_document_consumed.sql | 75 ++++++++ .../ddl/archive-auth_transaction.sql | 90 +++++++++ .../ddl/archive-otp_transaction.sql | 62 ++++++ .../ddl/archive-processed_prereg_list.sql | 41 ++++ .../ddl/archive-reg_appointment_consumed.sql | 58 ++++++ .../ddl/archive-reg_demo_dedupe_list.sql | 48 +++++ .../ddl/archive-reg_manual_verification.sql | 73 +++++++ ...archive-registered_authdevice_master_h.sql | 82 ++++++++ .../archive-registered_regdevice_master_h.sql | 82 ++++++++ .../ddl/archive-registration_transaction.sql | 72 +++++++ .../ddl/archive-uin_biometric_h.sql | 61 ++++++ .../ddl/archive-uin_document_h.sql | 67 +++++++ .../mosip_archive/ddl/archive-uin_h.sql | 70 +++++++ .../mosip_archive/ddl/archive-vid.sql | 65 +++++++ .../mosip_archive/mosip_archive_db.sql | 27 +++ .../mosip_archive/mosip_archive_db_deploy.sh | 112 +++++++++++ .../mosip_archive_ddl_deploy.sql | 19 ++ .../mosip_archive_deploy.properties | 15 ++ .../mosip_archive/mosip_archive_grants.sql | 48 +++++ .../mosip_archive/mosip_role_archiveuser.sql | 7 + .../mosip_archive/mosip_role_common.sql | 31 +++ 42 files changed, 2727 insertions(+) create mode 100644 data-archive/archive-jobs/mosip_ida/mosip_archive_ida.ini create mode 100644 data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table1.py create mode 100644 data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table2.py create mode 100644 data-archive/archive-jobs/mosip_ida/mosip_archive_job_ida.sh create mode 100644 data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo.ini create mode 100644 data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table1.py create mode 100644 data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table2.py create mode 100644 data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table3.py create mode 100644 data-archive/archive-jobs/mosip_idrepo/mosip_archive_job_idrepo.sh create mode 100644 data-archive/archive-jobs/mosip_prereg/mosip_archive_job_prereg.sh create mode 100644 data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg.ini create mode 100644 data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table1.py create mode 100644 data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table2.py create mode 100644 data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table3.py create mode 100644 data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table4.py create mode 100644 
data-archive/archive-jobs/mosip_regprc/mosip_archive_job_regprc.sh create mode 100644 data-archive/archive-jobs/mosip_regprc/mosip_archive_regprc.ini create mode 100644 data-archive/archive-jobs/mosip_regprc/mosip_archive_regprc_table1.py create mode 100644 data-archive/db_scripts/README.MD create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-app_audit_log.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-applicant_demographic_consumed.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-applicant_document_consumed.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-auth_transaction.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-otp_transaction.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-processed_prereg_list.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-reg_appointment_consumed.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-reg_demo_dedupe_list.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-reg_manual_verification.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-registered_authdevice_master_h.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-registered_regdevice_master_h.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-registration_transaction.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-uin_biometric_h.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-uin_document_h.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-uin_h.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-vid.sql create mode 100644 data-archive/db_scripts/mosip_archive/mosip_archive_db.sql create mode 100644 data-archive/db_scripts/mosip_archive/mosip_archive_db_deploy.sh create mode 100644 data-archive/db_scripts/mosip_archive/mosip_archive_ddl_deploy.sql create mode 100644 data-archive/db_scripts/mosip_archive/mosip_archive_deploy.properties create mode 100644 data-archive/db_scripts/mosip_archive/mosip_archive_grants.sql create mode 100644 data-archive/db_scripts/mosip_archive/mosip_role_archiveuser.sql create mode 100644 data-archive/db_scripts/mosip_archive/mosip_role_common.sql diff --git a/data-archive/archive-jobs/mosip_ida/mosip_archive_ida.ini b/data-archive/archive-jobs/mosip_ida/mosip_archive_ida.ini new file mode 100644 index 00000000..9fc5cfea --- /dev/null +++ b/data-archive/archive-jobs/mosip_ida/mosip_archive_ida.ini @@ -0,0 +1,18 @@ +[MOSIP-DB-SECTION] +source_db_serverip=13.233.223.29 +source_db_port=30090 +source_db_name=mosip_ida +source_schema_name=ida +source_db_uname=idacuser +source_db_pass=Mosip@dev123 +archive_table1=auth_transaction +archive_table2=otp_transaction + +archive_db_serverip=13.233.223.29 +archive_db_port=30090 +archive_db_name=mosip_archive +archive_schema_name=archive +archive_db_uname=archiveuser +archive_db_pass=Mosip@dev123 + +archive_older_than_days = 2 \ No newline at end of file diff --git a/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table1.py b/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table1.py new file mode 100644 index 00000000..5bef0734 --- /dev/null +++ b/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table1.py @@ -0,0 +1,107 @@ +#-- ------------------------------------------------------------------------------------------------- +#-- Job Name : ID Authentication DB 
Tables Archive
+#-- DB Name : mosip_ida
+#-- Table Names : auth_transaction
+#-- Purpose : Job to Archive Data in ID Authentication DB for above mentioned tables
+#-- Create By : Sadanandegowda DM
+#-- Created Date : Dec-2020
+#--
+#-- Modified Date Modified By Comments / Remarks
+#-- ------------------------------------------------------------------------------------------
+#--
+#-- ------------------------------------------------------------------------------------------
+
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+import sys
+
+import configparser
+import psycopg2
+import datetime
+
+from configparser import ConfigParser
+from datetime import datetime
+
+def config(filename='mosip_archive_ida.ini', section='MOSIP-DB-SECTION'):
+    parser = ConfigParser()
+    parser.read(filename)
+    dbparam = {}
+    if parser.has_section(section):
+        params = parser.items(section)
+        for param in params:
+            dbparam[param[0]] = param[1]
+    else:
+        raise Exception('Section {0} not found in the {1} file'.format(section, filename))
+
+    return dbparam
+
+def getValues(row):
+    finalValues =""
+    for values in row:
+        finalValues = finalValues+"'"+str(values)+"',"
+
+    finalValues = finalValues[0:-1]
+    return finalValues
+
+def dataArchive():
+    sourceConn = None
+    archiveConn = None
+    try:
+
+        dbparam = config()
+
+        print('Connecting to the PostgreSQL database...')
+        sourceConn = psycopg2.connect(user=dbparam["source_db_uname"],
+                                      password=dbparam["source_db_pass"],
+                                      host=dbparam["source_db_serverip"],
+                                      port=dbparam["source_db_port"],
+                                      database=dbparam["source_db_name"])
+        archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"],
+                                       password=dbparam["archive_db_pass"],
+                                       host=dbparam["archive_db_serverip"],
+                                       port=dbparam["archive_db_port"],
+                                       database=dbparam["archive_db_name"])
+
+        sourceCur = sourceConn.cursor()
+        archiveCur = archiveConn.cursor()
+
+        tableName=dbparam["archive_table1"]
+        sschemaName = dbparam["source_schema_name"]
+        aschemaName = dbparam["archive_schema_name"]
+        oldDays = dbparam["archive_older_than_days"]
+
+        print(tableName)
+        select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'"
+        sourceCur.execute(select_query)
+        rows = sourceCur.fetchall()
+        select_count = sourceCur.rowcount
+        print(select_count, ": Record selected for archive from ", tableName)
+        if select_count > 0:
+            for row in rows:
+                rowValues = getValues(row)
+                insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")"
+                archiveCur.execute(insert_query)
+                archiveConn.commit()
+                insert_count = archiveCur.rowcount
+                print(insert_count, ": Record inserted successfully ")
+                if insert_count > 0:
+                    delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE id ='"+row[0]+"'"
+                    sourceCur.execute(delete_query)
+                    sourceConn.commit()
+                    delete_count = sourceCur.rowcount
+                    print(delete_count, ": Record deleted successfully")
+
+    except (Exception, psycopg2.DatabaseError) as error:
+        print(error)
+    finally:
+        if sourceConn is not None:
+            sourceCur.close()
+            sourceConn.close()
+            print('Database source connection closed.')
+        if archiveConn is not None:
+            archiveCur.close()
+            archiveConn.close()
+            print('Database archive connection closed.')
+
+if __name__ == '__main__':
+    dataArchive()
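The per-table jobs in this patch differ only in the table name and delete key, and the string-concatenated SQL above renders SQL NULLs as the literal string 'None' and breaks on values containing quotes. A possible consolidation is sketched below; it is not part of this patch, the helper name and parameters are illustrative, and it relies on psycopg2's sql module to quote identifiers while %s placeholders adapt values (including timestamp keys such as eff_dtimes).

from psycopg2 import sql

def archive_table(source_conn, archive_conn, sschema, aschema, table, key_cols, old_days):
    # Move rows older than the cutoff; delete from the source only after the insert commits.
    with source_conn.cursor() as src, archive_conn.cursor() as dst:
        src.execute(
            sql.SQL("SELECT * FROM {}.{} WHERE cr_dtimes < NOW() - %s * INTERVAL '1 day'")
               .format(sql.Identifier(sschema), sql.Identifier(table)),
            (int(old_days),))
        colnames = [d[0] for d in src.description]
        key_idx = [colnames.index(c) for c in key_cols]
        for row in src.fetchall():
            dst.execute(
                sql.SQL("INSERT INTO {}.{} VALUES ({})").format(
                    sql.Identifier(aschema), sql.Identifier(table),
                    sql.SQL(", ").join(sql.Placeholder() * len(row))),
                row)
            archive_conn.commit()
            cond = sql.SQL(" AND ").join(
                sql.SQL("{} = %s").format(sql.Identifier(c)) for c in key_cols)
            src.execute(
                sql.SQL("DELETE FROM {}.{} WHERE {}").format(
                    sql.Identifier(sschema), sql.Identifier(table), cond),
                [row[i] for i in key_idx])
            source_conn.commit()

A call such as archive_table(sourceConn, archiveConn, 'ida', 'archive', 'auth_transaction', ['id'], 2) would then replace the body of this whole file.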
diff --git a/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table2.py b/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table2.py
new file mode 100644
index 00000000..fab08c65
--- /dev/null
+++ b/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table2.py
@@ -0,0 +1,107 @@
+#-- -------------------------------------------------------------------------------------------------
+#-- Job Name : ID Authentication DB Tables Archive
+#-- DB Name : mosip_ida
+#-- Table Names : otp_transaction
+#-- Purpose : Job to Archive Data in ID Authentication DB for above mentioned tables
+#-- Create By : Sadanandegowda DM
+#-- Created Date : Dec-2020
+#--
+#-- Modified Date Modified By Comments / Remarks
+#-- ------------------------------------------------------------------------------------------
+#--
+#-- ------------------------------------------------------------------------------------------
+
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+import sys
+
+import configparser
+import psycopg2
+import datetime
+
+from configparser import ConfigParser
+from datetime import datetime
+
+def config(filename='mosip_archive_ida.ini', section='MOSIP-DB-SECTION'):
+    parser = ConfigParser()
+    parser.read(filename)
+    dbparam = {}
+    if parser.has_section(section):
+        params = parser.items(section)
+        for param in params:
+            dbparam[param[0]] = param[1]
+    else:
+        raise Exception('Section {0} not found in the {1} file'.format(section, filename))
+
+    return dbparam
+
+def getValues(row):
+    finalValues =""
+    for values in row:
+        finalValues = finalValues+"'"+str(values)+"',"
+
+    finalValues = finalValues[0:-1]
+    return finalValues
+
+def dataArchive():
+    sourceConn = None
+    archiveConn = None
+    try:
+
+        dbparam = config()
+
+        print('Connecting to the PostgreSQL database...')
+        sourceConn = psycopg2.connect(user=dbparam["source_db_uname"],
+                                      password=dbparam["source_db_pass"],
+                                      host=dbparam["source_db_serverip"],
+                                      port=dbparam["source_db_port"],
+                                      database=dbparam["source_db_name"])
+        archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"],
+                                       password=dbparam["archive_db_pass"],
+                                       host=dbparam["archive_db_serverip"],
+                                       port=dbparam["archive_db_port"],
+                                       database=dbparam["archive_db_name"])
+
+        sourceCur = sourceConn.cursor()
+        archiveCur = archiveConn.cursor()
+
+        tableName=dbparam["archive_table2"]
+        sschemaName = dbparam["source_schema_name"]
+        aschemaName = dbparam["archive_schema_name"]
+        oldDays = dbparam["archive_older_than_days"]
+
+        print(tableName)
+        select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'"
+        sourceCur.execute(select_query)
+        rows = sourceCur.fetchall()
+        select_count = sourceCur.rowcount
+        print(select_count, ": Record selected for archive from ", tableName)
+        if select_count > 0:
+            for row in rows:
+                rowValues = getValues(row)
+                insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")"
+                archiveCur.execute(insert_query)
+                archiveConn.commit()
+                insert_count = archiveCur.rowcount
+                print(insert_count, ": Record inserted successfully ")
+                if insert_count > 0:
+                    delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE id ='"+row[0]+"'"
+                    sourceCur.execute(delete_query)
+                    sourceConn.commit()
+                    delete_count = sourceCur.rowcount
+                    print(delete_count, ": Record deleted successfully")
+
+    except (Exception, psycopg2.DatabaseError) as error:
+        print(error)
+    finally:
+        if sourceConn is not None:
+            sourceCur.close()
+            sourceConn.close()
+            print('Database source connection closed.')
+        if archiveConn is not None:
+            archiveCur.close()
+            archiveConn.close()
+            print('Database archive connection closed.')
+
+if __name__ == '__main__':
+    dataArchive()
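The job shells below are left unscheduled. Assuming a nightly window, a crontab entry along these lines could drive them (the install path and log file are placeholders, not defined anywhere in this patch):

0 2 * * * cd /opt/mosip/data-archive/archive-jobs/mosip_ida && sh mosip_archive_job_ida.sh >> /var/log/mosip_archive_ida.log 2>&1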
diff --git a/data-archive/archive-jobs/mosip_ida/mosip_archive_job_ida.sh b/data-archive/archive-jobs/mosip_ida/mosip_archive_job_ida.sh
new file mode 100644
index 00000000..0e867724
--- /dev/null
+++ b/data-archive/archive-jobs/mosip_ida/mosip_archive_job_ida.sh
@@ -0,0 +1,16 @@
+### -- ---------------------------------------------------------------------------------------------------------
+### -- Script Name : IDA Archive Job
+### -- Deploy Module : IDA
+### -- Purpose : To Archive IDA tables which are marked for archive.
+### -- Create By : Sadanandegowda DM
+### -- Created Date : Dec-2020
+### --
+### -- Modified Date Modified By Comments / Remarks
+### -- ----------------------------------------------------------------------------------------
+
+python mosip_archive_ida_table1.py &
+sleep 5m
+
+python mosip_archive_ida_table2.py &
+
+#===============================================================================================
diff --git a/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo.ini b/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo.ini
new file mode 100644
index 00000000..ec62c41d
--- /dev/null
+++ b/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo.ini
@@ -0,0 +1,20 @@
+[MOSIP-DB-SECTION]
+source_db_serverip=13.233.223.29
+source_db_port=30090
+source_db_name=mosip_idrepo
+source_schema_name=idrepo
+source_db_uname=idrepouser
+source_db_pass=Mosip@dev123
+archive_table1=uin_h
+archive_table2=uin_biometric_h
+archive_table3=uin_document_h
+
+
+archive_db_serverip=13.233.223.29
+archive_db_port=30090
+archive_db_name=mosip_archive
+archive_schema_name=archive
+archive_db_uname=archiveuser
+archive_db_pass=Mosip@dev123
+
+archive_older_than_days = 2
\ No newline at end of file
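Note that these .ini files carry database passwords in plain text alongside the code. One mitigation, given only as a sketch (the environment variable names are made up), is to let the environment override the file after config() returns:

import os

# Assumed variable names; fall back to the ini value when the variable is unset.
dbparam["source_db_pass"] = os.getenv("MOSIP_SOURCE_DB_PASS", dbparam["source_db_pass"])
dbparam["archive_db_pass"] = os.getenv("MOSIP_ARCHIVE_DB_PASS", dbparam["archive_db_pass"])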
diff --git a/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table1.py b/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table1.py
new file mode 100644
index 00000000..0f51685b
--- /dev/null
+++ b/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table1.py
@@ -0,0 +1,107 @@
+#-- -------------------------------------------------------------------------------------------------
+#-- Job Name : ID Repository DB Tables Archive
+#-- DB Name : mosip_idrepo
+#-- Table Names : uin_h
+#-- Purpose : Job to Archive Data in ID Repository DB for above mentioned tables
+#-- Create By : Sadanandegowda DM
+#-- Created Date : Dec-2020
+#--
+#-- Modified Date Modified By Comments / Remarks
+#-- ------------------------------------------------------------------------------------------
+#--
+#-- ------------------------------------------------------------------------------------------
+
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+import sys
+
+import configparser
+import psycopg2
+import datetime
+
+from configparser import ConfigParser
+from datetime import datetime
+
+def config(filename='mosip_archive_idrepo.ini', section='MOSIP-DB-SECTION'):
+    parser = ConfigParser()
+    parser.read(filename)
+    dbparam = {}
+    if parser.has_section(section):
+        params = parser.items(section)
+        for param in params:
+            dbparam[param[0]] = param[1]
+    else:
+        raise Exception('Section {0} not found in the {1} file'.format(section, filename))
+
+    return dbparam
+
+def getValues(row):
+    finalValues =""
+    for values in row:
+        finalValues = finalValues+"'"+str(values)+"',"
+
+    finalValues = finalValues[0:-1]
+    return finalValues
+
+def dataArchive():
+    sourceConn = None
+    archiveConn = None
+    try:
+
+        dbparam = config()
+
+        print('Connecting to the PostgreSQL database...')
+        sourceConn = psycopg2.connect(user=dbparam["source_db_uname"],
+                                      password=dbparam["source_db_pass"],
+                                      host=dbparam["source_db_serverip"],
+                                      port=dbparam["source_db_port"],
+                                      database=dbparam["source_db_name"])
+        archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"],
+                                       password=dbparam["archive_db_pass"],
+                                       host=dbparam["archive_db_serverip"],
+                                       port=dbparam["archive_db_port"],
+                                       database=dbparam["archive_db_name"])
+
+        sourceCur = sourceConn.cursor()
+        archiveCur = archiveConn.cursor()
+
+        tableName=dbparam["archive_table1"]
+        sschemaName = dbparam["source_schema_name"]
+        aschemaName = dbparam["archive_schema_name"]
+        oldDays = dbparam["archive_older_than_days"]
+
+        print(tableName)
+        select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'"
+        sourceCur.execute(select_query)
+        rows = sourceCur.fetchall()
+        select_count = sourceCur.rowcount
+        print(select_count, ": Record selected for archive from ", tableName)
+        if select_count > 0:
+            for row in rows:
+                rowValues = getValues(row)
+                insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")"
+                archiveCur.execute(insert_query)
+                archiveConn.commit()
+                insert_count = archiveCur.rowcount
+                print(insert_count, ": Record inserted successfully ")
+                if insert_count > 0:
+                    delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE uin_ref_id ='"+row[0]+"' AND eff_dtimes='"+str(row[1])+"'"
+                    sourceCur.execute(delete_query)
+                    sourceConn.commit()
+                    delete_count = sourceCur.rowcount
+                    print(delete_count, ": Record deleted successfully")
+
+    except (Exception, psycopg2.DatabaseError) as error:
+        print(error)
+    finally:
+        if sourceConn is not None:
+            sourceCur.close()
+            sourceConn.close()
+            print('Database source connection closed.')
+        if archiveConn is not None:
+            archiveCur.close()
+            archiveConn.close()
+            print('Database archive connection closed.')
+
+if __name__ == '__main__':
+    dataArchive()
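uin_h is keyed on (uin_ref_id, eff_dtimes), so the delete must match both columns, and eff_dtimes comes back from fetchall() as a datetime, which is why the concatenated form above needs the str() cast. A placeholder-based equivalent (a sketch only, reusing the script's own variable names) avoids the casting and quoting altogether:

# Sketch: psycopg2 adapts the datetime key itself; no manual quoting or str() needed.
delete_query = ("DELETE FROM " + sschemaName + "." + tableName +
                " WHERE uin_ref_id = %s AND eff_dtimes = %s")
sourceCur.execute(delete_query, (row[0], row[1]))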
diff --git a/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table2.py b/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table2.py
new file mode 100644
index 00000000..ba6f98f7
--- /dev/null
+++ b/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table2.py
@@ -0,0 +1,107 @@
+#-- -------------------------------------------------------------------------------------------------
+#-- Job Name : ID Repository DB Tables Archive
+#-- DB Name : mosip_idrepo
+#-- Table Names : uin_biometric_h
+#-- Purpose : Job to Archive Data in ID Repository DB for above mentioned tables
+#-- Create By : Sadanandegowda DM
+#-- Created Date : Dec-2020
+#--
+#-- Modified Date Modified By Comments / Remarks
+#-- ------------------------------------------------------------------------------------------
+#--
+#-- ------------------------------------------------------------------------------------------
+
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+import sys
+
+import configparser
+import psycopg2
+import datetime
+
+from configparser import ConfigParser
+from datetime import datetime
+
+def config(filename='mosip_archive_idrepo.ini', section='MOSIP-DB-SECTION'):
+    parser = ConfigParser()
+    parser.read(filename)
+    dbparam = {}
+    if parser.has_section(section):
+        params = parser.items(section)
+        for param in params:
+            dbparam[param[0]] = param[1]
+    else:
+        raise Exception('Section {0} not found in the {1} file'.format(section, filename))
+
+    return dbparam
+
+def getValues(row):
+    finalValues =""
+    for values in row:
+        finalValues = finalValues+"'"+str(values)+"',"
+
+    finalValues = finalValues[0:-1]
+    return finalValues
+
+def dataArchive():
+    sourceConn = None
+    archiveConn = None
+    try:
+
+        dbparam = config()
+
+        print('Connecting to the PostgreSQL database...')
+        sourceConn = psycopg2.connect(user=dbparam["source_db_uname"],
+                                      password=dbparam["source_db_pass"],
+                                      host=dbparam["source_db_serverip"],
+                                      port=dbparam["source_db_port"],
+                                      database=dbparam["source_db_name"])
+        archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"],
+                                       password=dbparam["archive_db_pass"],
+                                       host=dbparam["archive_db_serverip"],
+                                       port=dbparam["archive_db_port"],
+                                       database=dbparam["archive_db_name"])
+
+        sourceCur = sourceConn.cursor()
+        archiveCur = archiveConn.cursor()
+
+        tableName=dbparam["archive_table2"]
+        sschemaName = dbparam["source_schema_name"]
+        aschemaName = dbparam["archive_schema_name"]
+        oldDays = dbparam["archive_older_than_days"]
+
+        print(tableName)
+        select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'"
+        sourceCur.execute(select_query)
+        rows = sourceCur.fetchall()
+        select_count = sourceCur.rowcount
+        print(select_count, ": Record selected for archive from ", tableName)
+        if select_count > 0:
+            for row in rows:
+                rowValues = getValues(row)
+                insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")"
+                archiveCur.execute(insert_query)
+                archiveConn.commit()
+                insert_count = archiveCur.rowcount
+                print(insert_count, ": Record inserted successfully ")
+                if insert_count > 0:
+                    delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE uin_ref_id ='"+row[0]+"' AND biometric_file_type='"+row[1]+"' AND eff_dtimes='"+str(row[2])+"'"
+                    sourceCur.execute(delete_query)
+                    sourceConn.commit()
+                    delete_count = sourceCur.rowcount
+                    print(delete_count, ": Record deleted successfully")
+
+    except (Exception, psycopg2.DatabaseError) as error:
+        print(error)
+    finally:
+        if sourceConn is not None:
+            sourceCur.close()
+            sourceConn.close()
+            print('Database source connection closed.')
+        if archiveConn is not None:
+            archiveCur.close()
+            archiveConn.close()
+            print('Database archive connection closed.')
+
+if __name__ == '__main__':
+    dataArchive()
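Because each row is committed to the archive before it is deleted from the source, a crash between the two commits leaves the row in both databases, and the next run would then abort on the archive table's primary key. Since every archive table in this patch declares a primary key, one assumed mitigation (PostgreSQL 9.5 or later) is to make the insert idempotent:

# Sketch: re-inserting an already-archived row becomes a no-op instead of an error.
insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+") ON CONFLICT DO NOTHING"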
diff --git a/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table3.py b/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table3.py
new file mode 100644
index 00000000..e73ed331
--- /dev/null
+++ b/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table3.py
@@ -0,0 +1,107 @@
+#-- -------------------------------------------------------------------------------------------------
+#-- Job Name : ID Repository DB Tables Archive
+#-- DB Name : mosip_idrepo
+#-- Table Names : uin_document_h
+#-- Purpose : Job to Archive Data in ID Repository DB for above mentioned tables
+#-- Create By : Sadanandegowda DM
+#-- Created Date : Dec-2020
+#--
+#-- Modified Date Modified By Comments / Remarks
+#-- ------------------------------------------------------------------------------------------
+#--
+#-- ------------------------------------------------------------------------------------------
+
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+import sys
+
+import configparser
+import psycopg2
+import datetime
+
+from configparser import ConfigParser
+from datetime import datetime
+
+def config(filename='mosip_archive_idrepo.ini', section='MOSIP-DB-SECTION'):
+    parser = ConfigParser()
+    parser.read(filename)
+    dbparam = {}
+    if parser.has_section(section):
+        params = parser.items(section)
+        for param in params:
+            dbparam[param[0]] = param[1]
+    else:
+        raise Exception('Section {0} not found in the {1} file'.format(section, filename))
+
+    return dbparam
+
+def getValues(row):
+    finalValues =""
+    for values in row:
+        finalValues = finalValues+"'"+str(values)+"',"
+
+    finalValues = finalValues[0:-1]
+    return finalValues
+
+def dataArchive():
+    sourceConn = None
+    archiveConn = None
+    try:
+
+        dbparam = config()
+
+        print('Connecting to the PostgreSQL database...')
+        sourceConn = psycopg2.connect(user=dbparam["source_db_uname"],
+                                      password=dbparam["source_db_pass"],
+                                      host=dbparam["source_db_serverip"],
+                                      port=dbparam["source_db_port"],
+                                      database=dbparam["source_db_name"])
+        archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"],
+                                       password=dbparam["archive_db_pass"],
+                                       host=dbparam["archive_db_serverip"],
+                                       port=dbparam["archive_db_port"],
+                                       database=dbparam["archive_db_name"])
+
+        sourceCur = sourceConn.cursor()
+        archiveCur = archiveConn.cursor()
+
+        tableName=dbparam["archive_table3"]
+        sschemaName = dbparam["source_schema_name"]
+        aschemaName = dbparam["archive_schema_name"]
+        oldDays = dbparam["archive_older_than_days"]
+
+        print(tableName)
+        select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'"
+        sourceCur.execute(select_query)
+        rows = sourceCur.fetchall()
+        select_count = sourceCur.rowcount
+        print(select_count, ": Record selected for archive from ", tableName)
+        if select_count > 0:
+            for row in rows:
+                rowValues = getValues(row)
+                insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")"
+                archiveCur.execute(insert_query)
+                archiveConn.commit()
+                insert_count = archiveCur.rowcount
+                print(insert_count, ": Record inserted successfully ")
+                if insert_count > 0:
+                    delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE uin_ref_id ='"+row[0]+"' AND doccat_code='"+row[1]+"' AND eff_dtimes='"+str(row[3])+"'"
+                    sourceCur.execute(delete_query)
+                    sourceConn.commit()
+                    delete_count = sourceCur.rowcount
+                    print(delete_count, ": Record deleted successfully")
+
+    except (Exception, psycopg2.DatabaseError) as error:
+        print(error)
+    finally:
+        if sourceConn is not None:
+            sourceCur.close()
+            sourceConn.close()
+            print('Database source connection closed.')
+        if archiveConn is not None:
+            archiveCur.close()
+            archiveConn.close()
+            print('Database archive connection closed.')
+
+if __name__ == '__main__':
+    dataArchive()
diff --git a/data-archive/archive-jobs/mosip_idrepo/mosip_archive_job_idrepo.sh b/data-archive/archive-jobs/mosip_idrepo/mosip_archive_job_idrepo.sh
new file mode 100644
index 00000000..53433df9
--- /dev/null
+++ b/data-archive/archive-jobs/mosip_idrepo/mosip_archive_job_idrepo.sh
@@ -0,0 +1,19 @@
+### -- ---------------------------------------------------------------------------------------------------------
+### -- Script Name : ID Repository Archive Job
+### -- Deploy Module : ID Repository
+### -- Purpose : To Archive ID Repository tables which are marked for archive.
+### -- Create By : Sadanandegowda DM +### -- Created Date : Dec-2020 +### -- +### -- Modified Date Modified By Comments / Remarks +### -- ---------------------------------------------------------------------------------------- + +python mosip_archive_idrepo_table1.py & +sleep 5m + +python mosip_archive_idrepo_table2.py & +sleep 5m + +python mosip_archive_idrepo_table3.py & + +#=============================================================================================== diff --git a/data-archive/archive-jobs/mosip_prereg/mosip_archive_job_prereg.sh b/data-archive/archive-jobs/mosip_prereg/mosip_archive_job_prereg.sh new file mode 100644 index 00000000..31b35123 --- /dev/null +++ b/data-archive/archive-jobs/mosip_prereg/mosip_archive_job_prereg.sh @@ -0,0 +1,22 @@ +### -- --------------------------------------------------------------------------------------------------------- +### -- Script Name : Pre Registration Archive Job +### -- Deploy Module : Pre registration +### -- Purpose : To Archive Pre Registration tables which are marked for archive. +### -- Create By : Sadanandegowda DM +### -- Created Date : Dec-2020 +### -- +### -- Modified Date Modified By Comments / Remarks +### -- ---------------------------------------------------------------------------------------- + +python mosip_archive_prereg_table1.py & +sleep 5m + +python mosip_archive_prereg_table2.py & +sleep 5m + +python mosip_archive_prereg_table3.py & +sleep 5m + +python mosip_archive_prereg_table4.py & + +#=============================================================================================== diff --git a/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg.ini b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg.ini new file mode 100644 index 00000000..58cf5b8a --- /dev/null +++ b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg.ini @@ -0,0 +1,20 @@ +[MOSIP-DB-SECTION] +source_db_serverip=13.233.223.29 +source_db_port=30090 +source_db_name=mosip_prereg +source_schema_name=prereg +source_db_uname=prereguser +source_db_pass=Mosip@dev123 +archive_table1=applicant_demographic_consumed +archive_table2=applicant_document_consumed +archive_table3=reg_appointment_consumed +archive_table4=processed_prereg_list + +archive_db_serverip=13.233.223.29 +archive_db_port=30090 +archive_db_name=mosip_archive +archive_schema_name=archive +archive_db_uname=archiveuser +archive_db_pass=Mosip@dev123 + +archive_older_than_days = 2 \ No newline at end of file diff --git a/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table1.py b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table1.py new file mode 100644 index 00000000..ebf1bbe7 --- /dev/null +++ b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table1.py @@ -0,0 +1,107 @@ +#-- ------------------------------------------------------------------------------------------------- +#-- Job Name : Pre Registration DB Tables Archive +#-- DB Name : mosip_prereg +#-- Table Names : applicant_demographic_consumed +#-- Purpose : Job to Archive Data in pre registration DB for above mentioned tables +#-- Create By : Sadanandegowda DM +#-- Created Date : Dec-2020 +#-- +#-- Modified Date Modified By Comments / Remarks +#-- ------------------------------------------------------------------------------------------ +#-- +#-- ------------------------------------------------------------------------------------------ + +#!/usr/bin/python +# -*- coding: utf-8 -*- +import sys + +import configparser +import psycopg2 +import datetime + +from 
configparser import ConfigParser +from datetime import datetime + +def config(filename='mosip_archive_prereg.ini', section='MOSIP-DB-SECTION'): + parser = ConfigParser() + parser.read(filename) + dbparam = {} + if parser.has_section(section): + params = parser.items(section) + for param in params: + dbparam[param[0]] = param[1] + else: + raise Exception('Section {0} not found in the {1} file'.format(section, filename)) + + return dbparam + +def getValues(row): + finalValues ="" + for values in row: + finalValues = finalValues+"'"+str(values)+"'," + + finalValues = finalValues[0:-1] + return finalValues + +def dataArchive(): + sourseConn = None + archiveConn = None + try: + + dbparam = config() + + print('Connecting to the PostgreSQL database...') + sourseConn = psycopg2.connect(user=dbparam["source_db_uname"], + password=dbparam["source_db_pass"], + host=dbparam["source_db_serverip"], + port=dbparam["source_db_port"], + database=dbparam["source_db_name"]) + archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"], + password=dbparam["archive_db_pass"], + host=dbparam["archive_db_serverip"], + port=dbparam["archive_db_port"], + database=dbparam["archive_db_name"]) + + sourceCur = sourseConn.cursor() + archiveCur = archiveConn.cursor() + + tableName=dbparam["archive_table1"] + sschemaName = dbparam["source_schema_name"] + aschemaName = dbparam["archive_schema_name"] + oldDays = dbparam["archive_older_than_days"] + + print(tableName) + select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'" + sourceCur.execute(select_query) + rows = sourceCur.fetchall() + select_count = sourceCur.rowcount + print(select_count, ": Record selected for archive from ", tableName) + if select_count > 0: + for row in rows: + rowValues = getValues(row) + insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")" + archiveCur.execute(insert_query) + archiveConn.commit() + insert_count = archiveCur.rowcount + print(insert_count, ": Record inserted successfully ") + if insert_count > 0: + delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE prereg_id ='"+row[0]+"'" + sourceCur.execute(delete_query) + sourseConn.commit() + delete_count = sourceCur.rowcount + print(delete_count, ": Record deleted successfully") + + except (Exception, psycopg2.DatabaseError) as error: + print(error) + finally: + if sourseConn is not None: + sourceCur.close() + sourseConn.close() + print('Database sourse connection closed.') + if archiveConn is not None: + archiveCur.close() + archiveConn.close() + print('Database archive connection closed.') + +if __name__ == '__main__': + dataArchive() diff --git a/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table2.py b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table2.py new file mode 100644 index 00000000..db2fca1e --- /dev/null +++ b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table2.py @@ -0,0 +1,107 @@ +#-- ------------------------------------------------------------------------------------------------- +#-- Job Name : Pre Registration DB Tables Archive +#-- DB Name : mosip_prereg +#-- Table Names : applicant_document_consumed +#-- Purpose : Job to Archive Data in pre registration DB for above mentioned tables +#-- Create By : Sadanandegowda DM +#-- Created Date : Dec-2020 +#-- +#-- Modified Date Modified By Comments / Remarks +#-- ------------------------------------------------------------------------------------------ +#-- +#-- 
------------------------------------------------------------------------------------------ + +#!/usr/bin/python +# -*- coding: utf-8 -*- +import sys + +import configparser +import psycopg2 +import datetime + +from configparser import ConfigParser +from datetime import datetime + +def config(filename='mosip_archive_prereg.ini', section='MOSIP-DB-SECTION'): + parser = ConfigParser() + parser.read(filename) + dbparam = {} + if parser.has_section(section): + params = parser.items(section) + for param in params: + dbparam[param[0]] = param[1] + else: + raise Exception('Section {0} not found in the {1} file'.format(section, filename)) + + return dbparam + +def getValues(row): + finalValues ="" + for values in row: + finalValues = finalValues+"'"+str(values)+"'," + + finalValues = finalValues[0:-1] + return finalValues + +def dataArchive(): + sourseConn = None + archiveConn = None + try: + + dbparam = config() + + print('Connecting to the PostgreSQL database...') + sourseConn = psycopg2.connect(user=dbparam["source_db_uname"], + password=dbparam["source_db_pass"], + host=dbparam["source_db_serverip"], + port=dbparam["source_db_port"], + database=dbparam["source_db_name"]) + archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"], + password=dbparam["archive_db_pass"], + host=dbparam["archive_db_serverip"], + port=dbparam["archive_db_port"], + database=dbparam["archive_db_name"]) + + sourceCur = sourseConn.cursor() + archiveCur = archiveConn.cursor() + + tableName=dbparam["archive_table2"] + sschemaName = dbparam["source_schema_name"] + aschemaName = dbparam["archive_schema_name"] + oldDays = dbparam["archive_older_than_days"] + + print(tableName) + select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'" + sourceCur.execute(select_query) + rows = sourceCur.fetchall() + select_count = sourceCur.rowcount + print(select_count, ": Record selected for archive from ", tableName) + if select_count > 0: + for row in rows: + rowValues = getValues(row) + insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")" + archiveCur.execute(insert_query) + archiveConn.commit() + insert_count = archiveCur.rowcount + print(insert_count, ": Record inserted successfully ") + if insert_count > 0: + delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE id ='"+row[0]+"'" + sourceCur.execute(delete_query) + sourseConn.commit() + delete_count = sourceCur.rowcount + print(delete_count, ": Record deleted successfully") + + except (Exception, psycopg2.DatabaseError) as error: + print(error) + finally: + if sourseConn is not None: + sourceCur.close() + sourseConn.close() + print('Database sourse connection closed.') + if archiveConn is not None: + archiveCur.close() + archiveConn.close() + print('Database archive connection closed.') + +if __name__ == '__main__': + dataArchive() diff --git a/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table3.py b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table3.py new file mode 100644 index 00000000..d1422594 --- /dev/null +++ b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table3.py @@ -0,0 +1,107 @@ +#-- ------------------------------------------------------------------------------------------------- +#-- Job Name : Pre Registration DB Tables Archive +#-- DB Name : mosip_prereg +#-- Table Names : applicant_appointment_consumed +#-- Purpose : Job to Archive Data in pre registration DB for above mentioned tables +#-- Create By : Sadanandegowda DM 
+#-- Created Date : Dec-2020 +#-- +#-- Modified Date Modified By Comments / Remarks +#-- ------------------------------------------------------------------------------------------ +#-- +#-- ------------------------------------------------------------------------------------------ + +#!/usr/bin/python +# -*- coding: utf-8 -*- +import sys + +import configparser +import psycopg2 +import datetime + +from configparser import ConfigParser +from datetime import datetime + +def config(filename='mosip_archive_prereg.ini', section='MOSIP-DB-SECTION'): + parser = ConfigParser() + parser.read(filename) + dbparam = {} + if parser.has_section(section): + params = parser.items(section) + for param in params: + dbparam[param[0]] = param[1] + else: + raise Exception('Section {0} not found in the {1} file'.format(section, filename)) + + return dbparam + +def getValues(row): + finalValues ="" + for values in row: + finalValues = finalValues+"'"+str(values)+"'," + + finalValues = finalValues[0:-1] + return finalValues + +def dataArchive(): + sourseConn = None + archiveConn = None + try: + + dbparam = config() + + print('Connecting to the PostgreSQL database...') + sourseConn = psycopg2.connect(user=dbparam["source_db_uname"], + password=dbparam["source_db_pass"], + host=dbparam["source_db_serverip"], + port=dbparam["source_db_port"], + database=dbparam["source_db_name"]) + archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"], + password=dbparam["archive_db_pass"], + host=dbparam["archive_db_serverip"], + port=dbparam["archive_db_port"], + database=dbparam["archive_db_name"]) + + sourceCur = sourseConn.cursor() + archiveCur = archiveConn.cursor() + + tableName=dbparam["archive_table3"] + sschemaName = dbparam["source_schema_name"] + aschemaName = dbparam["archive_schema_name"] + oldDays = dbparam["archive_older_than_days"] + + print(tableName) + select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'" + sourceCur.execute(select_query) + rows = sourceCur.fetchall() + select_count = sourceCur.rowcount + print(select_count, ": Record selected for archive from ", tableName) + if select_count > 0: + for row in rows: + rowValues = getValues(row) + insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")" + archiveCur.execute(insert_query) + archiveConn.commit() + insert_count = archiveCur.rowcount + print(insert_count, ": Record inserted successfully ") + if insert_count > 0: + delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE id ='"+row[0]+"'" + sourceCur.execute(delete_query) + sourseConn.commit() + delete_count = sourceCur.rowcount + print(delete_count, ": Record deleted successfully") + + except (Exception, psycopg2.DatabaseError) as error: + print(error) + finally: + if sourseConn is not None: + sourceCur.close() + sourseConn.close() + print('Database sourse connection closed.') + if archiveConn is not None: + archiveCur.close() + archiveConn.close() + print('Database archive connection closed.') + +if __name__ == '__main__': + dataArchive() diff --git a/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table4.py b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table4.py new file mode 100644 index 00000000..ebf1bbe7 --- /dev/null +++ b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table4.py @@ -0,0 +1,107 @@ +#-- ------------------------------------------------------------------------------------------------- +#-- Job Name : Pre Registration DB Tables Archive +#-- DB 
Name : mosip_prereg
+#-- Table Names   : processed_prereg_list
+#-- Purpose       : Job to archive data in the pre-registration DB for the above mentioned tables
+#-- Create By     : Sadanandegowda DM
+#-- Created Date  : Dec-2020
+#--
+#-- Modified Date        Modified By         Comments / Remarks
+#-- ------------------------------------------------------------------------------------------
+#--
+#-- ------------------------------------------------------------------------------------------
+
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+import sys
+
+import psycopg2
+
+from configparser import ConfigParser
+from datetime import datetime
+
+def config(filename='mosip_archive_prereg.ini', section='MOSIP-DB-SECTION'):
+    parser = ConfigParser()
+    parser.read(filename)
+    dbparam = {}
+    if parser.has_section(section):
+        params = parser.items(section)
+        for param in params:
+            dbparam[param[0]] = param[1]
+    else:
+        raise Exception('Section {0} not found in the {1} file'.format(section, filename))
+
+    return dbparam
+
+def getValues(row):
+    finalValues = ""
+    for values in row:
+        finalValues = finalValues+"'"+str(values)+"',"
+
+    finalValues = finalValues[0:-1]
+    return finalValues
+
+def dataArchive():
+    sourceConn = None
+    archiveConn = None
+    try:
+
+        dbparam = config()
+
+        print('Connecting to the PostgreSQL database...')
+        sourceConn = psycopg2.connect(user=dbparam["source_db_uname"],
+                                      password=dbparam["source_db_pass"],
+                                      host=dbparam["source_db_serverip"],
+                                      port=dbparam["source_db_port"],
+                                      database=dbparam["source_db_name"])
+        archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"],
+                                       password=dbparam["archive_db_pass"],
+                                       host=dbparam["archive_db_serverip"],
+                                       port=dbparam["archive_db_port"],
+                                       database=dbparam["archive_db_name"])
+
+        sourceCur = sourceConn.cursor()
+        archiveCur = archiveConn.cursor()
+
+        tableName = dbparam["archive_table4"]
+        sschemaName = dbparam["source_schema_name"]
+        aschemaName = dbparam["archive_schema_name"]
+        oldDays = dbparam["archive_older_than_days"]
+
+        print(tableName)
+        select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'"
+        sourceCur.execute(select_query)
+        rows = sourceCur.fetchall()
+        select_count = sourceCur.rowcount
+        print(select_count, ": records selected for archive from", tableName)
+        if select_count > 0:
+            for row in rows:
+                rowValues = getValues(row)
+                insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")"
+                archiveCur.execute(insert_query)
+                archiveConn.commit()
+                insert_count = archiveCur.rowcount
+                print(insert_count, ": record inserted successfully")
+                if insert_count > 0:
+                    delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE prereg_id='"+str(row[0])+"'"
+                    sourceCur.execute(delete_query)
+                    sourceConn.commit()
+                    delete_count = sourceCur.rowcount
+                    print(delete_count, ": record deleted successfully")
+
+    except (Exception, psycopg2.DatabaseError) as error:
+        print(error)
+    finally:
+        if sourceConn is not None:
+            sourceCur.close()
+            sourceConn.close()
+            print('Database source connection closed.')
+        if archiveConn is not None:
+            archiveCur.close()
+            archiveConn.close()
+            print('Database archive connection closed.')
+
+if __name__ == '__main__':
+    dataArchive()
diff --git a/data-archive/archive-jobs/mosip_regprc/mosip_archive_job_regprc.sh b/data-archive/archive-jobs/mosip_regprc/mosip_archive_job_regprc.sh
new file mode 100644
index 00000000..cfa6acbf
--- /dev/null
+++ b/data-archive/archive-jobs/mosip_regprc/mosip_archive_job_regprc.sh
@@ -0,0 +1,13 @@
+### -- ---------------------------------------------------------------------------------------------------------
+### -- Script Name   : Registration Processor Archive Job
+### -- Deploy Module : Registration Processor
+### -- Purpose       : To archive Registration Processor tables which are marked for archive.
+### -- Create By     : Sadanandegowda DM
+### -- Created Date  : Dec-2020
+### --
+### -- Modified Date        Modified By         Comments / Remarks
+### -- ----------------------------------------------------------------------------------------
+
+python mosip_archive_regprc_table1.py &
+
+#===============================================================================================
diff --git a/data-archive/archive-jobs/mosip_regprc/mosip_archive_regprc.ini b/data-archive/archive-jobs/mosip_regprc/mosip_archive_regprc.ini
new file mode 100644
index 00000000..ff68deda
--- /dev/null
+++ b/data-archive/archive-jobs/mosip_regprc/mosip_archive_regprc.ini
@@ -0,0 +1,17 @@
+[MOSIP-DB-SECTION]
+source_db_serverip=13.233.223.29
+source_db_port=30090
+source_db_name=mosip_regprc
+source_schema_name=regprc
+source_db_uname=regprcuser
+source_db_pass=Mosip@dev123
+archive_table1=registration_transaction
+
+archive_db_serverip=13.233.223.29
+archive_db_port=30090
+archive_db_name=mosip_archive
+archive_schema_name=archive
+archive_db_uname=archiveuser
+archive_db_pass=Mosip@dev123
+
+archive_older_than_days = 2
\ No newline at end of file
diff --git a/data-archive/archive-jobs/mosip_regprc/mosip_archive_regprc_table1.py b/data-archive/archive-jobs/mosip_regprc/mosip_archive_regprc_table1.py
new file mode 100644
index 00000000..a58aabb2
--- /dev/null
+++ b/data-archive/archive-jobs/mosip_regprc/mosip_archive_regprc_table1.py
@@ -0,0 +1,107 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+#-- -------------------------------------------------------------------------------------------------
+#-- Job Name      : Registration Processor DB Tables Archive
+#-- DB Name       : mosip_regprc
+#-- Table Names   : registration_transaction
+#-- Purpose       : Job to archive data in the registration processor DB for the above mentioned tables
+#-- Create By     : Sadanandegowda DM
+#-- Created Date  : Dec-2020
+#--
+#-- Modified Date        Modified By         Comments / Remarks
+#-- ------------------------------------------------------------------------------------------
+#--
+#-- ------------------------------------------------------------------------------------------
+
+import sys
+
+import psycopg2
+
+from configparser import ConfigParser
+from datetime import datetime
+
+def config(filename='mosip_archive_regprc.ini', section='MOSIP-DB-SECTION'):
+    parser = ConfigParser()
+    parser.read(filename)
+    dbparam = {}
+    if parser.has_section(section):
+        params = parser.items(section)
+        for param in params:
+            dbparam[param[0]] = param[1]
+    else:
+        raise Exception('Section {0} not found in the {1} file'.format(section, filename))
+
+    return dbparam
+
+def getValues(row):
+    finalValues = ""
+    for values in row:
+        finalValues = finalValues+"'"+str(values)+"',"
+
+    finalValues = finalValues[0:-1]
+    return finalValues
+
+def dataArchive():
+    sourceConn = None
+    archiveConn = None
+    try:
+
+        dbparam = config()
+
+        print('Connecting to the PostgreSQL database...')
+        sourceConn = psycopg2.connect(user=dbparam["source_db_uname"],
+                                      password=dbparam["source_db_pass"],
+                                      host=dbparam["source_db_serverip"],
+                                      port=dbparam["source_db_port"],
+                                      database=dbparam["source_db_name"])
+        archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"],
+                                       password=dbparam["archive_db_pass"],
+                                       host=dbparam["archive_db_serverip"],
+                                       port=dbparam["archive_db_port"],
+                                       database=dbparam["archive_db_name"])
+
+        sourceCur = sourceConn.cursor()
+        archiveCur = archiveConn.cursor()
+
+        tableName = dbparam["archive_table1"]
+        sschemaName = dbparam["source_schema_name"]
+        aschemaName = dbparam["archive_schema_name"]
+        oldDays = dbparam["archive_older_than_days"]
+
+        print(tableName)
+        select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'"
+        sourceCur.execute(select_query)
+        rows = sourceCur.fetchall()
+        select_count = sourceCur.rowcount
+        print(select_count, ": records selected for archive from", tableName)
+        if select_count > 0:
+            for row in rows:
+                rowValues = getValues(row)
+                insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")"
+                archiveCur.execute(insert_query)
+                archiveConn.commit()
+                insert_count = archiveCur.rowcount
+                print(insert_count, ": record inserted successfully")
+                if insert_count > 0:
+                    delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE id='"+str(row[0])+"'"
+                    sourceCur.execute(delete_query)
+                    sourceConn.commit()
+                    delete_count = sourceCur.rowcount
+                    print(delete_count, ": record deleted successfully")
+
+    except (Exception, psycopg2.DatabaseError) as error:
+        print(error)
+    finally:
+        if sourceConn is not None:
+            sourceCur.close()
+            sourceConn.close()
+            print('Database source connection closed.')
+        if archiveConn is not None:
+            archiveCur.close()
+            archiveConn.close()
+            print('Database archive connection closed.')
+
+if __name__ == '__main__':
+    dataArchive()
diff --git a/data-archive/db_scripts/README.MD b/data-archive/db_scripts/README.MD
new file mode 100644
index 00000000..4d2ff543
--- /dev/null
+++ b/data-archive/db_scripts/README.MD
@@ -0,0 +1,178 @@
+## MOSIP Commons module databases (**mosip_master, mosip_kernel, mosip_idrepo, mosip_idmap, mosip_iam, mosip_audit**): scripts inventory and deployment guidelines on a PostgreSQL database.
+
+#### The details below give clear information on the complete database script structure, with instructions for database script deployment.
+
+## Prerequisites
+
+* DB server and access details
+
+* The Postgres client (psql) has to be installed on the deployment servers.
+
+* Copy the latest database scripts (DDL, DML, .sh, etc.) from the git repository onto the DB deployment server.
+
+* The necessary details have to be updated in the properties file against the relevant variables being used (details listed below).
+
+* Database objects related to MOSIP modules are placed in the **mosip_base_directory**>>db_scripts>>mosip_<db_name> folder in the git repository.
+
+**Example:** the commons module script folder is /**mosip_base_directory**>>db_scripts>>mosip_kernel, where all the database scripts related to kernel are available.
+
+* Create a log file directory on the DB deployment server before updating the properties file. Please follow this step to create it:
+
+	bash-4.2$ mkdir /mosip_base_directory/
+
+* If we wish to place the log files under a different directory than the one mentioned above, then we need to create that directory and specify its path in the properties file, as in the example below.
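+
+For example (illustrative paths only), to keep the logs under a dedicated sub-directory:
+
+	bash-4.2$ mkdir -p /mosip_base_directory/db_deployment_logs
+
+and then point the LOG_PATH variable of the properties file (described below) at /mosip_base_directory/db_deployment_logs/.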
+
+* Pull the DB deployment scripts from the Git repository to the deployment server and start deploying, OR
+
+* If you are pulling the scripts to a local system from the Git repository and pushing them back to the deployment server using WinSCP, then make a note to modify the following encoding settings in WinSCP before pushing the files to the deployment server --> Open WinSCP --> Options --> Preferences --> Transfer --> Edit --> In the "Transfer mode" section --> select "Text" --> Click OK --> Click OK
+
+## Each database folder has the following files / folders
+
+* **ddl folder:** This folder contains all the database data definition language (DDL) scripts to create or alter the database objects of this module.
+
+* **dml folder:** This folder contains the scripts (insert/update/delete scripts) to create the seed data / metadata needed to run this module.
+
+* **mosip_<db_name>_db.sql:** This file contains the database creation script of this module.
+
+* **mosip_<db_name>_grants.sql:** The needed privilege / grant scripts assigned to database users / roles to access database objects are described in this file.
+
+* **mosip_role_<db_name>user.sql:** The creation script for the role that will be used by the application to perform DML operations is defined here.
+
+* **mosip_role_common.sql:** This file contains the creation script for the common roles that are needed to manage the database.
+
+* **mosip_<db_name>_ddl_deploy.sql:** This is a wrapper script used to **deploy the DDL scripts available in the ddl folder**. It is also used to set the script run sequence, to manage all the needed dependencies across the DB objects being created.
+
+* **mosip_<db_name>_dml_deploy.sql:** This is a wrapper script used to **deploy the DML scripts available in the dml folder**. It is also used to set the script run sequence, to manage all the needed dependencies across DB objects.
+
+* **mosip_<db_name>_db_deploy.sh:** This is the deployment shell script present in each database folder/directory.
+
+* **mosip_<db_name>_deploy.properties:** This is the properties file present in each database folder.
+
+* **mosip_commons_db_deployment.sh:** This is the .sh file present in the /home/madmin/database directory; it is executed to deploy all the commons databases in a single command execution.
+
+**Note:** Not all modules will have DML scripts. Make the necessary changes to the DML variables in the properties file for the modules where DML exists.
+
+**Note:** No need to change anything in the shell script unless it is really causing a problem or a further implementation is being introduced.
+
+Once we are done sourcing the database files, we need to follow the DB deployment process below, modifying the properties file according to the requirement.
+
+## Deployment can be performed in two ways, based on the requirement:
+1) DB deployment for all common module databases
+2) DB deployment for single or selected databases
+
+### Properties file variable details and description: the properties file has to be updated with the required details before proceeding with the deployment steps for each database.
+
+**DB_SERVERIP:** Contains the destination DB server IP (e.g. 10.0.0.1) where the deployment is targeted.
+
+**DB_PORT:** Contains the port on which the Postgres server accepts connections, e.g. 5433.
+
+**SU_USER:** Contains the Postgres superuser name used to connect to the Postgres database, i.e. postgres.
+
+**SU_USER_PWD:** Contains the password for the Postgres superuser.
+
+**DEFAULT_DB_NAME:** Default database name to connect to on the respective Postgres server, e.g. postgres.
+
+**MOSIP_DB_NAME:** The MOSIP database name for which the deployment is scheduled.
+
+**SYSADMIN_USER:** Contains the MOSIP common role which is, in effect, the superuser for the remaining actions performed by the shell script.
+
+**SYSADMIN_PWD:** Contains the credential details for SYSADMIN_USER.
+
+**DBADMIN_PWD:** Contains the credential details for DBADMIN_USER.
+
+**APPADMIN_PWD:** Contains the credential details for APPADMIN_USER.
+
+**DBUSER_PWD:** Contains the credential details for the database application user.
+
+**BASE_PATH:** Path of the DB scripts kept on the deployment server.
+
+**LOG_PATH:** Path where the deployment log file will be created.
+
+**COMMON_ROLE_FILENAME:** Contains the common role creation filename, e.g. mosip_role_common.sql.
+
+**APP_ROLE_FILENAME:** Contains the specific DB user role creation filename, e.g. mosip_role_databaseuser.sql.
+
+**DB_CREATION_FILENAME:** Contains the specific DB creation script name, e.g. mosip_database_db.sql.
+
+**ACCESS_GRANT_FILENAME:** Contains the filename of the access provisioning script for the users created above, e.g. mosip_<db_name>_grants.sql.
+
+**DDL_FILENAME:** DDL script filename, e.g. mosip_<db_name>_ddl_deploy.sql.
+
+**DML_FLAG:** A flag variable whose value is 0 or 1 depending on whether any DML exists for the particular DB: flag=0 means no DML, flag=1 means DML exists.
+
+**DML_FILENAME:** DML script filename, needed only if the flag=1; otherwise it is empty or null, e.g. mosip_<db_name>_dml_deploy.sql.
+
+**Note - Make sure there is a single empty line at the end of the .properties file's content, and no spaces at the beginning or end of the parameter values.**
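+
+For illustration, a filled-in properties file for a hypothetical mosip_kernel deployment might look like the following (all values are placeholders; substitute the details of your own environment):
+
+	DB_SERVERIP=10.0.0.1
+	DB_PORT=5433
+	SU_USER=postgres
+	SU_USER_PWD=<superuser_password>
+	DEFAULT_DB_NAME=postgres
+	MOSIP_DB_NAME=mosip_kernel
+	SYSADMIN_USER=sysadmin
+	SYSADMIN_PWD=<sysadmin_password>
+	DBADMIN_PWD=<dbadmin_password>
+	APPADMIN_PWD=<appadmin_password>
+	DBUSER_PWD=<dbuser_password>
+	BASE_PATH=/home/madmin/database/
+	LOG_PATH=/mosip_base_directory/db_deployment_logs/
+	COMMON_ROLE_FILENAME=mosip_role_common.sql
+	APP_ROLE_FILENAME=mosip_role_kerneluser.sql
+	DB_CREATION_FILENAME=mosip_kernel_db.sql
+	ACCESS_GRANT_FILENAME=mosip_kernel_grants.sql
+	DDL_FILENAME=mosip_kernel_ddl_deploy.sql
+	DML_FLAG=1
+	DML_FILENAME=mosip_kernel_dml_deploy.sql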
+## DB deployment for all common module databases with single-click deployment:
+
+**Step 1** -> Make the required prior modifications to all the respective database properties files **(mosip_<db_name>_deploy.properties)** in the respective database directories. The path of the properties file and the variable list remain the same as explained above. Once the properties files are ready, access the directory where the deployment script is kept.
+
+**Step 2** -> To deploy all common module databases, run the **"mosip_commons_db_deployment.sh"** script, which is available in the /database directory. To access the **"mosip_commons_db_deployment.sh"** script, follow the commands given below:
+
+	**Enter:-bash-4.2$** cd /home/madmin/database/
+
+	**Enter:-bash-4.2$** bash mosip_commons_db_deployment.sh
+
+**Step 3** -> Please observe the Post Deployment Validation steps below.
+
+**No modification is required to be done on any of the .sql files in the database folder. If one is required to be modified, then please reach out to the database team and have it modified.**
+
+## DB deployment for single or selected databases
+
+**Step 1** -> Update the properties (.properties) file with the required parameter values for the single or selected databases.
+
+All these .sh and properties files are kept in each database directory. Please follow the steps below:
+
+**Step 2** -> Log in to the deployment server/VM.
+
+**Step 3** -> Check the pwd (present working directory). Make sure we are inside the right database folder/directory to run the deployment for that specific database.
+
+**Enter:-bash-4.2$** pwd
+This should be the path if we are performing the deployment for the database named **mosip_<db_name>**: /home/madmin/database/mosip_<db_name>
+
+**Step 4** -> Please move all the necessary files from the local directory to the deployment server directory under the respective databases.
+
+**Step 5** -> After the prior modifications to the properties file, run the deployment shell script as given below:
+
+**Enter:-bash-4.2$** bash mosip_<db_name>_db_deploy.sh mosip_<db_name>_deploy.properties
+
+**Step 6** -> Please observe the Post Deployment Validation steps below.
+
+**No modification is required to be done on any of the .sql files in the database folder. If one is required to be modified, then please reach out to the database team and have it modified.**
+
+### Post Deployment Validation
+
+**Note:** If you encounter connection errors during the run, please recheck the details (IP address, port number, database name, password) entered in the properties file.
+
+**Key points during or after the script execution:**
+
+ * Properties file found message
+
+ * Server status
+
+ * Accessing the right path for the DB deployment
+
+ * Creation of the respective roles
+
+ * Check for any active connections
+
+ * Creation of roles, databases and schemas, granting of access, and creation of the respective tables
+
+ * Loading of data (DML operations), valid only for those DBs which carry DML actions
+
+ * End of the sourcing or deployment process
+
+**During all the above stages, please watch out for any errors, which will be captured in the log file.**
+
+Kindly ignore **NOTICE** or **SKIPPING** messages, as these messages state that the particular action is already in place, hence the SQL script skips performing it again.
+
+### After the deployment process, look out for each database deployment log file, which captures all stages of the deployment. The log file path is defined in the properties file of the respective database.
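+
+### A note on the archive job scripts
+
+The archive jobs under data-archive/archive-jobs all follow the same pattern: select the rows older than archive_older_than_days from the source table, insert them one at a time into the archive table, and then delete them from the source. They build their SQL statements by string concatenation, which works for trusted, internally generated values but is fragile: every value is quoted as text, and a quote character inside a value breaks the statement. The sketch below is illustrative only and is not part of these scripts; it shows one safer variant of the same move, assuming psycopg2 (as the jobs already use) and connections opened from the same .ini parameters. Table and schema names cannot be bound as query parameters, so they are wrapped with psycopg2.sql.Identifier, while row values and the cutoff are bound by the driver.
+
+```python
+from psycopg2 import sql
+from psycopg2.extras import execute_values
+
+def archive_table(source_conn, archive_conn, sschema, aschema, table, older_than_days):
+    """Copy rows older than the cutoff into the archive DB, then delete them
+    from the source. Set-based; values are bound by the driver."""
+    with source_conn.cursor() as source_cur, archive_conn.cursor() as archive_cur:
+        # Compute the cutoff once so the SELECT and the DELETE agree exactly.
+        source_cur.execute("SELECT NOW() - %s * INTERVAL '1 day'", (int(older_than_days),))
+        cutoff = source_cur.fetchone()[0]
+
+        source_cur.execute(
+            sql.SQL("SELECT * FROM {}.{} WHERE cr_dtimes < %s").format(
+                sql.Identifier(sschema), sql.Identifier(table)),
+            (cutoff,))
+        rows = source_cur.fetchall()
+        print(len(rows), ": records selected for archive from", table)
+
+        if rows:
+            # execute_values expands the single VALUES %s placeholder in batches.
+            insert = sql.SQL("INSERT INTO {}.{} VALUES %s").format(
+                sql.Identifier(aschema), sql.Identifier(table))
+            execute_values(archive_cur, insert.as_string(archive_cur), rows)
+            archive_conn.commit()
+
+            # Delete from the source only after the archive copy is committed.
+            source_cur.execute(
+                sql.SQL("DELETE FROM {}.{} WHERE cr_dtimes < %s").format(
+                    sql.Identifier(sschema), sql.Identifier(table)),
+                (cutoff,))
+            source_conn.commit()
+```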
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-app_audit_log.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-app_audit_log.sql new file mode 100644 index 00000000..001596ae --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-app_audit_log.sql @@ -0,0 +1,75 @@ +-- ------------------------------------------------------------------------------------------------- +-- Database Name: mosip_archive +-- Table Name : archive.app_audit_log +-- Purpose : Application Audit Log : To track application related audit details for analysing, auditing and reporting purposes +-- Create By : Sadanandegowda +-- Created Date : Dec-2020 +-- +-- Modified Date Modified By Comments / Remarks +-- ------------------------------------------------------------------------------------------ +-- +-- ------------------------------------------------------------------------------------------ + +-- object: archive.app_audit_log | type: TABLE -- +-- DROP TABLE IF EXISTS archive.app_audit_log CASCADE; +CREATE TABLE archive.app_audit_log( + log_id character varying(64) NOT NULL, + log_dtimes timestamp NOT NULL, + log_desc character varying(2048), + event_id character varying(64) NOT NULL, + event_type character varying(64) NOT NULL, + event_name character varying(128) NOT NULL, + action_dtimes timestamp NOT NULL, + host_name character varying(128) NOT NULL, + host_ip character varying(16) NOT NULL, + session_user_id character varying(256) NOT NULL, + session_user_name character varying(128), + app_id character varying(64) NOT NULL, + app_name character varying(128) NOT NULL, + module_id character varying(64), + module_name character varying(128), + ref_id character varying(64), + ref_id_type character varying(64), + cr_by character varying(256) NOT NULL, + CONSTRAINT pk_audlog_log_id PRIMARY KEY (log_id) + +); +-- ddl-end -- +COMMENT ON TABLE archive.app_audit_log IS 'Application Audit Log : To track application related audit details for analysing, auditing and reporting purposes'; +-- ddl-end -- +COMMENT ON COLUMN archive.app_audit_log.log_id IS 'Log Id: Unique audit log id for each audit event log entry across the system.'; +-- ddl-end -- +COMMENT ON COLUMN archive.app_audit_log.log_dtimes IS 'Log DateTimestamp: Audit Log Datetimestamp'; +-- ddl-end -- +COMMENT ON COLUMN archive.app_audit_log.log_desc IS 'Log Description: Detailed description of the audit event'; +-- ddl-end -- +COMMENT ON COLUMN archive.app_audit_log.event_id IS 'Event Id: Event ID that triggered for which the audit action happend'; +-- ddl-end -- +COMMENT ON COLUMN archive.app_audit_log.event_type IS 'Event Type: Type of event that triggered the audit log, like, SYSTEM, USER, APPLICATION, BATCH etc.'; +-- ddl-end -- +COMMENT ON COLUMN archive.app_audit_log.event_name IS 'Event Name: Event Name of the Event Id captured'; +-- ddl-end -- +COMMENT ON COLUMN archive.app_audit_log.action_dtimes IS 'Action DateTimestamp: Timestamp of an application action happend.'; +-- ddl-end -- +COMMENT ON COLUMN archive.app_audit_log.host_name IS 'Host Name: Host Name of the Host ID captured, if any.'; +-- ddl-end -- +COMMENT ON COLUMN archive.app_audit_log.host_ip IS 'Host Ip: Machine or device host Ip address of audit action event that happend/triggered'; +-- ddl-end -- +COMMENT ON COLUMN archive.app_audit_log.session_user_id IS 'Session user Id: Active User ID of the person who is logged in to the system and performing any action that triggered the audit log.'; +-- ddl-end -- +COMMENT ON COLUMN archive.app_audit_log.session_user_name 
IS 'Session user Name: User Name of the Session User ID.'; +-- ddl-end -- +COMMENT ON COLUMN archive.app_audit_log.app_id IS 'Application Id: Application Id of audit action happened and logged.'; +-- ddl-end -- +COMMENT ON COLUMN archive.app_audit_log.app_name IS 'Application Name: Application Name'; +-- ddl-end -- +COMMENT ON COLUMN archive.app_audit_log.module_id IS 'Module Id: Application Module ID that triggered audit trigger log.'; +-- ddl-end -- +COMMENT ON COLUMN archive.app_audit_log.module_name IS 'Module Name: Application Module Name of the Module ID captured.'; +-- ddl-end -- +COMMENT ON COLUMN archive.app_audit_log.ref_id IS 'Reference Id: Reference ID for any cross reference purpose relevant for audit tracking, user id, app id, app or module id, etc.'; +-- ddl-end -- +COMMENT ON COLUMN archive.app_audit_log.ref_id_type IS 'Reference Id Type: Type of reference id entered'; +-- ddl-end -- +COMMENT ON COLUMN archive.app_audit_log.cr_by IS 'Created By : ID or name of the user who create / insert record'; +-- ddl-end -- diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-applicant_demographic_consumed.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-applicant_demographic_consumed.sql new file mode 100644 index 00000000..04a6d805 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-applicant_demographic_consumed.sql @@ -0,0 +1,54 @@ +-- ------------------------------------------------------------------------------------------------- +-- Database Name: mosip_archive +-- Table Name : archive. +-- Purpose : +-- Create By : Sadanandegowda +-- Created Date : Dec-2020 +-- +-- Modified Date Modified By Comments / Remarks +-- ------------------------------------------------------------------------------------------ +-- +-- ------------------------------------------------------------------------------------------ +-- object: archive.applicant_demographic_consumed | type: TABLE -- +-- DROP TABLE IF EXISTS archive.applicant_demographic_consumed CASCADE; +CREATE TABLE archive.applicant_demographic_consumed( + prereg_id character varying(36) NOT NULL, + demog_detail bytea NOT NULL, + demog_detail_hash character varying(64) NOT NULL, + encrypted_dtimes timestamp NOT NULL, + status_code character varying(36) NOT NULL, + lang_code character varying(3) NOT NULL, + cr_appuser_id character varying(256) NOT NULL, + cr_by character varying(256) NOT NULL, + cr_dtimes timestamp NOT NULL, + upd_by character varying(256), + upd_dtimes timestamp, + CONSTRAINT pk_appldemc_prereg_id PRIMARY KEY (prereg_id) + +); +-- ddl-end -- +COMMENT ON TABLE archive.applicant_demographic_consumed IS 'Applicant Demographic Consumed: Stores demographic details of an applicant that was comsumed.'; +-- ddl-end -- +COMMENT ON COLUMN archive.applicant_demographic_consumed.prereg_id IS 'Pre Registration ID: Unique Id generated for an individual during the pre-registration process which will be referenced during registration process at a registration center.'; +-- ddl-end -- +COMMENT ON COLUMN archive.applicant_demographic_consumed.demog_detail IS 'Demographic Detail: Demographic details of an individual, stored in json format.'; +-- ddl-end -- +COMMENT ON COLUMN archive.applicant_demographic_consumed.demog_detail_hash IS 'Demographic Detail Hash: Hash value of the demographic details stored in json format in a separate column. 
This will be used to make sure that nobody has tampered the data.'; +-- ddl-end -- +COMMENT ON COLUMN archive.applicant_demographic_consumed.encrypted_dtimes IS 'Encrypted Data Time: Date and time when the data was encrypted. This will also be used get the key for decrypting the data.'; +-- ddl-end -- +COMMENT ON COLUMN archive.applicant_demographic_consumed.status_code IS 'Status Code: Status of the pre-registration application. The application can be in draft / pending state or submitted state'; +-- ddl-end -- +COMMENT ON COLUMN archive.applicant_demographic_consumed.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language.'; +-- ddl-end -- +COMMENT ON COLUMN archive.applicant_demographic_consumed.cr_appuser_id IS 'Applciation Created User Id: User ID of the individual who is submitting the pre-registration application. It can be for self or for others like family members.'; +-- ddl-end -- +COMMENT ON COLUMN archive.applicant_demographic_consumed.cr_by IS 'Created By : ID or name of the user who create / insert record.'; +-- ddl-end -- +COMMENT ON COLUMN archive.applicant_demographic_consumed.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; +-- ddl-end -- +COMMENT ON COLUMN archive.applicant_demographic_consumed.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; +-- ddl-end -- +COMMENT ON COLUMN archive.applicant_demographic_consumed.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; +-- ddl-end -- + diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-applicant_document_consumed.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-applicant_document_consumed.sql new file mode 100644 index 00000000..696e7ee0 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-applicant_document_consumed.sql @@ -0,0 +1,75 @@ +-- ------------------------------------------------------------------------------------------------- +-- Database Name: mosip_archive +-- Table Name : archive.applicant_document_consumed +-- Purpose : Applicant Document Consumed: Documents that are uploaded as part of pre-registration process which was consumed is maintained here. 
+-- Create By : Sadanandegowda +-- Created Date : Dec-2020 +-- +-- Modified Date Modified By Comments / Remarks +-- ------------------------------------------------------------------------------------------ +-- +-- ------------------------------------------------------------------------------------------ +-- object: archive.applicant_document_consumed | type: TABLE -- +-- DROP TABLE IF EXISTS archive.applicant_document_consumed CASCADE; +CREATE TABLE archive.applicant_document_consumed( + id character varying(36) NOT NULL, + prereg_id character varying(36) NOT NULL, + doc_name character varying(128) NOT NULL, + doc_cat_code character varying(36) NOT NULL, + doc_typ_code character varying(36) NOT NULL, + doc_file_format character varying(36) NOT NULL, + doc_id character varying(128) NOT NULL, + doc_hash character varying(64) NOT NULL, + encrypted_dtimes timestamp NOT NULL, + status_code character varying(36) NOT NULL, + lang_code character varying(3) NOT NULL, + cr_by character varying(256), + cr_dtimes timestamp, + upd_by character varying(256), + upd_dtimes timestamp, + CONSTRAINT pk_appldocc_prereg_id PRIMARY KEY (id) + +); +-- indexes section ------------------------------------------------- +create unique index idx_appldocc_prereg_id on archive.applicant_document_consumed (prereg_id, doc_cat_code, doc_typ_code) ; + +-- ddl-end -- +COMMENT ON TABLE archive.applicant_document_consumed IS 'Applicant Document Consumed: Documents that are uploaded as part of pre-registration process which was consumed is maintained here. '; +-- ddl-end -- +COMMENT ON COLUMN archive.applicant_document_consumed.id IS 'Id: Unique id generated for the documents being uploaded as part of pre-registration process.'; +-- ddl-end -- +COMMENT ON COLUMN archive.applicant_document_consumed.prereg_id IS 'Pre Registration Id: Id of the pre-registration application for which the documents are being uploaded.'; +-- ddl-end -- +COMMENT ON COLUMN archive.applicant_document_consumed.doc_name IS 'Document Name: Name of the document that is uploaded'; +-- ddl-end -- +COMMENT ON COLUMN archive.applicant_document_consumed.doc_cat_code IS 'Document Category Code: Document category code under which the document is being uploaded. Refers to master.document_category.code'; +-- ddl-end -- +COMMENT ON COLUMN archive.applicant_document_consumed.doc_typ_code IS 'Document Type Code: Document type code under which the document is being uploaded. Refers to master.document_type.code'; +-- ddl-end -- +COMMENT ON COLUMN archive.applicant_document_consumed.doc_file_format IS 'Documenet File Format: Format in which the document is being uploaded. Refers to master.document_file_format.code'; +-- ddl-end -- +COMMENT ON COLUMN archive.applicant_document_consumed.doc_id IS 'Document Id: ID of the document being uploaded'; +-- ddl-end -- +COMMENT ON COLUMN archive.applicant_document_consumed.doc_hash IS 'Document Hash: Hash value of the document being uploaded in document store. This will be used to make sure that nobody has tampered the document stored in a separate store. '; +-- ddl-end -- +COMMENT ON COLUMN archive.applicant_document_consumed.encrypted_dtimes IS 'Encrypted Data Time: Date and time when the document was encrypted before uploading it on document store. 
This will also be used get the key for decrypting the data.'; +-- ddl-end -- +COMMENT ON COLUMN archive.applicant_document_consumed.status_code IS 'Status Code: Status of the document that is being uploaded.'; +-- ddl-end -- +COMMENT ON COLUMN archive.applicant_document_consumed.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language.'; +-- ddl-end -- +COMMENT ON COLUMN archive.applicant_document_consumed.cr_by IS 'Created By : ID or name of the user who create / insert record.'; +-- ddl-end -- +COMMENT ON COLUMN archive.applicant_document_consumed.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; +-- ddl-end -- +COMMENT ON COLUMN archive.applicant_document_consumed.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; +-- ddl-end -- +COMMENT ON COLUMN archive.applicant_document_consumed.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; +-- ddl-end -- + + + + + + + diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-auth_transaction.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-auth_transaction.sql new file mode 100644 index 00000000..24200af5 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-auth_transaction.sql @@ -0,0 +1,90 @@ +-- ------------------------------------------------------------------------------------------------- +-- Database Name: mosip_archive +-- Table Name : archive.auth_transaction +-- Purpose : Authentication Transaction : To track all authentication transactions steps / stages in the process +-- Create By : Sadanandegowda +-- Created Date : Dec-2020 +-- +-- Modified Date Modified By Comments / Remarks +-- ------------------------------------------------------------------------------------------ +-- +-- ------------------------------------------------------------------------------------------ + +-- object: archive.auth_transaction | type: TABLE -- +-- DROP TABLE IF EXISTS archive.auth_transaction CASCADE; +CREATE TABLE archive.auth_transaction( + id character varying(36) NOT NULL, + request_dtimes timestamp NOT NULL, + response_dtimes timestamp NOT NULL, + request_trn_id character varying(64), + auth_type_code character varying(36) NOT NULL, + status_code character varying(36) NOT NULL, + status_comment character varying(1024), + lang_code character varying(3) NOT NULL, + ref_id_type character varying(36), + ref_id character varying(64), + token_id character varying(128) NOT NULL, + requested_entity_type character varying(64), + requested_entity_id character varying(36), + requested_entity_name character varying(128), + static_tkn_id character varying(64), + request_signature character varying, + response_signature character varying, + cr_by character varying(256) NOT NULL, + cr_dtimes timestamp NOT NULL, + upd_by character varying(256), + upd_dtimes timestamp, + is_deleted boolean, + del_dtimes timestamp, + CONSTRAINT pk_authtrn_id PRIMARY KEY (id) + +); +-- ddl-end -- +COMMENT ON TABLE archive.auth_transaction IS 'Authentication Transaction : To track all authentication transactions steps / stages in the process flow.'; +-- ddl-end -- +COMMENT ON COLUMN archive.auth_transaction.id IS 'ID: This is unique transaction id assigned for each authentication transaction'; +-- ddl-end -- +COMMENT ON COLUMN 
archive.auth_transaction.request_dtimes IS 'Request Datetimestamp : Timestamp of Authentication request received from client system.'; +-- ddl-end -- +COMMENT ON COLUMN archive.auth_transaction.response_dtimes IS 'Response Datetimestamp : Date timestamp of response sent back to client system for the authentication request. '; +-- ddl-end -- +COMMENT ON COLUMN archive.auth_transaction.request_trn_id IS 'Request Transaction Id : Unique Authentication request transaction id assigned for each request received from client system.'; +-- ddl-end -- +COMMENT ON COLUMN archive.auth_transaction.auth_type_code IS 'Authentication Type Code : Type of authentication for the specific transaction, for ex., OTP, BIO, DEMO, etc'; +-- ddl-end -- +COMMENT ON COLUMN archive.auth_transaction.status_code IS 'Status Code : Current Status code of the transaction in a process flow.'; +-- ddl-end -- +COMMENT ON COLUMN archive.auth_transaction.status_comment IS 'Status Comment : Description for the status entered/updated by user or system assigned for the specific transaction.'; +-- ddl-end -- +COMMENT ON COLUMN archive.auth_transaction.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language. '; +-- ddl-end -- +COMMENT ON COLUMN archive.auth_transaction.ref_id_type IS 'Reference Id Type: Type of reference id entered in reference id column for ex., USER, VIRTUALID, UIN, PREREG, etc.'; +-- ddl-end -- +COMMENT ON COLUMN archive.auth_transaction.ref_id IS 'Reference Id: Reference ID for any cross reference purpose relevant for tracking, for ex., user id, uin, vid, prereg id, rid etc.'; +-- ddl-end -- +COMMENT ON COLUMN archive.auth_transaction.token_id IS 'Token ID : Token ID generated in reference with UIN/VID'; +-- ddl-end -- +COMMENT ON COLUMN archive.auth_transaction.requested_entity_type IS 'Requested Entity Type: Type of entity through which the authentication request was initiated. It can from a partner, internal authenticaition, etc.'; +-- ddl-end -- +COMMENT ON COLUMN archive.auth_transaction.requested_entity_id IS 'Requested Entity Id: ID of the entity through which the authentication request was initiated.'; +-- ddl-end -- +COMMENT ON COLUMN archive.auth_transaction.requested_entity_name IS 'Requested Entity Name: Name of the entity through which the authentication request was initiated.'; +-- ddl-end -- +COMMENT ON COLUMN archive.auth_transaction.static_tkn_id IS 'Static Token Id : This is a static token id assigned for each authentication request. Static token id is combination of TSPID + UIN generated for any TSP or Individuls and sent back in response. End user can use this id while authenticating themselves. 
'; +-- ddl-end -- +COMMENT ON COLUMN archive.auth_transaction.request_signature IS 'Request Signature: Request body information stored with signed'; +-- ddl-end -- +COMMENT ON COLUMN archive.auth_transaction.response_signature IS 'Response Signature: Response body stored with signed'; +-- ddl-end -- +COMMENT ON COLUMN archive.auth_transaction.cr_by IS 'Created By : ID or name of the user who create / insert record'; +-- ddl-end -- +COMMENT ON COLUMN archive.auth_transaction.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; +-- ddl-end -- +COMMENT ON COLUMN archive.auth_transaction.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; +-- ddl-end -- +COMMENT ON COLUMN archive.auth_transaction.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; +-- ddl-end -- +COMMENT ON COLUMN archive.auth_transaction.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.'; +-- ddl-end -- +COMMENT ON COLUMN archive.auth_transaction.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE'; +-- ddl-end -- \ No newline at end of file diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-otp_transaction.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-otp_transaction.sql new file mode 100644 index 00000000..7034abf6 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-otp_transaction.sql @@ -0,0 +1,62 @@ +-- ------------------------------------------------------------------------------------------------- +-- Database Name: mosip_archive +-- Table Name : archive.otp_transaction +-- Purpose : OTP Transaction: All OTP related data and validation details are maintained here for ID Authentication +-- Create By : Sadanandegowda +-- Created Date : Dec-2020 +-- +-- Modified Date Modified By Comments / Remarks +-- ------------------------------------------------------------------------------------------ +-- +-- ------------------------------------------------------------------------------------------ +-- object: archive.otp_transaction | type: TABLE -- +-- DROP TABLE IF EXISTS archive.otp_transaction CASCADE; +CREATE TABLE archive.otp_transaction( + id character varying(36) NOT NULL, + ref_id character varying(64) NOT NULL, + otp_hash character varying(512) NOT NULL, + generated_dtimes timestamp, + expiry_dtimes timestamp, + validation_retry_count smallint, + status_code character varying(36), + lang_code character varying(3), + cr_by character varying(256) NOT NULL, + cr_dtimes timestamp NOT NULL, + upd_by character varying(256), + upd_dtimes timestamp, + is_deleted boolean, + del_dtimes timestamp, + CONSTRAINT pk_otpt_id PRIMARY KEY (id) + +); +-- ddl-end -- +COMMENT ON TABLE archive.otp_transaction IS 'OTP Transaction: All OTP related data and validation details are maintained here for ID Authentication module.'; +-- ddl-end -- +COMMENT ON COLUMN archive.otp_transaction.id IS 'ID: Key alias id is a unique identifier (UUID) used as an alias of the encryption key stored in keystore like HSM (hardware security module).'; +-- ddl-end -- +COMMENT ON COLUMN archive.otp_transaction.ref_id IS 'Reference ID: Reference ID is a reference information received from OTP requester which can be used while validating the OTP. 
';
+-- ddl-end --
+COMMENT ON COLUMN archive.otp_transaction.otp_hash IS 'OTP Hash: Hash of id, ref_id and otp which is generated based on the configuration setup and sent to the requester application / module.';
+-- ddl-end --
+COMMENT ON COLUMN archive.otp_transaction.generated_dtimes IS 'Generated Date Time: Date and Time when the OTP was generated';
+-- ddl-end --
+COMMENT ON COLUMN archive.otp_transaction.expiry_dtimes IS 'Expiry Date Time: Date Time when the OTP will be expired';
+-- ddl-end --
+COMMENT ON COLUMN archive.otp_transaction.validation_retry_count IS 'Validation Retry Count: Validation retry counts of this OTP request. If the validation retry crosses the threshold limit, then the OTP will be de-activated.';
+-- ddl-end --
+COMMENT ON COLUMN archive.otp_transaction.status_code IS 'Status Code: Status of the OTP, whether it is active or expired.';
+-- ddl-end --
+COMMENT ON COLUMN archive.otp_transaction.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language.';
+-- ddl-end --
+COMMENT ON COLUMN archive.otp_transaction.cr_by IS 'Created By : ID or name of the user who create / insert record';
+-- ddl-end --
+COMMENT ON COLUMN archive.otp_transaction.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+-- ddl-end --
+COMMENT ON COLUMN archive.otp_transaction.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+-- ddl-end --
+COMMENT ON COLUMN archive.otp_transaction.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+-- ddl-end --
+COMMENT ON COLUMN archive.otp_transaction.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
+-- ddl-end --
+COMMENT ON COLUMN archive.otp_transaction.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
+-- ddl-end --
\ No newline at end of file
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-processed_prereg_list.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-processed_prereg_list.sql
new file mode 100644
index 00000000..fecc7d03
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-processed_prereg_list.sql
@@ -0,0 +1,41 @@
+-- -------------------------------------------------------------------------------------------------
+-- Database Name: mosip_archive
+-- Table Name 	: archive.processed_prereg_list
+-- Purpose    	: Table to store the list of all pre-registrations received from the registration processor within the pre-registration module
+-- Create By   	: Sadanandegowda
+-- Created Date	: Dec-2020
+--
+-- Modified Date        Modified By         Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+--
+-- ------------------------------------------------------------------------------------------
+-- object: archive.processed_prereg_list | type: TABLE --
+-- DROP TABLE IF EXISTS archive.processed_prereg_list CASCADE;
+CREATE TABLE archive.processed_prereg_list(
+	prereg_id character varying(36) NOT NULL,
+	first_received_dtimes timestamp NOT NULL,
+	status_code character varying(36) NOT NULL,
+	status_comments character varying(1024),
+	prereg_trn_id character varying(36),
+	lang_code character
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-processed_prereg_list.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-processed_prereg_list.sql
new file mode 100644
index 00000000..fecc7d03
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-processed_prereg_list.sql
@@ -0,0 +1,41 @@
+-- -------------------------------------------------------------------------------------------------
+-- Database Name: mosip_archive
+-- Table Name 	: archive.processed_prereg_list
+-- Purpose    	: Table to store all the pre-registration list received from registration processor within pre-registration module
+-- Create By   	: Sadanandegowda
+-- Created Date	: Dec-2020
+--
+-- Modified Date        Modified By         Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+--
+-- ------------------------------------------------------------------------------------------
+-- object: archive.processed_prereg_list | type: TABLE --
+-- DROP TABLE IF EXISTS archive.processed_prereg_list CASCADE;
+CREATE TABLE archive.processed_prereg_list(
+	prereg_id character varying(36) NOT NULL,
+	first_received_dtimes timestamp NOT NULL,
+	status_code character varying(36) NOT NULL,
+	status_comments character varying(1024),
+	prereg_trn_id character varying(36),
+	lang_code character varying(3) NOT NULL,
+	cr_by character varying(256) NOT NULL,
+	cr_dtimes timestamp NOT NULL,
+	upd_by character varying(256),
+	upd_dtimes timestamp,
+	is_deleted boolean,
+	del_dtimes timestamp,
+	CONSTRAINT pprlst_pk PRIMARY KEY (prereg_id)

+);
+-- ddl-end --
+COMMENT ON TABLE archive.processed_prereg_list IS 'Table to store all the pre-registration list received from registration processor within pre-registration module';
+-- ddl-end --
+COMMENT ON COLUMN archive.processed_prereg_list.prereg_id IS 'Pre-registration id that was consumed by registration processor to generate UIN';
+-- ddl-end --
+COMMENT ON COLUMN archive.processed_prereg_list.first_received_dtimes IS 'Datetime when the pre-registration id was first received';
+-- ddl-end --
+COMMENT ON COLUMN archive.processed_prereg_list.status_code IS 'status of the pre-registration status update into actual tables';
+-- ddl-end --
+COMMENT ON COLUMN archive.processed_prereg_list.status_comments IS 'status comments of the pre-registration status update into actual tables';
+-- ddl-end --

diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-reg_appointment_consumed.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-reg_appointment_consumed.sql
new file mode 100644
index 00000000..630f4e46
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-reg_appointment_consumed.sql
@@ -0,0 +1,58 @@
+-- -------------------------------------------------------------------------------------------------
+-- Database Name: mosip_archive
+-- Table Name 	: archive.reg_appointment_consumed
+-- Purpose    	: Registration Appointment Consumed: Stores all the appointment requests booked by an individual at a registration center that are consumed
+-- Create By   	: Sadanandegowda
+-- Created Date	: Dec-2020
+--
+-- Modified Date        Modified By         Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+--
+-- ------------------------------------------------------------------------------------------
+-- object: archive.reg_appointment_consumed | type: TABLE --
+-- DROP TABLE IF EXISTS archive.reg_appointment_consumed CASCADE;
+CREATE TABLE archive.reg_appointment_consumed(
+	id character varying(36) NOT NULL,
+	regcntr_id character varying(10) NOT NULL,
+	prereg_id character varying(36) NOT NULL,
+	booking_dtimes timestamp NOT NULL,
+	appointment_date date,
+	slot_from_time time,
+	slot_to_time time,
+	lang_code character varying(3) NOT NULL,
+	cr_by character varying(256) NOT NULL,
+	cr_dtimes timestamp NOT NULL,
+	upd_by character varying(256),
+	upd_dtimes timestamp,
+	CONSTRAINT pk_rappmntc_id PRIMARY KEY (id),
+	CONSTRAINT uk_rappmntc_id UNIQUE (prereg_id)

+);
+-- ddl-end --
+COMMENT ON TABLE archive.reg_appointment_consumed IS 'Registration Appointment Consumed: Stores all the appointment requests booked by an individual at a registration center that are consumed. ';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_appointment_consumed.id IS 'ID: Unique id generated for the registration appointment booking.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_appointment_consumed.regcntr_id IS 'Registration Center ID: Id of the Registration Center where the appointment is taken. Refers to master.registration_center.id';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_appointment_consumed.prereg_id IS 'Pre-Registration Id: Pre-registration id for which registration appointment is taken.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_appointment_consumed.booking_dtimes IS 'Booking Date Time: Date and Time when the appointment booking is done.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_appointment_consumed.appointment_date IS 'Appointment Date: Date for which an individual has taken an appointment for registration at a registration center';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_appointment_consumed.slot_from_time IS 'Slot From Time: Start time of the appointment slot.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_appointment_consumed.slot_to_time IS 'Slot To Time: End time of the appointment slot.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_appointment_consumed.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_appointment_consumed.cr_by IS 'Created By : ID or name of the user who create / insert record.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_appointment_consumed.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_appointment_consumed.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_appointment_consumed.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+-- ddl-end --

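For illustration, this consumed-appointment table lends itself to simple reporting; a sketch over the columns defined above:

-- Sketch: consumed appointment bookings per registration centre per day.
SELECT regcntr_id, appointment_date, count(*) AS consumed_bookings
FROM archive.reg_appointment_consumed
GROUP BY regcntr_id, appointment_date
ORDER BY appointment_date;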
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-reg_demo_dedupe_list.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-reg_demo_dedupe_list.sql
new file mode 100644
index 00000000..b2612231
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-reg_demo_dedupe_list.sql
@@ -0,0 +1,48 @@
+-- -------------------------------------------------------------------------------------------------
+-- Database Name: mosip_archive
+-- Table Name 	: archive.reg_demo_dedupe_list
+-- Purpose    	: Registration Demographic Deduplication List: List of matched UIN / RIDs, as part of demographic data.
+-- Create By   	: Sadanandegowda
+-- Created Date	: Dec-2020
+--
+-- Modified Date        Modified By         Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+--
+-- ------------------------------------------------------------------------------------------

+-- object: archive.reg_demo_dedupe_list | type: TABLE --
+-- DROP TABLE IF EXISTS archive.reg_demo_dedupe_list CASCADE;
+CREATE TABLE archive.reg_demo_dedupe_list(
+	regtrn_id character varying(36) NOT NULL,
+	matched_reg_id character varying(39) NOT NULL,
+	reg_id character varying(39) NOT NULL,
+	cr_by character varying(256) NOT NULL,
+	cr_dtimes timestamp NOT NULL,
+	upd_by character varying(256),
+	upd_dtimes timestamp,
+	is_deleted boolean,
+	del_dtimes timestamp,
+	CONSTRAINT pk_regded PRIMARY KEY (matched_reg_id,regtrn_id)

+);
+-- ddl-end --
+COMMENT ON TABLE archive.reg_demo_dedupe_list IS 'Registration Demographic Deduplication List: List of matched UIN / RIDs, as part of demographic data.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_demo_dedupe_list.regtrn_id IS 'Registration Transaction ID: ID of the demo dedupe transaction, Refers to archive.registration_transaction.id';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_demo_dedupe_list.matched_reg_id IS 'Matched Registration ID: Registration ID of the individual matching with the host registration id. It can be RID or any other id related to an individual.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_demo_dedupe_list.reg_id IS 'Registration ID: Registration ID for which the matches are found as part of the demographic dedupe process.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_demo_dedupe_list.cr_by IS 'Created By : ID or name of the user who create / insert record.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_demo_dedupe_list.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_demo_dedupe_list.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_demo_dedupe_list.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_demo_dedupe_list.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_demo_dedupe_list.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
+-- ddl-end --
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-reg_manual_verification.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-reg_manual_verification.sql
new file mode 100644
index 00000000..69593e58
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-reg_manual_verification.sql
@@ -0,0 +1,73 @@
+-- -------------------------------------------------------------------------------------------------
+-- Database Name: mosip_archive
+-- Table Name 	: archive.reg_manual_verification
+-- Purpose    	: Manual Verification: Stores all the registration requests which go through the manual verification process; a registration can be assigned to single/multiple manual verifiers as part of the verification process
+-- Create By   	: Sadanandegowda
+-- Created Date	: Dec-2020
+--
+-- Modified Date        Modified By         Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+--
+--
+-- ------------------------------------------------------------------------------------------

+-- object: archive.reg_manual_verification | type: TABLE --
+-- DROP TABLE IF EXISTS archive.reg_manual_verification CASCADE;
+CREATE TABLE archive.reg_manual_verification(
+	reg_id character varying(39) NOT NULL,
+	matched_ref_id character varying(39) NOT NULL,
+	matched_ref_type character varying(36) NOT NULL,
+	mv_usr_id character varying(256),
+	matched_score numeric(6,3),
+	status_code character varying(36),
+	reason_code character varying(36),
+	status_comment character varying(256),
+	trntyp_code character varying(36),
+	lang_code character varying(3) NOT NULL,
+	is_active boolean NOT NULL,
+	cr_by character varying(256) NOT NULL,
+	cr_dtimes timestamp NOT NULL,
+	upd_by character varying(256),
+	upd_dtimes timestamp,
+	is_deleted boolean,
+	del_dtimes timestamp,
+	CONSTRAINT pk_rmnlver_id PRIMARY KEY (reg_id,matched_ref_id,matched_ref_type)

+);
+-- ddl-end --
+COMMENT ON TABLE archive.reg_manual_verification IS 'Manual Verification: Stores all the registration requests which go through the manual verification process; a registration can be assigned to single/multiple manual verifiers as part of the verification process';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.reg_id IS 'Registration ID: ID of the registration request';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.matched_ref_id IS 'Matched Reference ID: Reference ID of the matched registrations. This id can be RID';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.matched_ref_type IS 'Matched Reference ID Type: Type of the Reference ID';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.mv_usr_id IS 'Manual Verifier ID: User ID of the manual verifier';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.matched_score IS 'Matched Score: Matched score as part of the deduplication process. This will be the combined score of multiple ABIS applications';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.status_code IS 'Status Code : Status of the manual verification';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.reason_code IS 'Reason Code : Reason code provided by the manual verifier for approving or rejecting the registration request as part of the verification process';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.status_comment IS 'Status Comment: Comments captured as part of manual verification process';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.trntyp_code IS 'Transaction Type Code : Code of the transaction type';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.is_active IS 'IS_Active : Flag to mark whether the record is Active or In-active';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.cr_by IS 'Created By : ID or name of the user who create / insert record.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
+-- ddl-end --
+COMMENT ON COLUMN archive.reg_manual_verification.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
+-- ddl-end --

diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-registered_authdevice_master_h.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-registered_authdevice_master_h.sql
new file mode 100644
index 00000000..a50a5f00
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-registered_authdevice_master_h.sql
@@ -0,0 +1,82 @@
+-- -------------------------------------------------------------------------------------------------
+-- Database Name: mosip_archive
+-- Table Name 	: archive.registered_authdevice_master_h
+-- Purpose    	: Registered Device History : History of changes of any MOSIP device registration, stored in a history table to track changes for future validations.
+-- Create By   	: Sadanandegowda
+-- Created Date	: Dec-2020
+--
+-- Modified Date        Modified By         Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+--
+-- ------------------------------------------------------------------------------------------

+-- object: archive.registered_authdevice_master_h | type: TABLE --
+-- DROP TABLE IF EXISTS archive.registered_authdevice_master_h CASCADE;
+CREATE TABLE archive.registered_authdevice_master_h(
+	code character varying(36) NOT NULL,
+	status_code character varying(64),
+	device_id character varying(256) NOT NULL,
+	device_sub_id character varying(1024),
+	digital_id character varying(1024) NOT NULL,
+	serial_number character varying(64) NOT NULL,
+	device_detail_id character varying(36) NOT NULL,
+	purpose character varying(64) NOT NULL,
+	firmware character varying(128),
+	expiry_date timestamp,
+	certification_level character varying(3),
+	foundational_trust_provider_id character varying(36),
+	hotlisted boolean,
+	is_active boolean NOT NULL,
+	cr_by character varying(256) NOT NULL,
+	cr_dtimes timestamp NOT NULL,
+	upd_by character varying(256),
+	upd_dtimes timestamp,
+	is_deleted boolean,
+	del_dtimes timestamp,
+	eff_dtimes timestamp NOT NULL,
+	CONSTRAINT pk_authdevicemh_code PRIMARY KEY (code,eff_dtimes)

+);
+-- ddl-end --
+COMMENT ON TABLE archive.registered_authdevice_master_h IS 'Registered Device History : History of changes of any MOSIP device registration will be stored in history table to track any changes for future validations.';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.code IS 'Registered Device Code : Unique ID generated / assigned for device which is registered in MOSIP system for the purpose';
+-- ddl-end --
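-- Illustration (editorial, not part of the original DDL): history tables keyed on
-- (code, eff_dtimes) are typically read with a point-in-time filter; the device
-- code and timestamp literals below are placeholders.
-- Example:
--   SELECT * FROM archive.registered_authdevice_master_h
--   WHERE code = '<device-code>' AND eff_dtimes <= '<as-of-timestamp>'
--   ORDER BY eff_dtimes DESC LIMIT 1;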
+COMMENT ON COLUMN archive.registered_authdevice_master_h.status_code IS 'Status Code : Status of the registered devices, The status code can be Registered, De-Registered or Retired/Revoked.';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.device_id IS 'Device ID: Device ID is the unique id provided by device provider for each device';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.device_sub_id IS 'Device Sub ID: Sub ID of the devices, Each device can have an array of sub IDs.';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.digital_id IS 'Digital ID: Digital ID received as a JSON value containing values like serial number of the device, make, model, type, provider details, etc.';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.serial_number IS 'Serial Number : Serial number of the device, This will be the Unique ID of the device by the provider';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.device_detail_id IS 'Device Detail ID';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.purpose IS 'Purpose : Purpose of these devices in the MOSIP system. ex. Registrations, Authentication, eKYC...etc';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.firmware IS 'Firmware: Firmware used in devices';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.expiry_date IS 'Expiry Date: expiry date of the device';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.certification_level IS 'Certification Level: Certification level for the device, This can be L0 or L1 devices';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.foundational_trust_provider_id IS 'Foundational Trust Provider ID: Foundational trust provider ID, This will be soft referenced from master.foundational_trust_provider.id. Required only for L1 devices.';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.is_active IS 'IS_Active : Flag to mark whether the record is Active or In-active';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.cr_by IS 'Created By : ID or name of the user who create / insert record';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_authdevice_master_h.eff_dtimes IS 'Effective Date Timestamp : This is to track the master record whenever there is an INSERT/UPDATE/DELETE ( soft delete ). The current record is effective from this date-time.';
+-- ddl-end --
\ No newline at end of file
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-registered_regdevice_master_h.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-registered_regdevice_master_h.sql
new file mode 100644
index 00000000..96b89b95
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-registered_regdevice_master_h.sql
@@ -0,0 +1,82 @@
+-- -------------------------------------------------------------------------------------------------
+-- Database Name: mosip_archive
+-- Table Name 	: archive.registered_regdevice_master_h
+-- Purpose    	: Registered Device History : History of changes of any MOSIP device registration will be stored in history table to track any changes for future validations.
+-- Create By   	: Sadanandegowda
+-- Created Date	: Dec-2020
+--
+-- Modified Date        Modified By         Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+--
+-- ------------------------------------------------------------------------------------------

+-- object: archive.registered_regdevice_master_h | type: TABLE --
+-- DROP TABLE IF EXISTS archive.registered_regdevice_master_h CASCADE;
+CREATE TABLE archive.registered_regdevice_master_h(
+	code character varying(36) NOT NULL,
+	status_code character varying(64),
+	device_id character varying(256) NOT NULL,
+	device_sub_id character varying(1024),
+	digital_id character varying(1024) NOT NULL,
+	serial_number character varying(64) NOT NULL,
+	device_detail_id character varying(36) NOT NULL,
+	purpose character varying(64) NOT NULL,
+	firmware character varying(128),
+	expiry_date timestamp,
+	certification_level character varying(3),
+	foundational_trust_provider_id character varying(36),
+	hotlisted boolean,
+	is_active boolean NOT NULL,
+	cr_by character varying(256) NOT NULL,
+	cr_dtimes timestamp NOT NULL,
+	upd_by character varying(256),
+	upd_dtimes timestamp,
+	is_deleted boolean,
+	del_dtimes timestamp,
+	eff_dtimes timestamp NOT NULL,
+	CONSTRAINT pk_regdevicemh_code PRIMARY KEY (code,eff_dtimes)

+);
+-- ddl-end --
+COMMENT ON TABLE archive.registered_regdevice_master_h IS 'Registered Device History : History of changes of any MOSIP device registration will be stored in history table to track any changes for future validations.';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_regdevice_master_h.code IS 'Registered Device Code : Unique ID generated / assigned for device which is registered in MOSIP system for the purpose';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_regdevice_master_h.status_code IS 'Status Code : Status of the registered devices, The status code can be Registered, De-Registered or Retired/Revoked.';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_regdevice_master_h.device_id IS 'Device ID: Device ID is the unique id provided by device provider for each device';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_regdevice_master_h.device_sub_id IS 'Device Sub ID: Sub ID of the devices, Each device can have an array of sub IDs.';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_regdevice_master_h.digital_id IS 'Digital ID: Digital ID received as a JSON value containing values like serial number of the device, make, model, type, provider details, etc.';
+-- ddl-end --
+COMMENT ON COLUMN archive.registered_regdevice_master_h.serial_number IS 'Serial Number : Serial number of the device, This will be the Unique ID of the device by the
provider'; +-- ddl-end -- +COMMENT ON COLUMN archive.registered_regdevice_master_h.device_detail_id IS 'Device Detail ID'; +-- ddl-end -- +COMMENT ON COLUMN archive.registered_regdevice_master_h.purpose IS 'Purpose : Purpose of these devices in the MOSIP system. ex. Registrations, Authentication, eKYC...etc'; +-- ddl-end -- +COMMENT ON COLUMN archive.registered_regdevice_master_h.firmware IS 'Firmware: Firmware used in devices'; +-- ddl-end -- +COMMENT ON COLUMN archive.registered_regdevice_master_h.expiry_date IS 'Expiry Date: expiry date of the device'; +-- ddl-end -- +COMMENT ON COLUMN archive.registered_regdevice_master_h.certification_level IS 'Certification Level: Certification level for the device, This can be L0 or L1 devices'; +-- ddl-end -- +COMMENT ON COLUMN archive.registered_regdevice_master_h.foundational_trust_provider_id IS 'Foundational Trust Provider ID: Foundational trust provider ID, This will be soft referenced from master.foundational_trust_provider.id. Required only for L1 devices.'; +-- ddl-end -- +COMMENT ON COLUMN archive.registered_regdevice_master_h.is_active IS 'IS_Active : Flag to mark whether the record is Active or In-active'; +-- ddl-end -- +COMMENT ON COLUMN archive.registered_regdevice_master_h.cr_by IS 'Created By : ID or name of the user who create / insert record'; +-- ddl-end -- +COMMENT ON COLUMN archive.registered_regdevice_master_h.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; +-- ddl-end -- +COMMENT ON COLUMN archive.registered_regdevice_master_h.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; +-- ddl-end -- +COMMENT ON COLUMN archive.registered_regdevice_master_h.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; +-- ddl-end -- +COMMENT ON COLUMN archive.registered_regdevice_master_h.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.'; +-- ddl-end -- +COMMENT ON COLUMN archive.registered_regdevice_master_h.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE'; +-- ddl-end -- +COMMENT ON COLUMN archive.registered_regdevice_master_h.eff_dtimes IS 'Effective Date Timestamp : This to track master record whenever there is an INSERT/UPDATE/DELETE ( soft delete ). 
The current record is effective from this date-time.'; +-- ddl-end -- \ No newline at end of file diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-registration_transaction.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-registration_transaction.sql new file mode 100644 index 00000000..16ab0bf8 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-registration_transaction.sql @@ -0,0 +1,72 @@ +-- ------------------------------------------------------------------------------------------------- +-- Database Name: mosip_archive +-- Table Name : archive.registration_transaction +-- Purpose : Registration Transaction: Registration Processor Transaction table is to store ALL Registration Processor packet processing/process transaction details for ID issuance +-- Create By : Sadanandegowda +-- Created Date : Dec-2020 +-- +-- Modified Date Modified By Comments / Remarks +-- ------------------------------------------------------------------------------------------ +-- +-- ------------------------------------------------------------------------------------------ + +-- object: archive.registration_transaction | type: TABLE -- +-- DROP TABLE IF EXISTS archive.registration_transaction CASCADE; +CREATE TABLE archive.registration_transaction( + id character varying(36) NOT NULL, + reg_id character varying(39) NOT NULL, + trn_type_code character varying(64) NOT NULL, + remarks character varying(256), + parent_regtrn_id character varying(36), + ref_id character varying(64), + ref_id_type character varying(64), + status_code character varying(36) NOT NULL, + sub_status_code character varying(36) NOT NULL, + lang_code character varying(3) NOT NULL, + status_comment character varying(256), + cr_by character varying(256) NOT NULL, + cr_dtimes timestamp NOT NULL, + upd_by character varying(256), + upd_dtimes timestamp, + is_deleted boolean, + del_dtimes timestamp, + CONSTRAINT pk_regtrn_id PRIMARY KEY (id) + +); +-- ddl-end -- +COMMENT ON TABLE archive.registration_transaction IS 'Registration Transaction: Registration Processor Transaction table is to store ALL Registration Processor packet processing/process transaction details for ID issuance'; +-- ddl-end -- +COMMENT ON COLUMN archive.registration_transaction.id IS 'ID: Transaction id of the transactions that were recorded in registration module/application'; +-- ddl-end -- +COMMENT ON COLUMN archive.registration_transaction.reg_id IS 'Registration ID: Registration id for which these transactions are carried out at the registration client application.'; +-- ddl-end -- +COMMENT ON COLUMN archive.registration_transaction.trn_type_code IS 'Transaction Type Code: Type of transaction being processed. Refers to reg.transaction_type.code'; +-- ddl-end -- +COMMENT ON COLUMN archive.registration_transaction.remarks IS 'Transaction Remarks: Current remarks/comments of the transaction'; +-- ddl-end -- +COMMENT ON COLUMN archive.registration_transaction.parent_regtrn_id IS 'Parent Registration ID: Parent transaction id that has triggered this transaction (if any). Refers to reg.registration_transaction.id'; +-- ddl-end -- +COMMENT ON COLUMN archive.registration_transaction.ref_id IS 'Reference ID: Reference id for the transaction if any'; +-- ddl-end -- +COMMENT ON COLUMN archive.registration_transaction.ref_id_type IS 'reference ID Type: reference ID type of the transaction if any'; +-- ddl-end -- +COMMENT ON COLUMN archive.registration_transaction.status_code IS 'Status Code: Current status of the transaction. 
Refers to code field of master.status_list table.';
+-- ddl-end --
+COMMENT ON COLUMN archive.registration_transaction.sub_status_code IS 'Sub Status Code: Current sub status of the registration transaction. Refers to code field of master.status_list table.';
+-- ddl-end --
+COMMENT ON COLUMN archive.registration_transaction.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language.';
+-- ddl-end --
+COMMENT ON COLUMN archive.registration_transaction.status_comment IS 'Status Comment: Comments provided by the actor during the transaction processing.';
+-- ddl-end --
+COMMENT ON COLUMN archive.registration_transaction.cr_by IS 'Created By : ID or name of the user who create / insert record.';
+-- ddl-end --
+COMMENT ON COLUMN archive.registration_transaction.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+-- ddl-end --
+COMMENT ON COLUMN archive.registration_transaction.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+-- ddl-end --
+COMMENT ON COLUMN archive.registration_transaction.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+-- ddl-end --
+COMMENT ON COLUMN archive.registration_transaction.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
+-- ddl-end --
+COMMENT ON COLUMN archive.registration_transaction.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
+-- ddl-end --
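For illustration, the transaction table can reconstruct a packet's processing trail; the registration id literal below is a placeholder:

-- Sketch: chronological transaction trail for one registration id.
SELECT id, trn_type_code, status_code, sub_status_code, cr_dtimes
FROM archive.registration_transaction
WHERE reg_id = '<registration-id>'
ORDER BY cr_dtimes;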
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-uin_biometric_h.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-uin_biometric_h.sql
new file mode 100644
index 00000000..a51f868e
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-uin_biometric_h.sql
@@ -0,0 +1,61 @@
+-- -------------------------------------------------------------------------------------------------
+-- Database Name: mosip_archive
+-- Table Name 	: archive.uin_biometric_h
+-- Purpose    	: UIN Biometric History : Tracks changes to the base table record whenever there is an INSERT/UPDATE/DELETE ( soft delete ); Effective DateTimestamp is used for identifying latest or point in time information.
+-- Create By   	: Sadanandegowda
+-- Created Date	: Dec-2020
+--
+-- Modified Date        Modified By         Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+--
+-- ------------------------------------------------------------------------------------------

+-- object: archive.uin_biometric_h | type: TABLE --
+-- DROP TABLE IF EXISTS archive.uin_biometric_h CASCADE;
+CREATE TABLE archive.uin_biometric_h(
+	uin_ref_id character varying(36) NOT NULL,
+	biometric_file_type character varying(36) NOT NULL,
+	eff_dtimes timestamp NOT NULL,
+	bio_file_id character varying(128) NOT NULL,
+	biometric_file_name character varying(128) NOT NULL,
+	biometric_file_hash character varying(64) NOT NULL,
+	lang_code character varying(3) NOT NULL,
+	cr_by character varying(256) NOT NULL,
+	cr_dtimes timestamp NOT NULL,
+	upd_by character varying(256),
+	upd_dtimes timestamp,
+	is_deleted boolean,
+	del_dtimes timestamp,
+	CONSTRAINT pk_uinbh PRIMARY KEY (uin_ref_id,biometric_file_type,eff_dtimes),
+	CONSTRAINT uk_uinbh UNIQUE (uin_ref_id,bio_file_id,eff_dtimes)

+);
+-- ddl-end --
+COMMENT ON TABLE archive.uin_biometric_h IS 'UIN Biometric History : This to track changes to base table record whenever there is an INSERT/UPDATE/DELETE ( soft delete ), Effective DateTimestamp is used for identifying latest or point in time information. Refer base table description for details. ';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_biometric_h.uin_ref_id IS 'UIN Reference ID: System generated id mapped to a UIN used for references in the system. UIN reference ID is also used as folder/bucket in DFS (HDFS/CEPH) to store documents and biometric CBEFF file. refers to idrepo.uin.uin_ref_id';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_biometric_h.biometric_file_type IS 'Biometric File Type: Type of the biometric file stored in DFS (HDFS/CEPH). File type can be individual biometric file or parent /guardian biometric file.';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_biometric_h.eff_dtimes IS 'Effective Datetimestamp : This to track base table record changes whenever there is an INSERT/UPDATE/DELETE ( soft delete ). The current record is effective from this date-time till next change occurs.';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_biometric_h.bio_file_id IS 'Biometric File ID: ID of the biometric CBEFF file that is stored in filesystem storage like HDFS/CEPH. If File ID Is not available then name of the file itself can be used as file ID.';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_biometric_h.biometric_file_name IS 'Biometric File Name: Name of the biometric CBEFF file that is stored in filesystem storage like HDFS/CEPH.';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_biometric_h.biometric_file_hash IS 'Biometric File Hash: Hash value of the Biometric CBEFF file which is stored in DFS (HDFS/CEPH) storage. While reading the file, hash value of the file is verified with this hash value to ensure file validity.';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_biometric_h.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language. 
'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_biometric_h.cr_by IS 'Created By : ID or name of the user who create / insert record'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_biometric_h.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_biometric_h.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_biometric_h.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_biometric_h.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_biometric_h.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE'; +-- ddl-end -- diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-uin_document_h.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-uin_document_h.sql new file mode 100644 index 00000000..234855dc --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-uin_document_h.sql @@ -0,0 +1,67 @@ +-- ------------------------------------------------------------------------------------------------- +-- Database Name: mosip_archive +-- Table Name : archive.uin_document_h +-- Purpose : UIN Document History : This to track changes to base table record whenever there is an INSERT/UPDATE/DELETE ( soft delete ), Effective DateTimestamp is used for identifying latest or point in time information. Refer base table description for details +-- Create By : Sadanandegowda +-- Created Date : Dec-2020 +-- +-- Modified Date Modified By Comments / Remarks +-- ------------------------------------------------------------------------------------------ +-- +-- ------------------------------------------------------------------------------------------ + +-- object: archive.uin_document_h | type: TABLE -- +-- DROP TABLE IF EXISTS archive.uin_document_h CASCADE; +CREATE TABLE archive.uin_document_h( + uin_ref_id character varying(36) NOT NULL, + doccat_code character varying(36) NOT NULL, + doctyp_code character varying(64) NOT NULL, + eff_dtimes timestamp NOT NULL, + doc_id character varying(128) NOT NULL, + doc_name character varying(128) NOT NULL, + docfmt_code character varying(36) NOT NULL, + doc_hash character varying(64) NOT NULL, + lang_code character varying(3) NOT NULL, + cr_by character varying(256) NOT NULL, + cr_dtimes timestamp NOT NULL, + upd_by character varying(256), + upd_dtimes timestamp, + is_deleted boolean, + del_dtimes timestamp, + CONSTRAINT pk_uindh PRIMARY KEY (uin_ref_id,doccat_code,eff_dtimes), + CONSTRAINT uk_uindh UNIQUE (uin_ref_id,doc_id,eff_dtimes) + +); +-- ddl-end -- +COMMENT ON TABLE archive.uin_document_h IS 'UIN Document History : This to track changes to base table record whenever there is an INSERT/UPDATE/DELETE ( soft delete ), Effective DateTimestamp is used for identifying latest or point in time information. Refer base table description for details. '; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.uin_ref_id IS 'UIN Reference ID: System generated id mapped to a UIN used for references in the system. UIN reference ID is also used as folder/bucket in DFS (HDFS/CEPH) to store documents and biometric CBEFF file. 
refers to idrepo.uin.uin_ref_id'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.doccat_code IS 'Document Category Code: Category code under which document is uploaded during the registration process for ex., POA, POI, etc. Refers to master.doc_category.code'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.doctyp_code IS 'Document Type Code: Document type under which document is uploaded during the registration process for ex., passport, driving license, etc. Refers to master.doc_type.code.'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.eff_dtimes IS 'Effective Datetimestamp : This to track base table record changes whenever there is an INSERT/UPDATE/DELETE ( soft delete ). The current record is effective from this date-time till next change occurs.'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.doc_id IS 'Document ID: ID of the document that is stored in filesystem storage like HDFS/CEPH. If document ID Is not available then name of the file itself can be used as document ID.'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.doc_name IS 'Document Name: Name of the document that is stored in filesystem storage like HDFS/CEPH.'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.docfmt_code IS 'Document Format Code: Document format code of the document that is uploaded during the registration process for ex., PDF, JPG etc. Refers to master.doc_file_format.code'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.doc_hash IS 'Document Hash: Hash value of the document which is stored in DFS (HDFS/CEPH) storage. While reading the document, hash value of the document is verified with this hash value to ensure document validity.'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language. 
'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.cr_by IS 'Created By : ID or name of the user who create / insert record'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_document_h.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE'; +-- ddl-end -- diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-uin_h.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-uin_h.sql new file mode 100644 index 00000000..cdc4a435 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-uin_h.sql @@ -0,0 +1,70 @@ +-- ------------------------------------------------------------------------------------------------- +-- Database Name: mosip_archive +-- Table Name : archive.uin_h +-- Purpose : UIN History : This to track changes to base table record whenever there is an INSERT/UPDATE/DELETE ( soft delete ), Effective DateTimestamp is used for identifying latest or point in time information. Refer base table description for details. +-- Create By : Sadanandegowda +-- Created Date : Dec-2020 +-- +-- Modified Date Modified By Comments / Remarks +-- ------------------------------------------------------------------------------------------ +-- +-- ------------------------------------------------------------------------------------------ + +-- object: archive.uin_h | type: TABLE -- +-- DROP TABLE IF EXISTS archive.uin_h CASCADE; +CREATE TABLE archive.uin_h( + uin_ref_id character varying(36) NOT NULL, + eff_dtimes timestamp NOT NULL, + uin character varying(500) NOT NULL, + uin_hash character varying(128) NOT NULL, + uin_data bytea NOT NULL, + uin_data_hash character varying(64) NOT NULL, + reg_id character varying(39) NOT NULL, + bio_ref_id character varying(128), + status_code character varying(32) NOT NULL, + lang_code character varying(3) NOT NULL, + cr_by character varying(256) NOT NULL, + cr_dtimes timestamp NOT NULL, + upd_by character varying(256), + upd_dtimes timestamp, + is_deleted boolean, + del_dtimes timestamp, + CONSTRAINT pk_uinh PRIMARY KEY (uin_ref_id,eff_dtimes), + CONSTRAINT uk_uinh UNIQUE (uin,eff_dtimes) + +); +-- ddl-end -- +COMMENT ON TABLE archive.uin_h IS 'UIN History : This to track changes to base table record whenever there is an INSERT/UPDATE/DELETE ( soft delete ), Effective DateTimestamp is used for identifying latest or point in time information. Refer base table description for details. '; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_h.uin_ref_id IS 'UIN Reference ID: System generated id mapped to a UIN used for references in the system. UIN reference ID is also used as folder/bucket in DFS (HDFS/CEPH) to store documents and biometric CBEFF file.'; +-- ddl-end -- +COMMENT ON COLUMN archive.uin_h.eff_dtimes IS 'Effective Datetimestamp : This to track base table record changes whenever there is an INSERT/UPDATE/DELETE ( soft delete ). 
The current record is effective from this date-time till next change occurs.';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_h.uin IS 'Unique Identification Number : Unique identification number assigned to individual.';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_h.uin_hash IS 'Unique Identification Number Hash: Hash value of Unique identification number assigned to individual.';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_h.uin_data IS 'UIN Data: Information of an individual stored in JSON file as per ID definition defined by the country in the system';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_h.uin_data_hash IS 'UIN Data Hash: Hash value of the UIN data which is stored in uin_data field. While reading the JSON file, hash value of the file is verified with this hash value to ensure file validity.';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_h.reg_id IS 'Registration ID: Latest registration ID through which individual information got processed and registered';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_h.bio_ref_id IS 'Biometric Reference Id: Biometric reference id generated which will be used as a reference id in ABIS systems';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_h.status_code IS 'Status Code: Current Status code of the UIN. Refers to master.status_list.code';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_h.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language. ';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_h.cr_by IS 'Created By : ID or name of the user who create / insert record';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_h.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_h.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_h.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_h.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
+-- ddl-end --
+COMMENT ON COLUMN archive.uin_h.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
+-- ddl-end --
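For illustration, since uin_h keeps one row per (uin_ref_id, eff_dtimes), the latest state per UIN reference can be read with PostgreSQL's DISTINCT ON:

-- Sketch: most recent history row for every UIN reference.
SELECT DISTINCT ON (uin_ref_id) *
FROM archive.uin_h
ORDER BY uin_ref_id, eff_dtimes DESC;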
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-vid.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-vid.sql
new file mode 100644
index 00000000..a71d48bb
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-vid.sql
@@ -0,0 +1,65 @@
+-- -------------------------------------------------------------------------------------------------
+-- Database Name: mosip_archive
+-- Table Name 	: archive.vid
+-- Purpose    	: VID: To store generated list of Virtual IDs mapped to a UIN that can be used for Authentication.
+-- Create By   	: Sadanandegowda
+-- Created Date	: Dec-2020
+--
+-- Modified Date        Modified By         Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+--
+-- ------------------------------------------------------------------------------------------

+-- object: archive.vid | type: TABLE --
+-- DROP TABLE IF EXISTS archive.vid CASCADE;
+CREATE TABLE archive.vid(
+	id character varying(36) NOT NULL,
+	vid character varying(36) NOT NULL,
+	uin_hash character varying(128) NOT NULL,
+	uin character varying(500) NOT NULL,
+	vidtyp_code character varying(36) NOT NULL,
+	generated_dtimes timestamp NOT NULL,
+	expiry_dtimes timestamp,
+	status_code character varying(32) NOT NULL,
+	cr_by character varying(256) NOT NULL,
+	cr_dtimes timestamp NOT NULL,
+	upd_by character varying(256),
+	upd_dtimes timestamp,
+	is_deleted boolean,
+	del_dtimes timestamp,
+	CONSTRAINT pk_vid PRIMARY KEY (id),
+	CONSTRAINT uk_vid UNIQUE (vid),
+	CONSTRAINT uk_vid_uinhash UNIQUE (uin_hash,vidtyp_code,generated_dtimes)

+);
+-- ddl-end --
+COMMENT ON TABLE archive.vid IS 'VID: To store generated list of Virtual IDs mapped to a UIN that can be used for Authentication. UIN of an individual should be secure, not to be disclosed publicly, so as part of security, VIDs are introduced. VIDs are timebound, can be changed, etc.';
+-- ddl-end --
+COMMENT ON COLUMN archive.vid.id IS 'ID: Unique id generated by the system for each of the virtual id generated';
+-- ddl-end --
+COMMENT ON COLUMN archive.vid.vid IS 'Virtual ID: Virtual Identification Number assigned to an individual, This virtual id can be used for individual authentication instead of using UIN';
+-- ddl-end --
+COMMENT ON COLUMN archive.vid.uin_hash IS 'UIN Hash: Unique Identification Number Hash: Hash value of Unique identification number assigned to individual.';
+-- ddl-end --
+COMMENT ON COLUMN archive.vid.uin IS 'UIN: Unique Identification Number : Unique identification number assigned to individual. Which is mapped to VID in idmap.';
+-- ddl-end --
+COMMENT ON COLUMN archive.vid.vidtyp_code IS 'Virtual ID Type: Type of a VID. An individual can have multiple VIDs which will be used for multiple purposes. VID type can be perpetual ID, timebound ID..etc.';
+-- ddl-end --
+COMMENT ON COLUMN archive.vid.generated_dtimes IS 'Generated Date and Time: Date and timestamp when the Virtual ID is generated.';
+-- ddl-end --
+COMMENT ON COLUMN archive.vid.expiry_dtimes IS 'Expiry Date and Time: Expiry Date and Time of the Virtual ID';
+-- ddl-end --
+COMMENT ON COLUMN archive.vid.status_code IS 'Status Code: Current Status code of the Virtual ID. Refers to master.status_list.code';
+-- ddl-end --
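-- Illustration (editorial, not part of the original DDL): a typical lookup fetches
-- the usable VIDs for one UIN hash. The 'ACTIVE' literal below is an assumption;
-- actual status codes come from master.status_list.
-- Example:
--   SELECT vid, vidtyp_code, expiry_dtimes FROM archive.vid
--   WHERE uin_hash = '<uin-hash>' AND status_code = 'ACTIVE'
--   AND (expiry_dtimes IS NULL OR expiry_dtimes > now());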
+COMMENT ON COLUMN archive.vid.cr_by IS 'Created By : ID or name of the user who create / insert record';
+-- ddl-end --
+COMMENT ON COLUMN archive.vid.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+-- ddl-end --
+COMMENT ON COLUMN archive.vid.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+-- ddl-end --
+COMMENT ON COLUMN archive.vid.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+-- ddl-end --
+COMMENT ON COLUMN archive.vid.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
+-- ddl-end --
+COMMENT ON COLUMN archive.vid.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
+-- ddl-end --
diff --git a/data-archive/db_scripts/mosip_archive/mosip_archive_db.sql b/data-archive/db_scripts/mosip_archive/mosip_archive_db.sql
new file mode 100644
index 00000000..23bc63e7
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/mosip_archive_db.sql
@@ -0,0 +1,27 @@
+DROP DATABASE IF EXISTS mosip_archive;
+CREATE DATABASE mosip_archive
+	ENCODING = 'UTF8'
+	LC_COLLATE = 'en_US.UTF-8'
+	LC_CTYPE = 'en_US.UTF-8'
+	TABLESPACE = pg_default
+	OWNER = sysadmin
+	TEMPLATE = template0;
+-- ddl-end --
+COMMENT ON DATABASE mosip_archive IS 'Database to store all archive data, Data is archived from multiple tables from each module.';
+-- ddl-end --

+\c mosip_archive sysadmin

+-- object: archive | type: SCHEMA --
+DROP SCHEMA IF EXISTS archive CASCADE;
+CREATE SCHEMA archive;
+-- ddl-end --
+ALTER SCHEMA archive OWNER TO sysadmin;
+-- ddl-end --

+ALTER DATABASE mosip_archive SET search_path TO archive,pg_catalog,public;
+-- ddl-end --

+-- REVOKE CONNECT ON DATABASE mosip_archive FROM PUBLIC;
+-- REVOKE ALL ON SCHEMA archive FROM PUBLIC;
+-- REVOKE ALL ON ALL TABLES IN SCHEMA archive FROM PUBLIC ;
diff --git a/data-archive/db_scripts/mosip_archive/mosip_archive_db_deploy.sh b/data-archive/db_scripts/mosip_archive/mosip_archive_db_deploy.sh
new file mode 100644
index 00000000..03cb90b6
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/mosip_archive_db_deploy.sh
@@ -0,0 +1,112 @@
+### -- ---------------------------------------------------------------------------------------------------------
+### -- Script Name		: ARCHIVE DB Artifacts deploy
+### -- Deploy Module 	: MOSIP ARCHIVE Database
+### -- Purpose    		: To deploy MOSIP ARCHIVE database artifacts.
+### -- Create By   		: Sadanandegowda DM
+### -- Created Date		: Dec-2020
+### --
+### -- Modified Date        Modified By         Comments / Remarks
+### -- -----------------------------------------------------------------------------------------------------------

+######### Properties file #############
+set -e
+properties_file="$1"
+echo `date "+%m/%d/%Y %H:%M:%S"` ": $properties_file"
+#properties_file="./app.properties"
+if [ -f "$properties_file" ]
+then
+    echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file \"$properties_file\" found."
+    while IFS='=' read -r key value
+    do
+        key=$(echo $key | tr '.' '_')
+        eval ${key}=\${value}
+    done < "$properties_file"
+else
+    echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file not found. Pass the property file name as an argument."
+fi +echo `date "+%m/%d/%Y %H:%M:%S"` ": ------------------ Database server and service status check for ${MOSIP_DB_NAME}------------------------" +##############################################LOG FILE CREATION############################################################# + +today=`date '+%d%m%Y_%H%M%S'`; +LOG="${LOG_PATH}${MOSIP_DB_NAME}-${today}.log" +touch $LOG + + +SERVICE=$(PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "select count(1) from pg_roles where rolname IN('sysadmin')";exit; > /dev/null) + +if [ "$SERVICE" -eq 0 ] || [ "$SERVICE" -eq 1 ] +then +echo `date "+%m/%d/%Y %H:%M:%S"` ": Postgres database server and service is up and running" | tee -a $LOG 2>&1 +else +echo `date "+%m/%d/%Y %H:%M:%S"` ": Postgres database server or service is not running" | tee -a $LOG 2>&1 +fi + +echo `date "+%m/%d/%Y %H:%M:%S"` ": ----------------------------------------------------------------------------------------" + +echo `date "+%m/%d/%Y %H:%M:%S"` ": Started sourcing the $MOSIP_DB_NAME Database scripts" | tee -a $LOG 2>&1 +echo `date "+%m/%d/%Y %H:%M:%S"` ": Database scripts are sourcing from :$BASEPATH" | tee -a $LOG 2>&1 + +#========================================DB Deployment process begins on ARCHIVE DB SERVER====================================== + +echo `date "+%m/%d/%Y %H:%M:%S"` ": Database deployment on $MOSIP_DB_NAME database is started...." | tee -a $LOG 2>&1 +cd /$BASEPATH/$MOSIP_DB_NAME/ +VALUE=$(PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "select count(1) from pg_roles where rolname IN('sysadmin','appadmin','dbadmin')";exit; >> $LOG 2>&1) + echo `date "+%m/%d/%Y %H:%M:%S"` ": Checking for existing users.... 
Count of existing users:"$VALUE | tee -a $LOG 2>&1
+# Fewer than all three admin roles present (0, 1 or 2): (re)create them; the
+# three original branches ran the identical command, so they are consolidated.
+if [ ${VALUE} -lt 3 ]
+then
+	echo `date "+%m/%d/%Y %H:%M:%S"` ": Creating database users" | tee -a $LOG 2>&1
+	PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f $COMMON_ROLE_FILENAME -v sysadminpwd=\'$SYSADMIN_PWD\' -v dbadminpwd=\'$DBADMIN_PWD\' -v appadminpwd=\'$APPADMIN_PWD\' >> $LOG 2>&1
+else
+	echo `date "+%m/%d/%Y %H:%M:%S"` ": Database users already exist" | tee -a $LOG 2>&1
+fi

+CONN=$(PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "SELECT count(pg_terminate_backend(pg_stat_activity.pid)) FROM pg_stat_activity WHERE datname = '$MOSIP_DB_NAME' AND pid <> pg_backend_pid()";exit; >> $LOG 2>&1)

+if [ ${CONN} == 0 ]
+then
+	echo `date "+%m/%d/%Y %H:%M:%S"` ": No active database connections exist on ${MOSIP_DB_NAME}" | tee -a $LOG 2>&1
+else
+	echo `date "+%m/%d/%Y %H:%M:%S"` ": Active connections exist on the database server and active connection will be terminated for DB deployment." | tee -a $LOG 2>&1
+fi
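+### Illustrative usage note (editorial, not part of the original script): the
+### deployer is invoked with the properties file as its only argument, e.g.
+###   bash mosip_archive_db_deploy.sh mosip_archive_deploy.properties
+### The role and connection checks above run against DEFAULT_DB_NAME before
+### the archive database itself is created.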
+MASTERCONN=$(PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "select count(1) from pg_roles where rolname IN('archiveuser')";exit; >> $LOG 2>&1)

+if [ ${MASTERCONN} == 0 ]
+then
+	echo `date "+%m/%d/%Y %H:%M:%S"` ": Creating Archive database user" | tee -a $LOG 2>&1
+	PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f $APP_ROLE_FILENAME -v dbuserpwd=\'$DBUSER_PWD\' >> $LOG 2>&1
+else
+	echo `date "+%m/%d/%Y %H:%M:%S"` ": Archive database user already exists" | tee -a $LOG 2>&1
+fi
+PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f $DB_CREATION_FILENAME >> $LOG 2>&1
+PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f $ACCESS_GRANT_FILENAME >> $LOG 2>&1
+PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f $DDL_FILENAME >> $LOG 2>&1


+if [ ${DML_FLAG} == 1 ]
+then
+	echo `date "+%m/%d/%Y %H:%M:%S"` ": Deploying DML for ${MOSIP_DB_NAME} database" | tee -a $LOG 2>&1
+	PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -a -b -f $DML_FILENAME >> $LOG 2>&1
+else
+	echo `date "+%m/%d/%Y %H:%M:%S"` ": No DML deployment is required for ${MOSIP_DB_NAME}" | tee -a $LOG 2>&1
+fi

+if [ $(grep -c ERROR $LOG) -ne 0 ]
+then
+	echo `date "+%m/%d/%Y %H:%M:%S"` ": Database deployment completed with ERRORS. Please check the logs for more information" | tee -a $LOG 2>&1
+	echo `date "+%m/%d/%Y %H:%M:%S"` ": END of MOSIP database deployment" | tee -a $LOG 2>&1
+else
+	echo `date "+%m/%d/%Y %H:%M:%S"` ": Database deployment completed successfully. Please check the logs for more information" | tee -a $LOG 2>&1
+	echo `date "+%m/%d/%Y %H:%M:%S"` ": END of MOSIP \"${MOSIP_DB_NAME}\" database deployment" | tee -a $LOG 2>&1
+fi

+echo "******************************************"`date "+%m/%d/%Y %H:%M:%S"` "*****************************************************" >> $LOG 2>&1

+#========================================DB Deployment process completes on ARCHIVE DB SERVER======================================
diff --git a/data-archive/db_scripts/mosip_archive/mosip_archive_ddl_deploy.sql b/data-archive/db_scripts/mosip_archive/mosip_archive_ddl_deploy.sql
new file mode 100644
index 00000000..54319ffc
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/mosip_archive_ddl_deploy.sql
@@ -0,0 +1,19 @@
+\c mosip_archive sysadmin

+\ir ddl/archive-app_audit_log.sql
+\ir ddl/archive-applicant_demographic_consumed.sql
+\ir ddl/archive-applicant_document_consumed.sql
+\ir ddl/archive-auth_transaction.sql
+\ir ddl/archive-otp_transaction.sql
+\ir ddl/archive-processed_prereg_list.sql
+\ir ddl/archive-reg_appointment_consumed.sql
+\ir ddl/archive-reg_demo_dedupe_list.sql
+\ir ddl/archive-reg_manual_verification.sql
+\ir ddl/archive-registered_authdevice_master_h.sql
+\ir ddl/archive-registered_regdevice_master_h.sql
+\ir ddl/archive-registration_transaction.sql
+\ir ddl/archive-uin_biometric_h.sql
+\ir ddl/archive-uin_document_h.sql
+\ir ddl/archive-uin_h.sql
+\ir ddl/archive-vid.sql

diff --git a/data-archive/db_scripts/mosip_archive/mosip_archive_deploy.properties b/data-archive/db_scripts/mosip_archive/mosip_archive_deploy.properties
new file mode 100644
index
00000000..cb0b3559 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/mosip_archive_deploy.properties @@ -0,0 +1,15 @@ +DB_SERVERIP= +DB_PORT= +SU_USER=postgres +DEFAULT_DB_NAME=postgres +MOSIP_DB_NAME=mosip_archive +SYSADMIN_USER=sysadmin +BASEPATH=/home/madmin/database/ +LOG_PATH=/home/madmin/logs/ +COMMON_ROLE_FILENAME=mosip_role_common.sql +APP_ROLE_FILENAME=mosip_role_archiveuser.sql +DB_CREATION_FILENAME=mosip_archive_db.sql +ACCESS_GRANT_FILENAME=mosip_archive_grants.sql +DDL_FILENAME=mosip_archive_ddl_deploy.sql +DML_FLAG=0 +DML_FILENAME=mosip_archive_dml_deploy.sql diff --git a/data-archive/db_scripts/mosip_archive/mosip_archive_grants.sql b/data-archive/db_scripts/mosip_archive/mosip_archive_grants.sql new file mode 100644 index 00000000..52ec69a4 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/mosip_archive_grants.sql @@ -0,0 +1,48 @@ +\c mosip_archive sysadmin + + +-- object: grant_b0ae4f0dce | type: PERMISSION -- +GRANT CREATE,CONNECT,TEMPORARY + ON DATABASE mosip_archive + TO sysadmin; +-- ddl-end -- + +-- object: grant_99dd1cb062 | type: PERMISSION -- +GRANT CREATE,CONNECT,TEMPORARY + ON DATABASE mosip_archive + TO appadmin; +-- ddl-end -- + +-- object: grant_18180691b7 | type: PERMISSION -- +GRANT CONNECT + ON DATABASE mosip_archive + TO archiveuser; +-- ddl-end -- + +-- object: grant_3543fb6cf7 | type: PERMISSION -- +GRANT CREATE,USAGE + ON SCHEMA archive + TO sysadmin; +-- ddl-end -- + +-- object: grant_8e1a2559ed | type: PERMISSION -- +GRANT USAGE + ON SCHEMA archive + TO archiveuser; +-- ddl-end -- + +-- object: grant_8e1a2559ed | type: PERMISSION -- +GRANT SELECT,INSERT,UPDATE,DELETE,TRUNCATE,REFERENCES + ON ALL TABLES IN SCHEMA archive + TO archiveuser; +-- ddl-end -- + +ALTER DEFAULT PRIVILEGES IN SCHEMA archive + GRANT SELECT,INSERT,UPDATE,DELETE,REFERENCES ON TABLES TO archiveuser; + + +-- object: grant_78ed2da4ee | type: PERMISSION -- +GRANT SELECT,INSERT,UPDATE,DELETE,TRUNCATE,REFERENCES + ON ALL TABLES IN SCHEMA archive + TO appadmin; +-- ddl-end -- diff --git a/data-archive/db_scripts/mosip_archive/mosip_role_archiveuser.sql b/data-archive/db_scripts/mosip_archive/mosip_role_archiveuser.sql new file mode 100644 index 00000000..35c502ca --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/mosip_role_archiveuser.sql @@ -0,0 +1,7 @@ +-- object: archiveuser | type: ROLE -- +-- DROP ROLE IF EXISTS archiveuser; +CREATE ROLE archiveuser WITH + INHERIT + LOGIN + PASSWORD :dbuserpwd; +-- ddl-end -- diff --git a/data-archive/db_scripts/mosip_archive/mosip_role_common.sql b/data-archive/db_scripts/mosip_archive/mosip_role_common.sql new file mode 100644 index 00000000..4e4c083c --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/mosip_role_common.sql @@ -0,0 +1,31 @@ +-- object: sysadmin | type: ROLE -- +--DROP ROLE IF EXISTS sysadmin; +CREATE ROLE sysadmin WITH + SUPERUSER + CREATEDB + CREATEROLE + INHERIT + LOGIN + REPLICATION + PASSWORD :sysadminpwd; +-- ddl-end -- + +-- object: dbadmin | type: ROLE -- +--DROP ROLE IF EXISTS dbadmin; +CREATE ROLE dbadmin WITH + CREATEDB + CREATEROLE + INHERIT + LOGIN + REPLICATION + PASSWORD :dbadminpwd; +-- ddl-end -- + +-- object: appadmin | type: ROLE -- +--DROP ROLE IF EXISTS appadmin; +CREATE ROLE appadmin WITH + INHERIT + LOGIN + PASSWORD :appadminpwd; +-- ddl-end -- + From 7cc3a6e283d3f0f7e4e76d54aaf0e511d04fa636 Mon Sep 17 00:00:00 2001 From: rambhatt1591 <77657115+rambhatt1591@users.noreply.github.com> Date: Thu, 28 Oct 2021 10:57:38 +0530 Subject: [PATCH 003/130] MOSIP-15361,MOSIP-12096, 
MOSIP-12095 Archival for IDA and PreReg Table (#788) * MOSIP-15361 Archival for IDA Tables * MOSIP-12096 Prereg application table archival * Update mosip_archive_ida.ini * MOSIP-12095 Pre Reg OTP Transaction table script * MOSIP-12095 Deploy file changes --- .../mosip_ida/mosip_archive_ida.ini | 4 +- .../mosip_ida/mosip_archive_ida_table3.py | 107 ++++++++++++++++++ .../mosip_ida/mosip_archive_job_ida.sh | 8 ++ .../mosip_prereg/mosip_archive_prereg.ini | 5 +- .../mosip_archive_prereg_table5.py | 107 ++++++++++++++++++ .../mosip_archive_prereg_table6.py | 107 ++++++++++++++++++ .../ddl/archive-applications.sql | 33 ++++++ .../ddl/archive-credential_event_store.sql | 67 +++++++++++ .../mosip_archive_ddl_deploy.sql | 5 + 9 files changed, 441 insertions(+), 2 deletions(-) create mode 100644 data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table3.py create mode 100644 data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table5.py create mode 100644 data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table6.py create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-applications.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-credential_event_store.sql diff --git a/data-archive/archive-jobs/mosip_ida/mosip_archive_ida.ini b/data-archive/archive-jobs/mosip_ida/mosip_archive_ida.ini index 9fc5cfea..1aa68ddc 100644 --- a/data-archive/archive-jobs/mosip_ida/mosip_archive_ida.ini +++ b/data-archive/archive-jobs/mosip_ida/mosip_archive_ida.ini @@ -8,6 +8,8 @@ source_db_pass=Mosip@dev123 archive_table1=auth_transaction archive_table2=otp_transaction +archive_table3=credential_event_store + archive_db_serverip=13.233.223.29 archive_db_port=30090 archive_db_name=mosip_archive @@ -15,4 +17,4 @@ archive_schema_name=archive archive_db_uname=archiveuser archive_db_pass=Mosip@dev123 -archive_older_than_days = 2 \ No newline at end of file +archive_older_than_days = 2 diff --git a/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table3.py b/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table3.py new file mode 100644 index 00000000..bf88939b --- /dev/null +++ b/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table3.py @@ -0,0 +1,107 @@ +#-- ------------------------------------------------------------------------------------------------- +#-- Job Name : ID Authentication DB Tables Archive +#-- DB Name : mosip_ida +#-- Table Names : credential_event_store +#-- Purpose : Job to Archive Data in ID Authentication DB for above mentioned tables +#-- Create By : Ram Bhatt +#-- Created Date : Oct-2021 +#-- +#-- Modified Date Modified By Comments / Remarks +#-- ------------------------------------------------------------------------------------------ +#-- +#-- ------------------------------------------------------------------------------------------ + +#!/usr/bin/python +# -*- coding: utf-8 -*- +import sys + +import configparser +import psycopg2 +import datetime + +from configparser import ConfigParser +from datetime import datetime + +def config(filename='mosip_archive_prereg.ini', section='MOSIP-DB-SECTION'): + parser = ConfigParser() + parser.read(filename) + dbparam = {} + if parser.has_section(section): + params = parser.items(section) + for param in params: + dbparam[param[0]] = param[1] + else: + raise Exception('Section {0} not found in the {1} file'.format(section, filename)) + + return dbparam + +def getValues(row): + finalValues ="" + for values in row: + finalValues = finalValues+"'"+str(values)+"'," + + finalValues = 
finalValues[0:-1] + return finalValues + +def dataArchive(): + sourseConn = None + archiveConn = None + try: + + dbparam = config() + + print('Connecting to the PostgreSQL database...') + sourseConn = psycopg2.connect(user=dbparam["source_db_uname"], + password=dbparam["source_db_pass"], + host=dbparam["source_db_serverip"], + port=dbparam["source_db_port"], + database=dbparam["source_db_name"]) + archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"], + password=dbparam["archive_db_pass"], + host=dbparam["archive_db_serverip"], + port=dbparam["archive_db_port"], + database=dbparam["archive_db_name"]) + + sourceCur = sourseConn.cursor() + archiveCur = archiveConn.cursor() + + tableName=dbparam["archive_table3"] + sschemaName = dbparam["source_schema_name"] + aschemaName = dbparam["archive_schema_name"] + oldDays = dbparam["archive_older_than_days"] + + print(tableName) + select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'" + sourceCur.execute(select_query) + rows = sourceCur.fetchall() + select_count = sourceCur.rowcount + print(select_count, ": Record selected for archive from ", tableName) + if select_count > 0: + for row in rows: + rowValues = getValues(row) + insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")" + archiveCur.execute(insert_query) + archiveConn.commit() + insert_count = archiveCur.rowcount + print(insert_count, ": Record inserted successfully ") + if insert_count > 0: + delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE id ='"+row[0]+"'" + sourceCur.execute(delete_query) + sourseConn.commit() + delete_count = sourceCur.rowcount + print(delete_count, ": Record deleted successfully") + + except (Exception, psycopg2.DatabaseError) as error: + print(error) + finally: + if sourseConn is not None: + sourceCur.close() + sourseConn.close() + print('Database sourse connection closed.') + if archiveConn is not None: + archiveCur.close() + archiveConn.close() + print('Database archive connection closed.') + +if __name__ == '__main__': + dataArchive() diff --git a/data-archive/archive-jobs/mosip_ida/mosip_archive_job_ida.sh b/data-archive/archive-jobs/mosip_ida/mosip_archive_job_ida.sh index 0e867724..1ccd8beb 100644 --- a/data-archive/archive-jobs/mosip_ida/mosip_archive_job_ida.sh +++ b/data-archive/archive-jobs/mosip_ida/mosip_archive_job_ida.sh @@ -7,10 +7,18 @@ ### -- ### -- Modified Date Modified By Comments / Remarks ### -- ---------------------------------------------------------------------------------------- +### -- Oct-2021 Ram Bhatt Added archival scripts for credential_event_store +### -- -------------------------------------------------------------------------------------------- + + python mosip_archive_ida_table1.py & sleep 5m python mosip_archive_ida_table2.py & +sleep 5m + +python mosip_archive_ida_table3.py & + #=============================================================================================== diff --git a/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg.ini b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg.ini index 58cf5b8a..054377ec 100644 --- a/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg.ini +++ b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg.ini @@ -10,6 +10,9 @@ archive_table2=applicant_document_consumed archive_table3=reg_appointment_consumed archive_table4=processed_prereg_list +archive_table5=applications +archive_table6=otp_transaction + archive_db_serverip=13.233.223.29 
archive_db_port=30090 archive_db_name=mosip_archive @@ -17,4 +20,4 @@ archive_schema_name=archive archive_db_uname=archiveuser archive_db_pass=Mosip@dev123 -archive_older_than_days = 2 \ No newline at end of file +archive_older_than_days = 2 diff --git a/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table5.py b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table5.py new file mode 100644 index 00000000..bb2cc66a --- /dev/null +++ b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table5.py @@ -0,0 +1,107 @@ +#-- ------------------------------------------------------------------------------------------------- +#-- Job Name : Pre Registration DB Tables Archive +#-- DB Name : mosip_prereg +#-- Table Names : applications +#-- Purpose : Job to Archive Data in pre registration DB for above mentioned tables +#-- Create By : Ram Bhatt +#-- Created Date : Oct-2021 +#-- +#-- Modified Date Modified By Comments / Remarks +#-- ------------------------------------------------------------------------------------------ +#-- +#-- ------------------------------------------------------------------------------------------ + +#!/usr/bin/python +# -*- coding: utf-8 -*- +import sys + +import configparser +import psycopg2 +import datetime + +from configparser import ConfigParser +from datetime import datetime + +def config(filename='mosip_archive_prereg.ini', section='MOSIP-DB-SECTION'): + parser = ConfigParser() + parser.read(filename) + dbparam = {} + if parser.has_section(section): + params = parser.items(section) + for param in params: + dbparam[param[0]] = param[1] + else: + raise Exception('Section {0} not found in the {1} file'.format(section, filename)) + + return dbparam + +def getValues(row): + finalValues ="" + for values in row: + finalValues = finalValues+"'"+str(values)+"'," + + finalValues = finalValues[0:-1] + return finalValues + +def dataArchive(): + sourseConn = None + archiveConn = None + try: + + dbparam = config() + + print('Connecting to the PostgreSQL database...') + sourseConn = psycopg2.connect(user=dbparam["source_db_uname"], + password=dbparam["source_db_pass"], + host=dbparam["source_db_serverip"], + port=dbparam["source_db_port"], + database=dbparam["source_db_name"]) + archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"], + password=dbparam["archive_db_pass"], + host=dbparam["archive_db_serverip"], + port=dbparam["archive_db_port"], + database=dbparam["archive_db_name"]) + + sourceCur = sourseConn.cursor() + archiveCur = archiveConn.cursor() + + tableName=dbparam["archive_table5"] + sschemaName = dbparam["source_schema_name"] + aschemaName = dbparam["archive_schema_name"] + oldDays = dbparam["archive_older_than_days"] + + print(tableName) + select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE application_status_code = 'BOOKED' AND cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'" + sourceCur.execute(select_query) + rows = sourceCur.fetchall() + select_count = sourceCur.rowcount + print(select_count, ": Record selected for archive from ", tableName) + if select_count > 0: + for row in rows: + rowValues = getValues(row) + insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")" + archiveCur.execute(insert_query) + archiveConn.commit() + insert_count = archiveCur.rowcount + print(insert_count, ": Record inserted successfully ") + if insert_count > 0: + delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE prereg_id ='"+row[0]+"'" + sourceCur.execute(delete_query) + sourseConn.commit() + 
delete_count = sourceCur.rowcount + print(delete_count, ": Record deleted successfully") + + except (Exception, psycopg2.DatabaseError) as error: + print(error) + finally: + if sourseConn is not None: + sourceCur.close() + sourseConn.close() + print('Database sourse connection closed.') + if archiveConn is not None: + archiveCur.close() + archiveConn.close() + print('Database archive connection closed.') + +if __name__ == '__main__': + dataArchive() diff --git a/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table6.py b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table6.py new file mode 100644 index 00000000..48b39b2f --- /dev/null +++ b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table6.py @@ -0,0 +1,107 @@ +#-- ------------------------------------------------------------------------------------------------- +#-- Job Name : Pre Registration DB Tables Archive +#-- DB Name : mosip_prereg +#-- Table Names : otp_transaction +#-- Purpose : Job to Archive Data in pre registration DB for above mentioned tables +#-- Create By : Ram Bhatt +#-- Created Date : Oct-2021 +#-- +#-- Modified Date Modified By Comments / Remarks +#-- ------------------------------------------------------------------------------------------ +#-- +#-- ------------------------------------------------------------------------------------------ + +#!/usr/bin/python +# -*- coding: utf-8 -*- +import sys + +import configparser +import psycopg2 +import datetime + +from configparser import ConfigParser +from datetime import datetime + +def config(filename='mosip_archive_prereg.ini', section='MOSIP-DB-SECTION'): + parser = ConfigParser() + parser.read(filename) + dbparam = {} + if parser.has_section(section): + params = parser.items(section) + for param in params: + dbparam[param[0]] = param[1] + else: + raise Exception('Section {0} not found in the {1} file'.format(section, filename)) + + return dbparam + +def getValues(row): + finalValues ="" + for values in row: + finalValues = finalValues+"'"+str(values)+"'," + + finalValues = finalValues[0:-1] + return finalValues + +def dataArchive(): + sourseConn = None + archiveConn = None + try: + + dbparam = config() + + print('Connecting to the PostgreSQL database...') + sourseConn = psycopg2.connect(user=dbparam["source_db_uname"], + password=dbparam["source_db_pass"], + host=dbparam["source_db_serverip"], + port=dbparam["source_db_port"], + database=dbparam["source_db_name"]) + archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"], + password=dbparam["archive_db_pass"], + host=dbparam["archive_db_serverip"], + port=dbparam["archive_db_port"], + database=dbparam["archive_db_name"]) + + sourceCur = sourseConn.cursor() + archiveCur = archiveConn.cursor() + + tableName=dbparam["archive_table6"] + sschemaName = dbparam["source_schema_name"] + aschemaName = dbparam["archive_schema_name"] + oldDays = dbparam["archive_older_than_days"] + + print(tableName) + select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'" + sourceCur.execute(select_query) + rows = sourceCur.fetchall() + select_count = sourceCur.rowcount + print(select_count, ": Record selected for archive from ", tableName) + if select_count > 0: + for row in rows: + rowValues = getValues(row) + insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")" + archiveCur.execute(insert_query) + archiveConn.commit() + insert_count = archiveCur.rowcount + print(insert_count, ": Record inserted successfully ") 
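+                # Archive-then-delete: the source row is removed only after its
+                # archive insert has committed, so a failure in between leaves
+                # the row in the source table rather than losing it.
+                # NOTE (assumption): the delete below filters on prereg_id, carried
+                # over from the applications script; if this table's key column is
+                # actually `id`, as the otp_transaction entries in the later
+                # *_archive_table_info.json files suggest, row[0] would need to be
+                # matched against that column instead.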
+ if insert_count > 0: + delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE prereg_id ='"+row[0]+"'" + sourceCur.execute(delete_query) + sourseConn.commit() + delete_count = sourceCur.rowcount + print(delete_count, ": Record deleted successfully") + + except (Exception, psycopg2.DatabaseError) as error: + print(error) + finally: + if sourseConn is not None: + sourceCur.close() + sourseConn.close() + print('Database sourse connection closed.') + if archiveConn is not None: + archiveCur.close() + archiveConn.close() + print('Database archive connection closed.') + +if __name__ == '__main__': + dataArchive() diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-applications.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-applications.sql new file mode 100644 index 00000000..6196d359 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-applications.sql @@ -0,0 +1,33 @@ +-- ------------------------------------------------------------------------------------------------- +-- Database Name: mosip_archive +-- Table Name : archive.applications +-- Purpose : Applications: +-- +-- Create By : Ram Bhatt +-- Created Date : Oct-2021 +-- +-- Modified Date Modified By Comments / Remarks +-- ------------------------------------------------------------------------------------------ +-- +-- ------------------------------------------------------------------------------------------ +-- object: archive.applications | type: TABLE -- +-- DROP TABLE IF EXISTS archive.applications CASCADE; +CREATE TABLE archive.applications( + application_id character varying(36) NOT NULL, + booking_type character varying(256) NOT NULL, + booking_status_code character varying(256), + application_status_code character varying(256), + regcntr_id character varying(10), + appointment_date date, + booking_date date, + slot_from_time time without time zone, + slot_to_time time without time zone, + contact_info character varying(256), + cr_by character varying(256) NOT NULL, + cr_dtimes timestamp without time zone NOT NULL, + upd_by character varying(256), + upd_dtimes timestamp without time zone, + CONSTRAINT appid_pk PRIMARY KEY (application_id) + +); +-- ddl-end -- diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-credential_event_store.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-credential_event_store.sql new file mode 100644 index 00000000..42baf9de --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-credential_event_store.sql @@ -0,0 +1,67 @@ +-- ------------------------------------------------------------------------------------------------- +-- Database Name: mosip_archive +-- Table Name : archive.credential_event_store +-- Purpose : +-- +-- Create By : Ram Bhatt +-- Created Date : Oct-2021 +-- +-- Modified Date Modified By Comments / Remarks +-- ------------------------------------------------------------------------------------------ +-- ------------------------------------------------------------------------------------------ + +-- object: archive.credential_event_store | type: TABLE -- +-- DROP TABLE IF EXISTS archive.credential_event_store CASCADE; +CREATE TABLE archive.credential_event_store( + event_id character varying(36) NOT NULL, + event_topic character varying(256) NOT NULL, + credential_transaction_id character varying(36) NOT NULL, + publisher character varying(128), + published_on_dtimes timestamp, + event_object character varying, + status_code character varying(36), + retry_count smallint, + cr_by character varying(256) NOT 
NULL,
+	cr_dtimes timestamp NOT NULL,
+	upd_by character varying(256),
+	upd_dtimes timestamp,
+	is_deleted boolean DEFAULT FALSE,
+	del_dtimes timestamp,
+	CONSTRAINT pk_ces_id PRIMARY KEY (event_id)
+
+);
+-- ddl-end --
+--index section starts----
+CREATE INDEX ind_ces_id ON archive.credential_event_store (cr_dtimes);
+--index section ends------
+COMMENT ON TABLE archive.credential_event_store IS 'Credential Event Store: Stores all credential requests in IDA and their status; failed requests are retried';
+-- ddl-end --
+COMMENT ON COLUMN archive.credential_event_store.event_id IS 'Event ID: Event id of the credential request';
+-- ddl-end --
+COMMENT ON COLUMN archive.credential_event_store.event_topic IS 'Event Topic: Topic of the credential request where the message is requested through websub';
+-- ddl-end --
+COMMENT ON COLUMN archive.credential_event_store.credential_transaction_id IS 'Credential transaction id where credential request details are stored';
+-- ddl-end --
+COMMENT ON COLUMN archive.credential_event_store.publisher IS 'Publisher of the messages';
+-- ddl-end --
+COMMENT ON COLUMN archive.credential_event_store.published_on_dtimes IS 'Date and time of the message published';
+-- ddl-end --
+COMMENT ON COLUMN archive.credential_event_store.event_object IS 'Credential event object details';
+-- ddl-end --
+COMMENT ON COLUMN archive.credential_event_store.status_code IS 'Status of the event, e.g. NEW, STORED, FAILED, FAILED_WITH_MAX_RETRIES';
+-- ddl-end --
+COMMENT ON COLUMN archive.credential_event_store.retry_count IS 'Retry count of the credential request event in case of failure';
+-- ddl-end --
+COMMENT ON COLUMN archive.credential_event_store.cr_by IS 'Created By : ID or name of the user who create / insert record';
+-- ddl-end --
+COMMENT ON COLUMN archive.credential_event_store.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+-- ddl-end --
+COMMENT ON COLUMN archive.credential_event_store.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+-- ddl-end --
+COMMENT ON COLUMN archive.credential_event_store.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+-- ddl-end --
+COMMENT ON COLUMN archive.credential_event_store.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
+-- ddl-end --
+COMMENT ON COLUMN archive.credential_event_store.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
+-- ddl-end --
+
diff --git a/data-archive/db_scripts/mosip_archive/mosip_archive_ddl_deploy.sql b/data-archive/db_scripts/mosip_archive/mosip_archive_ddl_deploy.sql
index 54319ffc..fc6f5334 100644
--- a/data-archive/db_scripts/mosip_archive/mosip_archive_ddl_deploy.sql
+++ b/data-archive/db_scripts/mosip_archive/mosip_archive_ddl_deploy.sql
@@ -17,3 +17,8 @@
 \ir ddl/archive-uin_h.sql
 \ir ddl/archive-vid.sql
 
+\ir ddl/archive-credential_event_store.sql
+\ir ddl/archive-applications.sql
+\ir ddl/archive-otp_transaction.sql
+
From d73f248780fa086854e7eb9cd23e001411eadb36 Mon Sep 17 00:00:00 2001
From: rambhatt1591
Date: Sun, 26 Dec 2021 21:16:03 +0530
Subject: [PATCH 004/130] MOSIP-18968 Python script changes

---
 .../archive-jobs/mosip_ida/mosip_archive_ida_table1.py | 2 +-
 .../archive-jobs/mosip_ida/mosip_archive_ida_table2.py | 4 ++--
 .../archive-jobs/mosip_ida/mosip_archive_ida_table3.py | 4 ++--
 3 files changed, 5 insertions(+), 5 deletions(-)
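A note on the per-table jobs in the patches above: each script assembles its SQL by concatenating quoted values (see getValues), which breaks on values containing quotes and turns NULLs into the literal string 'None'. As a minimal sketch under stated assumptions — psycopg2, which these jobs already use, plus its sql composition module; the function and parameter names are illustrative, not taken from the repo — the same select/insert/delete round trip can be written with parameterized queries:

# Hedged sketch, not part of this patch series: a parameterized, config-driven
# version of the per-table archive loop.
import psycopg2
from psycopg2 import sql

def archive_rows(source_conn, archive_conn, src_schema, arc_schema,
                 table, id_column, date_column, older_than_days):
    src_cur = source_conn.cursor()
    arc_cur = archive_conn.cursor()
    # Identifiers cannot be bound as query parameters, so schema/table/column
    # names are composed with psycopg2.sql; values go through normal binding.
    src_cur.execute(
        sql.SQL("SELECT * FROM {}.{} WHERE {} < NOW() - (%s * INTERVAL '1 day')")
           .format(sql.Identifier(src_schema), sql.Identifier(table),
                   sql.Identifier(date_column)),
        (older_than_days,))
    for row in src_cur.fetchall():
        placeholders = sql.SQL(', ').join(sql.Placeholder() * len(row))
        arc_cur.execute(
            sql.SQL("INSERT INTO {}.{} VALUES ({})")
               .format(sql.Identifier(arc_schema), sql.Identifier(table),
                       placeholders),
            row)
        # Assumes the id column is the first column of the row, as the
        # original scripts do.
        src_cur.execute(
            sql.SQL("DELETE FROM {}.{} WHERE {} = %s")
               .format(sql.Identifier(src_schema), sql.Identifier(table),
                       sql.Identifier(id_column)),
            (row[0],))
    # Commit the archive side first: if the source commit then fails, rows are
    # duplicated across the two databases (recoverable) rather than lost.
    archive_conn.commit()
    source_conn.commit()

Driven by a table/id_column/date_column configuration, this collapses the near-identical tableN scripts into one function, which is roughly the direction the later mosip_archive_main.py and *_archive_table_info.json changes below take.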
diff --git a/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table1.py b/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table1.py index 5bef0734..e1f4b8ea 100644 --- a/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table1.py +++ b/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table1.py @@ -22,7 +22,7 @@ from configparser import ConfigParser from datetime import datetime -def config(filename='mosip_archive_prereg.ini', section='MOSIP-DB-SECTION'): +def config(filename='mosip_archive_ida.ini', section='MOSIP-DB-SECTION'): parser = ConfigParser() parser.read(filename) dbparam = {} diff --git a/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table2.py b/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table2.py index fab08c65..25950a2b 100644 --- a/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table2.py +++ b/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table2.py @@ -1,7 +1,7 @@ #-- ------------------------------------------------------------------------------------------------- #-- Job Name : ID Authentication DB Tables Archive #-- DB Name : mosip_ida -#-- Table Names : applicant_document_consumed +#-- Table Names : otp_transaction #-- Purpose : Job to Archive Data in ID Authentication DB for above mentioned tables #-- Create By : Sadanandegowda DM #-- Created Date : Dec-2020 @@ -22,7 +22,7 @@ from configparser import ConfigParser from datetime import datetime -def config(filename='mosip_archive_prereg.ini', section='MOSIP-DB-SECTION'): +def config(filename='mosip_archive_ida.ini', section='MOSIP-DB-SECTION'): parser = ConfigParser() parser.read(filename) dbparam = {} diff --git a/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table3.py b/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table3.py index bf88939b..2fc64647 100644 --- a/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table3.py +++ b/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table3.py @@ -22,7 +22,7 @@ from configparser import ConfigParser from datetime import datetime -def config(filename='mosip_archive_prereg.ini', section='MOSIP-DB-SECTION'): +def config(filename='mosip_archive_ida.ini', section='MOSIP-DB-SECTION'): parser = ConfigParser() parser.read(filename) dbparam = {} @@ -85,7 +85,7 @@ def dataArchive(): insert_count = archiveCur.rowcount print(insert_count, ": Record inserted successfully ") if insert_count > 0: - delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE id ='"+row[0]+"'" + delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE event_id ='"+row[0]+"'" sourceCur.execute(delete_query) sourseConn.commit() delete_count = sourceCur.rowcount From 6e86092f36592daf07bab83926b9a961e2c3ef75 Mon Sep 17 00:00:00 2001 From: bhumi46 Date: Tue, 14 Nov 2023 14:20:07 +0530 Subject: [PATCH 005/130] [MOSIP-27423] updated dockerfile and optimised py script Signed-off-by: bhumi46 --- .github/keys/mosipgpgkey_pub.gpg | 30 +++ .github/keys/mosipgpgkey_sec.gpg | 59 ++++++ .github/workflows/push-trigger.yml | 40 ++++ .github/workflows/tag.yml | 35 ++++ data-archive/.dockerignore | 13 ++ data-archive/Dockerfile | 111 +++++++++++ .../audit_archive_table_info.json | 11 ++ .../credential_archive_table_info.json | 11 ++ data-archive/archive-jobs/db.properties | 34 ++++ .../esignet_archive_table_info.json | 11 ++ .../archive-jobs/ida_archive_table_info.json | 18 ++ .../idrepo_archive_table_info.json | 25 +++ .../kernel_archive_table_info.json | 11 ++ .../master_archive_table_info.json | 46 +++++ .../archive-jobs/mosip_archive_main.py 
| 153 +++++++++++++++ .../mosip_ida/mosip_archive_ida.ini | 20 -- .../mosip_ida/mosip_archive_ida_table1.py | 107 ----------- .../mosip_ida/mosip_archive_ida_table2.py | 107 ----------- .../mosip_ida/mosip_archive_ida_table3.py | 107 ----------- .../mosip_ida/mosip_archive_job_ida.sh | 24 --- .../mosip_idrepo/mosip_archive_idrepo.ini | 20 -- .../mosip_archive_idrepo_table1.py | 107 ----------- .../mosip_archive_idrepo_table2.py | 107 ----------- .../mosip_archive_idrepo_table3.py | 107 ----------- .../mosip_idrepo/mosip_archive_job_idrepo.sh | 19 -- .../mosip_prereg/mosip_archive_job_prereg.sh | 22 --- .../mosip_prereg/mosip_archive_prereg.ini | 23 --- .../mosip_archive_prereg_table1.py | 107 ----------- .../mosip_archive_prereg_table2.py | 107 ----------- .../mosip_archive_prereg_table3.py | 107 ----------- .../mosip_archive_prereg_table4.py | 107 ----------- .../mosip_archive_prereg_table5.py | 107 ----------- .../mosip_archive_prereg_table6.py | 107 ----------- .../mosip_regprc/mosip_archive_job_regprc.sh | 13 -- .../mosip_regprc/mosip_archive_regprc.ini | 17 -- .../mosip_archive_regprc_table1.py | 107 ----------- .../archive-jobs/pms_archive_table_info.json | 25 +++ .../regprc_archive_table_info.json | 39 ++++ .../resident_archive_table_info.json | 39 ++++ data-archive/archive.sh | 23 +++ data-archive/db.sh | 23 +++ data-archive/db_scripts/README.MD | 180 +----------------- data-archive/db_scripts/mosip_archive/db.sql | 14 ++ data-archive/db_scripts/mosip_archive/ddl.sql | 43 +++++ .../ddl/archive-app_audit_log.sql | 75 -------- ...archive-applicant_demographic_consumed.sql | 54 ------ .../archive-applicant_document_consumed.sql | 75 -------- .../ddl/archive-applications.sql | 33 ---- .../ddl/archive-audit-app_audit_log.sql | 63 ++++++ .../ddl/archive-auth_transaction.sql | 90 --------- ...archive-credential-batch_job_execution.sql | 20 ++ ...credential-batch_job_execution_context.sql | 14 ++ ...-credential-batch_job_execution_params.sql | 17 ++ .../archive-credential-batch_job_instance.sql | 13 ++ ...rchive-credential-batch_step_execution.sql | 27 +++ ...redential-batch_step_execution_context.sql | 15 ++ ...hive-credential-credential_transaction.sql | 69 +++++++ .../ddl/archive-credential_event_store.sql | 67 ------- .../ddl/archive-esignet-consent_history.sql | 44 +++++ .../ddl/archive-ida-anonymous_profile.sql | 45 +++++ .../ddl/archive-ida-auth_transaction.sql | 97 ++++++++++ .../ddl/archive-ida-batch_job_execution.sql | 20 ++ ...rchive-ida-batch_job_execution_context.sql | 14 ++ ...archive-ida-batch_job_execution_params.sql | 17 ++ .../ddl/archive-ida-batch_job_instance.sql | 13 ++ .../ddl/archive-ida-batch_step_execution.sql | 27 +++ ...chive-ida-batch_step_execution_context.sql | 15 ++ .../archive-ida-credential_event_store.sql | 69 +++++++ .../ddl/archive-ida-otp_transaction.sql | 66 +++++++ .../ddl/archive-idrepo-anonymous_profile.sql | 45 +++++ ...chive-idrepo-credential_request_status.sql | 34 ++++ .../ddl/archive-idrepo-uin_draft.sql | 33 ++++ .../ddl/archive-kernel-otp_transaction.sql | 56 ++++++ .../archive-master-bulkupload_transaction.sql | 61 ++++++ .../ddl/archive-master-device_master_h.sql | 65 +++++++ .../ddl/archive-master-machine_master_h.sql | 48 +++++ .../archive-master-registration_center_h.sql | 101 ++++++++++ .../ddl/archive-master-user_detail_h.sql | 56 ++++++ .../ddl/archive-master-zone_user_h.sql | 44 +++++ .../ddl/archive-otp_transaction.sql | 62 ------ .../ddl/archive-pms-auth_policy_h.sql | 81 ++++++++ .../ddl/archive-pms-partner_h.sql | 45 +++++ 
...chive-pms-secure_biometric_interface_h.sql | 37 ++++ .../ddl/archive-processed_prereg_list.sql | 41 ---- .../ddl/archive-reg_appointment_consumed.sql | 58 ------ .../ddl/archive-reg_demo_dedupe_list.sql | 48 ----- .../ddl/archive-reg_manual_verification.sql | 73 ------- ...archive-registered_authdevice_master_h.sql | 82 -------- .../archive-registered_regdevice_master_h.sql | 82 -------- .../ddl/archive-registration_transaction.sql | 72 ------- .../ddl/archive-regprc-abis_request.sql | 67 +++++++ .../ddl/archive-regprc-abis_response.sql | 51 +++++ .../ddl/archive-regprc-abis_response_det.sql | 35 ++++ .../archive-regprc-reg_demo_dedupe_list.sql | 38 ++++ ...rchive-regprc-registration_transaction.sql | 68 +++++++ .../ddl/archive-resident-otp_transaction.sql | 47 +++++ .../ddl/archive-resident_grievance_ticket.sql | 50 +++++ .../ddl/archive-resident_session.sql | 34 ++++ .../ddl/archive-resident_transaction.sql | 109 +++++++++++ .../ddl/archive-resident_user_actions.sql | 27 +++ .../ddl/archive-uin_biometric_h.sql | 61 ------ .../ddl/archive-uin_document_h.sql | 67 ------- .../mosip_archive/ddl/archive-uin_h.sql | 70 ------- .../mosip_archive/ddl/archive-vid.sql | 65 ------- .../mosip_archive/deploy.properties | 6 + .../db_scripts/mosip_archive/deploy.sh | 49 +++++ .../db_scripts/mosip_archive/grants.sql | 17 ++ .../mosip_archive/mosip_archive_db.sql | 27 --- .../mosip_archive/mosip_archive_db_deploy.sh | 112 ----------- .../mosip_archive_ddl_deploy.sql | 24 --- .../mosip_archive_deploy.properties | 15 -- .../mosip_archive/mosip_archive_grants.sql | 48 ----- .../mosip_archive/mosip_role_archiveuser.sql | 7 - .../mosip_archive/mosip_role_common.sql | 31 --- .../db_scripts/mosip_archive/role_dbuser.sql | 4 + data-archive/entrypoint.sh | 17 ++ data-archive/requirements.txt | 3 + 117 files changed, 2812 insertions(+), 3163 deletions(-) create mode 100644 .github/keys/mosipgpgkey_pub.gpg create mode 100644 .github/keys/mosipgpgkey_sec.gpg create mode 100644 .github/workflows/push-trigger.yml create mode 100644 .github/workflows/tag.yml create mode 100755 data-archive/.dockerignore create mode 100644 data-archive/Dockerfile create mode 100644 data-archive/archive-jobs/audit_archive_table_info.json create mode 100644 data-archive/archive-jobs/credential_archive_table_info.json create mode 100644 data-archive/archive-jobs/db.properties create mode 100644 data-archive/archive-jobs/esignet_archive_table_info.json create mode 100644 data-archive/archive-jobs/ida_archive_table_info.json create mode 100644 data-archive/archive-jobs/idrepo_archive_table_info.json create mode 100644 data-archive/archive-jobs/kernel_archive_table_info.json create mode 100644 data-archive/archive-jobs/master_archive_table_info.json create mode 100644 data-archive/archive-jobs/mosip_archive_main.py delete mode 100644 data-archive/archive-jobs/mosip_ida/mosip_archive_ida.ini delete mode 100644 data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table1.py delete mode 100644 data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table2.py delete mode 100644 data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table3.py delete mode 100644 data-archive/archive-jobs/mosip_ida/mosip_archive_job_ida.sh delete mode 100644 data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo.ini delete mode 100644 data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table1.py delete mode 100644 data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table2.py delete mode 100644 
data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table3.py delete mode 100644 data-archive/archive-jobs/mosip_idrepo/mosip_archive_job_idrepo.sh delete mode 100644 data-archive/archive-jobs/mosip_prereg/mosip_archive_job_prereg.sh delete mode 100644 data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg.ini delete mode 100644 data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table1.py delete mode 100644 data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table2.py delete mode 100644 data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table3.py delete mode 100644 data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table4.py delete mode 100644 data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table5.py delete mode 100644 data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table6.py delete mode 100644 data-archive/archive-jobs/mosip_regprc/mosip_archive_job_regprc.sh delete mode 100644 data-archive/archive-jobs/mosip_regprc/mosip_archive_regprc.ini delete mode 100644 data-archive/archive-jobs/mosip_regprc/mosip_archive_regprc_table1.py create mode 100644 data-archive/archive-jobs/pms_archive_table_info.json create mode 100644 data-archive/archive-jobs/regprc_archive_table_info.json create mode 100644 data-archive/archive-jobs/resident_archive_table_info.json create mode 100755 data-archive/archive.sh create mode 100755 data-archive/db.sh create mode 100644 data-archive/db_scripts/mosip_archive/db.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl.sql delete mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-app_audit_log.sql delete mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-applicant_demographic_consumed.sql delete mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-applicant_document_consumed.sql delete mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-applications.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-audit-app_audit_log.sql delete mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-auth_transaction.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-credential-batch_job_execution.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-credential-batch_job_execution_context.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-credential-batch_job_execution_params.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-credential-batch_job_instance.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-credential-batch_step_execution.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-credential-batch_step_execution_context.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-credential-credential_transaction.sql delete mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-credential_event_store.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-esignet-consent_history.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-ida-anonymous_profile.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-ida-auth_transaction.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-ida-batch_job_execution.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-ida-batch_job_execution_context.sql create mode 100644 
data-archive/db_scripts/mosip_archive/ddl/archive-ida-batch_job_execution_params.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-ida-batch_job_instance.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-ida-batch_step_execution.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-ida-batch_step_execution_context.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-ida-credential_event_store.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-ida-otp_transaction.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-idrepo-anonymous_profile.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-idrepo-credential_request_status.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-idrepo-uin_draft.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-kernel-otp_transaction.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-master-bulkupload_transaction.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-master-device_master_h.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-master-machine_master_h.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-master-registration_center_h.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-master-user_detail_h.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-master-zone_user_h.sql delete mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-otp_transaction.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-pms-auth_policy_h.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-pms-partner_h.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-pms-secure_biometric_interface_h.sql delete mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-processed_prereg_list.sql delete mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-reg_appointment_consumed.sql delete mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-reg_demo_dedupe_list.sql delete mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-reg_manual_verification.sql delete mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-registered_authdevice_master_h.sql delete mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-registered_regdevice_master_h.sql delete mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-registration_transaction.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-regprc-abis_request.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-regprc-abis_response.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-regprc-abis_response_det.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-regprc-reg_demo_dedupe_list.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-regprc-registration_transaction.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-resident-otp_transaction.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-resident_grievance_ticket.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-resident_session.sql create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-resident_transaction.sql create mode 100644 
data-archive/db_scripts/mosip_archive/ddl/archive-resident_user_actions.sql delete mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-uin_biometric_h.sql delete mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-uin_document_h.sql delete mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-uin_h.sql delete mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-vid.sql create mode 100644 data-archive/db_scripts/mosip_archive/deploy.properties create mode 100755 data-archive/db_scripts/mosip_archive/deploy.sh create mode 100644 data-archive/db_scripts/mosip_archive/grants.sql delete mode 100644 data-archive/db_scripts/mosip_archive/mosip_archive_db.sql delete mode 100644 data-archive/db_scripts/mosip_archive/mosip_archive_db_deploy.sh delete mode 100644 data-archive/db_scripts/mosip_archive/mosip_archive_ddl_deploy.sql delete mode 100644 data-archive/db_scripts/mosip_archive/mosip_archive_deploy.properties delete mode 100644 data-archive/db_scripts/mosip_archive/mosip_archive_grants.sql delete mode 100644 data-archive/db_scripts/mosip_archive/mosip_role_archiveuser.sql delete mode 100644 data-archive/db_scripts/mosip_archive/mosip_role_common.sql create mode 100644 data-archive/db_scripts/mosip_archive/role_dbuser.sql create mode 100755 data-archive/entrypoint.sh create mode 100644 data-archive/requirements.txt diff --git a/.github/keys/mosipgpgkey_pub.gpg b/.github/keys/mosipgpgkey_pub.gpg new file mode 100644 index 00000000..86e5dfe3 --- /dev/null +++ b/.github/keys/mosipgpgkey_pub.gpg @@ -0,0 +1,30 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQENBF4Voj4BCADMadISO9yCDWMqaIvLSpLbazvjz7Y5Sqdzn7dW0dt1LLbxlnGw +PAq4THeI5/6plotK1sOMVFh7iy57kwYUgzKnWnW5hNDPtaTCWDmrOftiHm0Q/fj2 +Zsy3aX0XuySQz7oycTnQ52p3zqGDlnH7zgPQV9dCCAIQMFbJZkoNpZxMRMN2Lnph +112Oi7ujO5/gIcrxLBQXe9zHVU9DT1C1JjY4eTajU9vg+muslwhoT633vYvuhj1q +UBnn6ZLUitw+Z8d6S9B8SEqcbd5O3ODGKhToDK9tC8rZCDFgmgcZYAZnuaJ2/bQB +uePiCKf4jkGpdtpAB4HIr2lGvK2rWbW0l2KpABEBAAG0SG1vc2lwIChUaGlzIGlz +IG1vc2lwIG1hdmVuIGNlbnRyYWwgcmVwbykgPG1vc2lwLmVtYWlsbm90aWZpZXJA +Z21haWwuY29tPokBPwQTAQIAKQUCXhWiPgIbAwUJA8JnAAcLCQgHAwIBBhUIAgkK +CwQWAgMBAh4BAheAAAoJEE7nVSof0F5k2EAIAKdflLyH8H93IFYOWZQtte07Ctu5 +SgWbgBpBKUYquTFphDOhplMB5/cik5eY3nSwuhWa1u6bwq/R6GvlnsH5JUkmIM8o +EHCx4GqR5Zq8LoZazqrVycWBkZp7IjKA2kc1MyhLUTLmdPu9/moi78ZRABvSW624 +NViSxyoypAMl4hHj4qlL2IEDEyv2/OwKz+Tt7zyk6/mFzpte3MzasSdLDdt5zhIG +s+QofZI7IqQ1C/GSa3R13sliXd+9F/7+edyK10sigRCL5SsmM3Yz/QifeE5+Rd5K +9vx5NTLvCPoNddFhFpXpeD8N3JLdh88mg+AROsI1tJPAuxx/JaS335M5+9m5AQ0E +XhWiPgEIAKuUHcEfHTKInylOj93JeCStbBUC2aeajNZTASGdv62TlFTJsgtyHC7+ +b6UYXvdwBB+JbCa1Nv53uVpvl2nrA1KmEm2wwCEh3wQ3Q8k2KDQ2Jm0APpTNCdVy +cPjN1pDwgqwYDkCv8B9ZdC9CuFY/hvU4KOkgk+OYyiQNC2IwKt0tKoGM4eUb7eVt +lFQ9t+Q0araXzefTTYzcP2kSLd6ZIPNyhJTSRiRWRbXLyTRvMQAkl4kF9sp3K3qt +uB5Q9GGpP7NJmF4LB1AxKjYDf12nFT9fqfjzIqW8i3gBc6WgI1bW7V5VTw7rrqCr +AUrJCf3HJv67PJ07gJYvNqstCiZtvBkAEQEAAYkBJQQYAQIADwUCXhWiPgIbDAUJ +A8JnAAAKCRBO51UqH9BeZBFDB/9wZy0IsdD/byau5SjpsVNPDoS7Cu7Ojfd4jFDe +I7ze78DIgXWsIrbWRu18UdbXhFUNJZOEK3lyQOvxpN2cGjSyAHVe2Gir7g/5+qkM +M+28wpGU+OL7ejAfup8vHmAUh6otkthHPGuQ7N2oYk9JVKvm6OjPsWdMb54iJS2x +XQ8FTs3ooRS6/biukRrgoYmX62oDJD4CcCh3UvMUslXtC7L/FX+4hmPx/wWa+iOO +utA+zLgOcjS15SB+pH47JHwHB7NJWr/+nxmkbCU6tOMdyznSCTiYMoF3AKeuxcAs +pD454Eioeq+WZ6CyY6Y278Q/iSnVNP6T1La6CdONdaOBRpdA +=e6L0 +-----END PGP PUBLIC KEY BLOCK----- diff --git a/.github/keys/mosipgpgkey_sec.gpg b/.github/keys/mosipgpgkey_sec.gpg new file mode 100644 index 00000000..b0180d54 --- /dev/null +++ b/.github/keys/mosipgpgkey_sec.gpg @@ -0,0 +1,59 @@ 
+-----BEGIN PGP PRIVATE KEY BLOCK----- + +lQPGBF4Voj4BCADMadISO9yCDWMqaIvLSpLbazvjz7Y5Sqdzn7dW0dt1LLbxlnGw +PAq4THeI5/6plotK1sOMVFh7iy57kwYUgzKnWnW5hNDPtaTCWDmrOftiHm0Q/fj2 +Zsy3aX0XuySQz7oycTnQ52p3zqGDlnH7zgPQV9dCCAIQMFbJZkoNpZxMRMN2Lnph +112Oi7ujO5/gIcrxLBQXe9zHVU9DT1C1JjY4eTajU9vg+muslwhoT633vYvuhj1q +UBnn6ZLUitw+Z8d6S9B8SEqcbd5O3ODGKhToDK9tC8rZCDFgmgcZYAZnuaJ2/bQB +uePiCKf4jkGpdtpAB4HIr2lGvK2rWbW0l2KpABEBAAH+BwMCQtDwzCTcteP/Cbg9 +9DugKCnTbSsnUwAuA+oWUdkVCkrL6pvFIPXkgpCq7Af2E3JLgYnKK6AVsbDkHPS2 ++UeZRgfnoYZJBH59pHbjwQuz7nKF10BikDIQwc5+fNuIg+GgD3bfsRrRwQMkrl5/ +T3KVnw8I6TinGGsdA+UpODf61UQNYajuAq3P3GswpvbpYHLadHz5hmRjYzTRUnfX +jn26DhxRH6m6ulp630QHTvDCzI6TrlvgI4ZGaowBCGbBuDb+gUL3HaBtm8ZDokRH +pa5FtI1kpujigZuZ0AvmeFXOwZsAsPsC1msmnq5PlOULovuY9/8tEgPBjV1jee7u +9ZOnsdmO06SQ2o7FLL7sVZW8xK0WeRZMHiv/sm+yYI5q+EFJXYBpHMUO3Q+xwNk6 +64Uj1PnYLjTM0VRrt144LOpyGCuBBmeGqCVp1HTHshK2qSv+hZXFYowl0ia7BQpm +Lh2dkUQs2pa5WmD8XHJnPdXN5FOXJvnLNHgreAwklXk2+TDXT00Vb5dYfvZ1p2Cp +IWmx6gV8qUy9EQIHp0vTLJPbF1t2YQGvUrHdcDrHPW83gF6ydMmdtfSASfP47ZU9 +Xkyqj7WdyZz+q0heW1R5MUxrYYK0hkSN1wiB/SR+giOuEwJ8qX95cs1PuJXmYZrp +zwrR2oKMrzWR78zi2rDHoTQUEj/y4RU/wOQReMUHHljFv8ROfLGMs/zJUVKkq7uB +DODWFM1XA/uhpI8rwIyox3tiBf4v2PFzwotUAiOrpHej8LochTMoTxL4nglsAEbw +K9e0gzAzYyiR1EHBdIIsAY2pwkxZz52aFqL0KwxKtBFYWx/VApm+FcT0OQsJO3VU +YFR11x5pAf1cmfq36SoCiq5HQnyHRYNzJ9k2b1P6QK6as4+ZWSw3f/Mmfcbe4Swt +I+hFG77L8oOltEhtb3NpcCAoVGhpcyBpcyBtb3NpcCBtYXZlbiBjZW50cmFsIHJl +cG8pIDxtb3NpcC5lbWFpbG5vdGlmaWVyQGdtYWlsLmNvbT6JAT8EEwECACkFAl4V +oj4CGwMFCQPCZwAHCwkIBwMCAQYVCAIJCgsEFgIDAQIeAQIXgAAKCRBO51UqH9Be +ZNhACACnX5S8h/B/dyBWDlmULbXtOwrbuUoFm4AaQSlGKrkxaYQzoaZTAef3IpOX +mN50sLoVmtbum8Kv0ehr5Z7B+SVJJiDPKBBwseBqkeWavC6GWs6q1cnFgZGaeyIy +gNpHNTMoS1Ey5nT7vf5qIu/GUQAb0lutuDVYkscqMqQDJeIR4+KpS9iBAxMr9vzs +Cs/k7e88pOv5hc6bXtzM2rEnSw3bec4SBrPkKH2SOyKkNQvxkmt0dd7JYl3fvRf+ +/nncitdLIoEQi+UrJjN2M/0In3hOfkXeSvb8eTUy7wj6DXXRYRaV6Xg/DdyS3YfP +JoPgETrCNbSTwLscfyWkt9+TOfvZnQPFBF4Voj4BCACrlB3BHx0yiJ8pTo/dyXgk +rWwVAtmnmozWUwEhnb+tk5RUybILchwu/m+lGF73cAQfiWwmtTb+d7lab5dp6wNS +phJtsMAhId8EN0PJNig0NiZtAD6UzQnVcnD4zdaQ8IKsGA5Ar/AfWXQvQrhWP4b1 +OCjpIJPjmMokDQtiMCrdLSqBjOHlG+3lbZRUPbfkNGq2l83n002M3D9pEi3emSDz +coSU0kYkVkW1y8k0bzEAJJeJBfbKdyt6rbgeUPRhqT+zSZheCwdQMSo2A39dpxU/ +X6n48yKlvIt4AXOloCNW1u1eVU8O666gqwFKyQn9xyb+uzydO4CWLzarLQombbwZ +ABEBAAH+BwMCNYo5MHWGGLn/A7ZeeuCoZfgxQVb4pNZBvW94/Oqd7gP8Rb4MOqry +0cTzy8fr8fLvE2PRCVdhAMyvenSZKzKPz5+JWN9ZiE62Xgfy3Waq8Ij6HKf4K6zb +zwyPMwDKx8DySCxOPgvT/bYVQh9S2xGD9FGdYEcJXRJbOgbSlwLC/aJeS4NEuoMg +TzD5bp4bMnzO9BGnuQJ/NT7w91Bw6PssXEAUXj8wHHIeelBj+s7+du/LVWa7R2eh +2JWwAbFZQfD1mkxXS8M6qcHqeSwuvrngM1A7EBsCkOhiG4dB6hAELes9lDtNy/LU +QS6pOZdzd+hffISRPXOEyumZHwPtVTgeHO0zyM6C//bHu9UcecRL+SReZjcQDfYc +i3VKJeejDYvpUavhFLZe8dWZe57Cgx/fbgU5yHUDr6srphLquCTvqOHIIsuNAxEF +4sQNU88p2aBGqIhF9K/LYFvJCXHtkAs2KjU/uYZrmtSr2z6hj4ENvIsI0hLpzE/M +ltpypXZxMQ/MVRUq9KmQHFF7KLvkHkpL2cBEV9oFITLIkHhWjqT3XYy3YiLn/OVW +w8JGi5m7xcwy8peV8wD4v2eHKq1uJL4R36j6g9+TgFdRLQ6j7r/kC5IBJ1+LusEL +Z1mLD0Mr2tyAvZPYrC3TCxSlRstw8dlRgY96pDnGPApR+rOnFNc/hkrk6xsTSy+R +Povhr/kQljNrcHe5ofhtiRh5Zg3jQdjcs2OuKvDaGfAJnzkUrtm5t12jvzLylRrZ +9mbeHg4zF44bawMms00eRRn1+gzATmwGlnh+n78sr+ddJOCAo+qt5jRccs89BqB5 +8ADCRkD2jbF87pOjH81k8Tl9kkBzlxN7T/8MeH2zDLSim04Jw2b+XtCRFUE6kXp9 +nuXevQDiyMUHG0gEXXu/pQARhfOu+b77PDH1JtmJASUEGAECAA8FAl4Voj4CGwwF +CQPCZwAACgkQTudVKh/QXmQRQwf/cGctCLHQ/28mruUo6bFTTw6Euwruzo33eIxQ +3iO83u/AyIF1rCK21kbtfFHW14RVDSWThCt5ckDr8aTdnBo0sgB1Xthoq+4P+fqp +DDPtvMKRlPji+3owH7qfLx5gFIeqLZLYRzxrkOzdqGJPSVSr5ujoz7FnTG+eIiUt +sV0PBU7N6KEUuv24rpEa4KGJl+tqAyQ+AnAod1LzFLJV7Quy/xV/uIZj8f8Fmvoj 
+jrrQPsy4DnI0teUgfqR+OyR8BwezSVq//p8ZpGwlOrTjHcs50gk4mDKBdwCnrsXA +LKQ+OeBIqHqvlmegsmOmNu/EP4kp1TT+k9S2ugnTjXWjgUaXQA== +=6YF6 +-----END PGP PRIVATE KEY BLOCK----- diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml new file mode 100644 index 00000000..9e32653f --- /dev/null +++ b/.github/workflows/push-trigger.yml @@ -0,0 +1,40 @@ +name: Build data-archive + +on: + release: + types: [published] + pull_request: + types: [opened, reopened, synchronize] + workflow_dispatch: + inputs: + message: + description: 'Message for manually triggering' + required: false + default: 'Triggered for Updates' + type: string + push: + branches: + - master + - 1.* + - develop + - release* + - MOSIP* + +jobs: + build-dockers: + strategy: + matrix: + include: + - SERVICE_LOCATION: 'data-archive' + SERVICE_NAME: 'data-archive' + fail-fast: false + name: ${{ matrix.SERVICE_NAME }} + uses: mosip/kattu/.github/workflows/docker-build.yml@master + with: + SERVICE_LOCATION: ${{ matrix.SERVICE_LOCATION }} + SERVICE_NAME: ${{ matrix.SERVICE_NAME }} + secrets: + DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }} + ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }} + RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_DEVOPS }} diff --git a/.github/workflows/tag.yml b/.github/workflows/tag.yml new file mode 100644 index 00000000..c131f5aa --- /dev/null +++ b/.github/workflows/tag.yml @@ -0,0 +1,35 @@ +name: Tagging of repos + +on: + workflow_dispatch: + inputs: + TAG: + description: 'Tag to be published' + required: true + type: string + BODY: + description: 'Release body message' + required: true + default: 'Changes in this Release' + type: string + PRE_RELEASE: + description: 'Pre-release? True/False' + required: true + default: False + type: string + DRAFT: + description: 'Draft? True/False' + required: false + default: False + type: string + +jobs: + tag-branch: + uses: mosip/kattu/.github/workflows/tag.yml@master + with: + TAG: ${{ inputs.TAG }} + BODY: ${{ inputs.BODY }} + PRE_RELEASE: ${{ inputs.PRE_RELEASE }} + DRAFT: ${{ inputs.DRAFT }} + secrets: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK }} diff --git a/data-archive/.dockerignore b/data-archive/.dockerignore new file mode 100755 index 00000000..efd2b6a9 --- /dev/null +++ b/data-archive/.dockerignore @@ -0,0 +1,13 @@ +archive-jobs/db.properties +**/.git +**/.gitignore +**/.vscode +**/coverage +**/.env +**/.aws +**/.ssh +Dockerfile +README.md +**/.DS_Store +**/venv +**/env diff --git a/data-archive/Dockerfile b/data-archive/Dockerfile new file mode 100644 index 00000000..8c2d1fdf --- /dev/null +++ b/data-archive/Dockerfile @@ -0,0 +1,111 @@ +# Use a specific version of the Postgres image +FROM postgres:16 + +# Define build arguments and set labels +ARG SOURCE +ARG COMMIT_HASH +ARG COMMIT_ID +ARG BUILD_TIME +LABEL source=${SOURCE} \ + commit_hash=${COMMIT_HASH} \ + commit_id=${COMMIT_ID} \ + build_time=${BUILD_TIME} + +# Install required packages and clean up to reduce image size +RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + apt-utils \ + python3 \ + python3-pip \ + python3-xlrd \ + python3.11-venv \ + curl \ + git && \ + rm -rf /var/lib/apt/lists/* + +# Set up virtual environment +RUN python3 -m venv /venv +ENV PATH="/venv/bin:$PATH" + +# Copy application code and install dependencies +WORKDIR /home/mosip/ +COPY . . 
+RUN pip3 install --no-cache-dir -r requirements.txt + +# Define environment variables with default values +ENV DB_SERVERIP= \ + DB_PORT= \ + SU_USER= \ + DBUSER_PWD= \ + MOSIP_DB_NAME= \ + DML_FLAG= \ + DB_NAMES= \ + AUDIT_SOURCE_DB_HOST= \ + AUDIT_SOURCE_DB_PORT= \ + AUDIT_SOURCE_DB_UNAME= \ + AUDIT_SOURCE_DB_PASS= \ + AUDIT_SOURCE_DB_NAME= \ + AUDIT_SOURCE_SCHEMA_NAME= \ + CREDENTIAL_SOURCE_DB_HOST= \ + CREDENTIAL_SOURCE_DB_PORT= \ + CREDENTIAL_SOURCE_DB_UNAME= \ + CREDENTIAL_SOURCE_DB_PASS= \ + CREDENTIAL_SOURCE_DB_NAME= \ + CREDENTIAL_SOURCE_SCHEMA_NAME= \ + ESIGNET_SOURCE_DB_HOST= \ + ESIGNET_SOURCE_DB_PORT= \ + ESIGNET_SOURCE_DB_UNAME= \ + ESIGNET_SOURCE_DB_PASS= \ + ESIGNET_SOURCE_DB_NAME= \ + ESIGNET_SOURCE_SCHEMA_NAME= \ + IDA_SOURCE_DB_HOST= \ + IDA_SOURCE_DB_PORT= \ + IDA_SOURCE_DB_UNAME= \ + IDA_SOURCE_DB_PASS= \ + IDA_SOURCE_DB_NAME= \ + IDA_SOURCE_SCHEMA_NAME= \ + IDREPO_SOURCE_DB_HOST= \ + IDREPO_SOURCE_DB_PORT= \ + IDREPO_SOURCE_DB_UNAME= \ + IDREPO_SOURCE_DB_PASS= \ + IDREPO_SOURCE_DB_NAME= \ + IDREPO_SOURCE_SCHEMA_NAME= \ + KERNEL_SOURCE_DB_HOST= \ + KERNEL_SOURCE_DB_PORT= \ + KERNEL_SOURCE_DB_UNAME= \ + KERNEL_SOURCE_DB_PASS= \ + KERNEL_SOURCE_DB_NAME= \ + KERNEL_SOURCE_SCHEMA_NAME= \ + MASTER_SOURCE_DB_HOST= \ + MASTER_SOURCE_DB_PORT= \ + MASTER_SOURCE_DB_UNAME= \ + MASTER_SOURCE_DB_PASS= \ + MASTER_SOURCE_DB_NAME= \ + MASTER_SOURCE_SCHEMA_NAME= \ + PMS_SOURCE_DB_HOST= \ + PMS_SOURCE_DB_PORT= \ + PMS_SOURCE_DB_UNAME= \ + PMS_SOURCE_DB_PASS= \ + PMS_SOURCE_DB_NAME= \ + PMS_SOURCE_SCHEMA_NAME= \ + REGPRC_SOURCE_DB_HOST= \ + REGPRC_SOURCE_DB_PORT= \ + REGPRC_SOURCE_DB_UNAME= \ + REGPRC_SOURCE_DB_PASS= \ + REGPRC_SOURCE_DB_NAME= \ + REGPRC_SOURCE_SCHEMA_NAME= \ + RESIDENT_SOURCE_DB_HOST= \ + RESIDENT_SOURCE_DB_PORT= \ + RESIDENT_SOURCE_DB_UNAME= \ + RESIDENT_SOURCE_DB_PASS= \ + RESIDENT_SOURCE_DB_NAME= \ + RESIDENT_SOURCE_SCHEMA_NAME= \ + ARCHIVE_DB_HOST= \ + ARCHIVE_DB_PORT= \ + ARCHIVE_DB_UNAME= \ + ARCHIVE_DB_PASS= \ + ARCHIVE_DB_NAME= \ + ARCHIVE_SCHEMA_NAME= + +# Set entrypoint +ENTRYPOINT ["./entrypoint.sh"] diff --git a/data-archive/archive-jobs/audit_archive_table_info.json b/data-archive/archive-jobs/audit_archive_table_info.json new file mode 100644 index 00000000..feebdca4 --- /dev/null +++ b/data-archive/archive-jobs/audit_archive_table_info.json @@ -0,0 +1,11 @@ +{ + "tables_info": [ + { + "source_table": "app_audit_log", + "archive_table": "mosip_audit_app_audit_log", + "id_column": "log_id", + "date_column": "log_dtimes", + "older_than_days": 30 + } + ] +} diff --git a/data-archive/archive-jobs/credential_archive_table_info.json b/data-archive/archive-jobs/credential_archive_table_info.json new file mode 100644 index 00000000..63efcd92 --- /dev/null +++ b/data-archive/archive-jobs/credential_archive_table_info.json @@ -0,0 +1,11 @@ +{ + "tables_info": [ + { + "source_table": "credential_transaction", + "archive_table": "mosip_credential_credential_transaction", + "id_column": "id", + "date_column": "cr_dtimes", + "older_than_days": 30 + } + ] +} diff --git a/data-archive/archive-jobs/db.properties b/data-archive/archive-jobs/db.properties new file mode 100644 index 00000000..07020717 --- /dev/null +++ b/data-archive/archive-jobs/db.properties @@ -0,0 +1,34 @@ +[Databases] +DB_NAMES = AUDIT, CREDENTIAL, IDA + +[ARCHIVE] +ARCHIVE_DB_HOST=192.168.0.142 +ARCHIVE_DB_PORT=30091 +ARCHIVE_DB_NAME=mosip_archive +ARCHIVE_SCHEMA_NAME=archive +ARCHIVE_DB_UNAME=archiveuser +ARCHIVE_DB_PASS=mosip123 + +[AUDIT] +AUDIT_SOURCE_DB_HOST=192.168.0.142 
+AUDIT_SOURCE_DB_PORT=30091 +AUDIT_SOURCE_DB_NAME=mosip_audit +AUDIT_SOURCE_SCHEMA_NAME=audit +AUDIT_SOURCE_DB_UNAME=audituser +AUDIT_SOURCE_DB_PASS=mosip123 + +[CREDENTIAL] +CREDENTIAL_SOURCE_DB_HOST=192.168.0.142 +CREDENTIAL_SOURCE_DB_PORT=30091 +CREDENTIAL_SOURCE_DB_NAME=mosip_credential +CREDENTIAL_SOURCE_SCHEMA_NAME=credential +CREDENTIAL_SOURCE_DB_UNAME=credentialuser +CREDENTIAL_SOURCE_DB_PASS=mosip123 + +[IDA] +IDA_SOURCE_DB_HOST=192.168.0.142 +IDA_SOURCE_DB_PORT=30091 +IDA_SOURCE_DB_NAME=mosip_ida +IDA_SOURCE_SCHEMA_NAME=ida +IDA_SOURCE_DB_UNAME=idauser +IDA_SOURCE_DB_PASS=mosip123 diff --git a/data-archive/archive-jobs/esignet_archive_table_info.json b/data-archive/archive-jobs/esignet_archive_table_info.json new file mode 100644 index 00000000..27718ee8 --- /dev/null +++ b/data-archive/archive-jobs/esignet_archive_table_info.json @@ -0,0 +1,11 @@ +{ + "tables_info": [ + { + "source_table": "consent_history", + "archive_table": "mosip_esignet_consent_history", + "id_column": "id", + "date_column": "cr_dtimes", + "older_than_days": 30 + } + ] +} diff --git a/data-archive/archive-jobs/ida_archive_table_info.json b/data-archive/archive-jobs/ida_archive_table_info.json new file mode 100644 index 00000000..93a7e987 --- /dev/null +++ b/data-archive/archive-jobs/ida_archive_table_info.json @@ -0,0 +1,18 @@ +{ + "tables_info": [ + { + "source_table": "credential_event_store", + "archive_table": "mosip_ida_credential_event_store", + "id_column": "event_id", + "date_column": "cr_dtimes", + "older_than_days": 30 + }, + { + "source_table": "otp_transaction", + "archive_table": "mosip_ida_otp_transaction", + "id_column": "id", + "date_column": "cr_dtimes", + "older_than_days": 30 + } + ] +} diff --git a/data-archive/archive-jobs/idrepo_archive_table_info.json b/data-archive/archive-jobs/idrepo_archive_table_info.json new file mode 100644 index 00000000..6df08992 --- /dev/null +++ b/data-archive/archive-jobs/idrepo_archive_table_info.json @@ -0,0 +1,25 @@ +{ + "tables_info": [ + { + "source_table": "anonymous_profile", + "archive_table": "mosip_idrepo_anonymous_profile", + "id_column": "id", + "date_column": "cr_dtimes", + "older_than_days": 30 + }, + { + "source_table": "credential_request_status", + "archive_table": "mosip_idrepo_credential_request_status", + "id_column": "individual_id", + "date_column": "cr_dtimes", + "older_than_days": 30 + }, + { + "source_table": "uin_draft", + "archive_table": "mosip_idrepo_uin_draft", + "id_column": "id", + "date_column": "cr_dtimes", + "older_than_days": 30 + } + ] +} diff --git a/data-archive/archive-jobs/kernel_archive_table_info.json b/data-archive/archive-jobs/kernel_archive_table_info.json new file mode 100644 index 00000000..00fa570a --- /dev/null +++ b/data-archive/archive-jobs/kernel_archive_table_info.json @@ -0,0 +1,11 @@ +{ + "tables_info": [ + { + "source_table": "otp_transaction", + "archive_table": "mosip_kernel_otp_transaction", + "id_column": "id", + "date_column": "generated_dtimes", + "older_than_days": 7 + } + ] +} diff --git a/data-archive/archive-jobs/master_archive_table_info.json b/data-archive/archive-jobs/master_archive_table_info.json new file mode 100644 index 00000000..f19588dd --- /dev/null +++ b/data-archive/archive-jobs/master_archive_table_info.json @@ -0,0 +1,46 @@ +{ + "tables_info": [ + { + "source_table": "bulkupload_transaction", + "archive_table": "mosip_master_bulkupload_transaction", + "id_column": "id", + "date_column": "cr_dtimes", + "older_than_days": 91 + }, + { + "source_table": "device_master_h", + 
"archive_table": "mosip_master_device_master_h", + "id_column": "id", + "date_column": "cr_dtimes", + "older_than_days": 365 + }, + { + "source_table": "machine_master_h", + "archive_table": "mosip_master_machine_master_h", + "id_column": "id", + "date_column": "cr_dtimes", + "older_than_days": 183 + }, + { + "source_table": "registration_center_h", + "archive_table": "mosip_master_registration_center_h", + "id_column": "id", + "date_column": "cr_dtimes", + "older_than_days": 365 + }, + { + "source_table": "user_detail_h", + "archive_table": "mosip_master_user_detail_h", + "id_column": "id", + "date_column": "cr_dtimes", + "older_than_days": 183 + }, + { + "source_table": "zone_user_h", + "archive_table": "mosip_master_zone_user_h", + "id_column": "usr_id", + "date_column": "cr_dtimes", + "older_than_days": 183 + } + ] +} diff --git a/data-archive/archive-jobs/mosip_archive_main.py b/data-archive/archive-jobs/mosip_archive_main.py new file mode 100644 index 00000000..f218b90e --- /dev/null +++ b/data-archive/archive-jobs/mosip_archive_main.py @@ -0,0 +1,153 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +import sys +import psycopg2 +import configparser +import datetime +import os +import json +from datetime import datetime + +def config(): + config = configparser.ConfigParser() + if os.path.exists('db.properties'): + config.read('db.properties') + archive_param = {key.upper(): config['ARCHIVE'][key] for key in config['ARCHIVE']} + source_param = {db_name: {key.upper(): config[db_name][key] for key in config[db_name]} for db_name in config.sections() if db_name != 'ARCHIVE'} + + # Read database names from properties file + db_names = config.get('Databases', 'DB_NAMES').split(',') + db_names = [name.strip() for name in db_names] # Strip leading and trailing spaces + print("db.properties file found and loaded.") + else: + print("db.properties file not found. 
Using environment variables.")
+        archive_param = {
+            'ARCHIVE_DB_HOST': os.environ.get('ARCHIVE_DB_HOST'),
+            'ARCHIVE_DB_PORT': os.environ.get('ARCHIVE_DB_PORT'),
+            'ARCHIVE_DB_NAME': os.environ.get('ARCHIVE_DB_NAME'),
+            'ARCHIVE_SCHEMA_NAME': os.environ.get('ARCHIVE_SCHEMA_NAME'),
+            'ARCHIVE_DB_UNAME': os.environ.get('ARCHIVE_DB_UNAME'),
+            'ARCHIVE_DB_PASS': os.environ.get('ARCHIVE_DB_PASS')
+        }
+        db_names_env = os.environ.get('DB_NAMES')
+        if db_names_env is not None:
+            db_names = db_names_env.split(',')
+            db_names = [name.strip() for name in db_names]  # Strip leading and trailing spaces
+        else:
+            print("Error: DB_NAMES not found in properties file or environment variables.")
+            sys.exit(1)
+
+        source_param = {}
+        for db_name in db_names:
+            source_param[db_name] = {
+                f'{db_name}_SOURCE_DB_HOST': os.environ.get(f'{db_name}_SOURCE_DB_HOST'),
+                f'{db_name}_SOURCE_DB_PORT': os.environ.get(f'{db_name}_SOURCE_DB_PORT'),
+                f'{db_name}_SOURCE_DB_NAME': os.environ.get(f'{db_name}_SOURCE_DB_NAME'),
+                f'{db_name}_SOURCE_SCHEMA_NAME': os.environ.get(f'{db_name}_SOURCE_SCHEMA_NAME'),
+                f'{db_name}_SOURCE_DB_UNAME': os.environ.get(f'{db_name}_SOURCE_DB_UNAME'),
+                f'{db_name}_SOURCE_DB_PASS': os.environ.get(f'{db_name}_SOURCE_DB_PASS')
+            }
+    return db_names, archive_param, source_param
+
+# NOTE: non-NULL values are inlined with plain str() quoting, so a value that
+# itself contains a single quote can still break the generated INSERT; a
+# parameterized sketch is given after this file's diff.
+def getValues(row):
+    finalValues = ""
+    for value in row:
+        if value is None:
+            finalValues += "NULL,"
+        else:
+            finalValues += "'" + str(value) + "',"
+    finalValues = finalValues[:-1]
+    return finalValues
+
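+# Per-database table metadata is resolved in two steps: from a local
+# <db>_archive_table_info.json file when present, otherwise from an
+# environment variable of the same name that holds the JSON payload.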
+def read_tables_info(db_name):
+    try:
+        with open('{}_archive_table_info.json'.format(db_name.lower())) as f:
+            tables_info = json.load(f)
+        print("{}_archive_table_info.json file found and loaded.".format(db_name.lower()))
+        return tables_info['tables_info']
+    except FileNotFoundError:
+        print("{}_archive_table_info.json file not found. Using environment variables.".format(db_name.lower()))
+        tables_info = os.environ.get("{}_archive_table_info".format(db_name.lower()))
+        if tables_info is None:
+            print("Environment variable {}_archive_table_info not found.".format(db_name.lower()))
+            sys.exit(1)
+        return json.loads(tables_info)['tables_info']
+
+def dataArchive(db_name, dbparam, tables_info):
+    sourceConn = None
+    archiveConn = None
+    sourceCur = None
+    archiveCur = None
+    try:
+        print('Connecting to the PostgreSQL database...')
+        sourceConn = psycopg2.connect(
+            user=dbparam["{}_SOURCE_DB_UNAME".format(db_name)],
+            password=dbparam["{}_SOURCE_DB_PASS".format(db_name)],
+            host=dbparam["{}_SOURCE_DB_HOST".format(db_name)],
+            port=dbparam["{}_SOURCE_DB_PORT".format(db_name)],
+            database=dbparam["{}_SOURCE_DB_NAME".format(db_name)]
+        )
+        archiveConn = psycopg2.connect(
+            user=dbparam["ARCHIVE_DB_UNAME"],
+            password=dbparam["ARCHIVE_DB_PASS"],
+            host=dbparam["ARCHIVE_DB_HOST"],
+            port=dbparam["ARCHIVE_DB_PORT"],
+            database=dbparam["ARCHIVE_DB_NAME"]
+        )
+        sourceCur = sourceConn.cursor()
+        archiveCur = archiveConn.cursor()
+        sschemaName = dbparam["{}_SOURCE_SCHEMA_NAME".format(db_name)]
+        aschemaName = dbparam["ARCHIVE_SCHEMA_NAME"]
+
+        # Loop through the list of table_info dictionaries
+        for table_info in tables_info:
+            source_table_name = table_info['source_table']
+            archive_table_name = table_info['archive_table']
+            id_column = table_info['id_column']
+            # Tables without a date_column/older_than_days pair are archived in full
+            if 'date_column' in table_info and 'older_than_days' in table_info:
+                date_column = table_info['date_column']
+                older_than_days = table_info['older_than_days']
+                select_query = "SELECT * FROM {0}.{1} WHERE {2} < NOW() - INTERVAL '{3} days'".format(sschemaName, source_table_name, date_column, older_than_days)
+            else:
+                select_query = "SELECT * FROM {0}.{1}".format(sschemaName, source_table_name)
+            sourceCur.execute(select_query)
+            rows = sourceCur.fetchall()
+            select_count = sourceCur.rowcount
+            print(select_count, ": Record(s) selected for archive from", source_table_name)
+            if select_count > 0:
+                for row in rows:
+                    rowValues = getValues(row)
+                    # ON CONFLICT DO NOTHING keeps re-runs idempotent when a row is already archived
+                    insert_query = "INSERT INTO {0}.{1} VALUES ({2}) ON CONFLICT DO NOTHING".format(aschemaName, archive_table_name, rowValues)
+                    archiveCur.execute(insert_query)
+                    archiveConn.commit()
+                    insert_count = archiveCur.rowcount
+                    if insert_count == 0:
+                        print("Skipping duplicate record with ID:", row[0])
+                    else:
+                        print(insert_count, ": Record inserted successfully")
+                    # The source row is removed either way: it is now present in the archive
+                    delete_query = 'DELETE FROM "{0}"."{1}" WHERE "{2}" = %s'.format(sschemaName, source_table_name, id_column)
+                    sourceCur.execute(delete_query, (row[0],))
+                    sourceConn.commit()
+                    delete_count = sourceCur.rowcount
+                    print(delete_count, ": Record(s) deleted successfully")
+    except (Exception, psycopg2.DatabaseError) as error:
+        print("Error during data archiving:", error)
+    finally:
+        if sourceCur is not None:
+            sourceCur.close()
+        if sourceConn is not None:
+            sourceConn.close()
+            print('Source database connection closed.')
+        if archiveCur is not None:
+            archiveCur.close()
+        if archiveConn is not None:
+            archiveConn.close()
+            print('Archive database connection closed.')
+
+if __name__ == '__main__':
+    db_names, archive_param, source_param = config()
+    for db_name in db_names:
+        dbparam = source_param[db_name]
+        dbparam.update(archive_param)
+        tables_info = read_tables_info(db_name)
+        dataArchive(db_name, dbparam, tables_info)
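
The row values above are still inlined into the INSERT as quoted strings, so a
value containing a single quote can produce invalid SQL. A minimal sketch of a
parameterized alternative using psycopg2's sql module (assuming psycopg2 2.7+;
build_insert is an illustrative name, not part of this patch):

    from psycopg2 import sql

    def build_insert(schema, table, row):
        # One bind-parameter placeholder per column; psycopg2 then adapts
        # None, timestamps and bytea values natively instead of str()-ing them.
        placeholders = sql.SQL(", ").join(sql.Placeholder() * len(row))
        return sql.SQL("INSERT INTO {}.{} VALUES ({}) ON CONFLICT DO NOTHING").format(
            sql.Identifier(schema), sql.Identifier(table), placeholders)

    # Inside the row loop this would replace the string-built insert_query:
    #     archiveCur.execute(build_insert(aschemaName, archive_table_name, row), row)

Quoting the schema and table through sql.Identifier also makes the insert
consistent with the already-parameterized delete_query above.

diff --git a/data-archive/archive-jobs/mosip_ida/mosip_archive_ida.ini b/data-archive/archive-jobs/mosip_ida/mosip_archive_ida.ini
deleted file mode 100644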
index 1aa68ddc..00000000 --- a/data-archive/archive-jobs/mosip_ida/mosip_archive_ida.ini +++ /dev/null @@ -1,20 +0,0 @@ -[MOSIP-DB-SECTION] -source_db_serverip=13.233.223.29 -source_db_port=30090 -source_db_name=mosip_ida -source_schema_name=ida -source_db_uname=idacuser -source_db_pass=Mosip@dev123 -archive_table1=auth_transaction -archive_table2=otp_transaction - -archive_table3=credential_event_store - -archive_db_serverip=13.233.223.29 -archive_db_port=30090 -archive_db_name=mosip_archive -archive_schema_name=archive -archive_db_uname=archiveuser -archive_db_pass=Mosip@dev123 - -archive_older_than_days = 2 diff --git a/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table1.py b/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table1.py deleted file mode 100644 index e1f4b8ea..00000000 --- a/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table1.py +++ /dev/null @@ -1,107 +0,0 @@ -#-- ------------------------------------------------------------------------------------------------- -#-- Job Name : ID Authentication DB Tables Archive -#-- DB Name : mosip_ida -#-- Table Names : auth_transaction -#-- Purpose : Job to Archive Data in ID Authentication DB for above mentioned tables -#-- Create By : Sadanandegowda DM -#-- Created Date : Dec-2020 -#-- -#-- Modified Date Modified By Comments / Remarks -#-- ------------------------------------------------------------------------------------------ -#-- -#-- ------------------------------------------------------------------------------------------ - -#!/usr/bin/python -# -*- coding: utf-8 -*- -import sys - -import configparser -import psycopg2 -import datetime - -from configparser import ConfigParser -from datetime import datetime - -def config(filename='mosip_archive_ida.ini', section='MOSIP-DB-SECTION'): - parser = ConfigParser() - parser.read(filename) - dbparam = {} - if parser.has_section(section): - params = parser.items(section) - for param in params: - dbparam[param[0]] = param[1] - else: - raise Exception('Section {0} not found in the {1} file'.format(section, filename)) - - return dbparam - -def getValues(row): - finalValues ="" - for values in row: - finalValues = finalValues+"'"+str(values)+"'," - - finalValues = finalValues[0:-1] - return finalValues - -def dataArchive(): - sourseConn = None - archiveConn = None - try: - - dbparam = config() - - print('Connecting to the PostgreSQL database...') - sourseConn = psycopg2.connect(user=dbparam["source_db_uname"], - password=dbparam["source_db_pass"], - host=dbparam["source_db_serverip"], - port=dbparam["source_db_port"], - database=dbparam["source_db_name"]) - archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"], - password=dbparam["archive_db_pass"], - host=dbparam["archive_db_serverip"], - port=dbparam["archive_db_port"], - database=dbparam["archive_db_name"]) - - sourceCur = sourseConn.cursor() - archiveCur = archiveConn.cursor() - - tableName=dbparam["archive_table1"] - sschemaName = dbparam["source_schema_name"] - aschemaName = dbparam["archive_schema_name"] - oldDays = dbparam["archive_older_than_days"] - - print(tableName) - select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'" - sourceCur.execute(select_query) - rows = sourceCur.fetchall() - select_count = sourceCur.rowcount - print(select_count, ": Record selected for archive from ", tableName) - if select_count > 0: - for row in rows: - rowValues = getValues(row) - insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES 
("+rowValues+")" - archiveCur.execute(insert_query) - archiveConn.commit() - insert_count = archiveCur.rowcount - print(insert_count, ": Record inserted successfully ") - if insert_count > 0: - delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE id ='"+row[0]+"'" - sourceCur.execute(delete_query) - sourseConn.commit() - delete_count = sourceCur.rowcount - print(delete_count, ": Record deleted successfully") - - except (Exception, psycopg2.DatabaseError) as error: - print(error) - finally: - if sourseConn is not None: - sourceCur.close() - sourseConn.close() - print('Database sourse connection closed.') - if archiveConn is not None: - archiveCur.close() - archiveConn.close() - print('Database archive connection closed.') - -if __name__ == '__main__': - dataArchive() diff --git a/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table2.py b/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table2.py deleted file mode 100644 index 25950a2b..00000000 --- a/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table2.py +++ /dev/null @@ -1,107 +0,0 @@ -#-- ------------------------------------------------------------------------------------------------- -#-- Job Name : ID Authentication DB Tables Archive -#-- DB Name : mosip_ida -#-- Table Names : otp_transaction -#-- Purpose : Job to Archive Data in ID Authentication DB for above mentioned tables -#-- Create By : Sadanandegowda DM -#-- Created Date : Dec-2020 -#-- -#-- Modified Date Modified By Comments / Remarks -#-- ------------------------------------------------------------------------------------------ -#-- -#-- ------------------------------------------------------------------------------------------ - -#!/usr/bin/python -# -*- coding: utf-8 -*- -import sys - -import configparser -import psycopg2 -import datetime - -from configparser import ConfigParser -from datetime import datetime - -def config(filename='mosip_archive_ida.ini', section='MOSIP-DB-SECTION'): - parser = ConfigParser() - parser.read(filename) - dbparam = {} - if parser.has_section(section): - params = parser.items(section) - for param in params: - dbparam[param[0]] = param[1] - else: - raise Exception('Section {0} not found in the {1} file'.format(section, filename)) - - return dbparam - -def getValues(row): - finalValues ="" - for values in row: - finalValues = finalValues+"'"+str(values)+"'," - - finalValues = finalValues[0:-1] - return finalValues - -def dataArchive(): - sourseConn = None - archiveConn = None - try: - - dbparam = config() - - print('Connecting to the PostgreSQL database...') - sourseConn = psycopg2.connect(user=dbparam["source_db_uname"], - password=dbparam["source_db_pass"], - host=dbparam["source_db_serverip"], - port=dbparam["source_db_port"], - database=dbparam["source_db_name"]) - archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"], - password=dbparam["archive_db_pass"], - host=dbparam["archive_db_serverip"], - port=dbparam["archive_db_port"], - database=dbparam["archive_db_name"]) - - sourceCur = sourseConn.cursor() - archiveCur = archiveConn.cursor() - - tableName=dbparam["archive_table2"] - sschemaName = dbparam["source_schema_name"] - aschemaName = dbparam["archive_schema_name"] - oldDays = dbparam["archive_older_than_days"] - - print(tableName) - select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'" - sourceCur.execute(select_query) - rows = sourceCur.fetchall() - select_count = sourceCur.rowcount - print(select_count, ": Record selected for archive 
from ", tableName) - if select_count > 0: - for row in rows: - rowValues = getValues(row) - insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")" - archiveCur.execute(insert_query) - archiveConn.commit() - insert_count = archiveCur.rowcount - print(insert_count, ": Record inserted successfully ") - if insert_count > 0: - delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE id ='"+row[0]+"'" - sourceCur.execute(delete_query) - sourseConn.commit() - delete_count = sourceCur.rowcount - print(delete_count, ": Record deleted successfully") - - except (Exception, psycopg2.DatabaseError) as error: - print(error) - finally: - if sourseConn is not None: - sourceCur.close() - sourseConn.close() - print('Database sourse connection closed.') - if archiveConn is not None: - archiveCur.close() - archiveConn.close() - print('Database archive connection closed.') - -if __name__ == '__main__': - dataArchive() diff --git a/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table3.py b/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table3.py deleted file mode 100644 index 2fc64647..00000000 --- a/data-archive/archive-jobs/mosip_ida/mosip_archive_ida_table3.py +++ /dev/null @@ -1,107 +0,0 @@ -#-- ------------------------------------------------------------------------------------------------- -#-- Job Name : ID Authentication DB Tables Archive -#-- DB Name : mosip_ida -#-- Table Names : credential_event_store -#-- Purpose : Job to Archive Data in ID Authentication DB for above mentioned tables -#-- Create By : Ram Bhatt -#-- Created Date : Oct-2021 -#-- -#-- Modified Date Modified By Comments / Remarks -#-- ------------------------------------------------------------------------------------------ -#-- -#-- ------------------------------------------------------------------------------------------ - -#!/usr/bin/python -# -*- coding: utf-8 -*- -import sys - -import configparser -import psycopg2 -import datetime - -from configparser import ConfigParser -from datetime import datetime - -def config(filename='mosip_archive_ida.ini', section='MOSIP-DB-SECTION'): - parser = ConfigParser() - parser.read(filename) - dbparam = {} - if parser.has_section(section): - params = parser.items(section) - for param in params: - dbparam[param[0]] = param[1] - else: - raise Exception('Section {0} not found in the {1} file'.format(section, filename)) - - return dbparam - -def getValues(row): - finalValues ="" - for values in row: - finalValues = finalValues+"'"+str(values)+"'," - - finalValues = finalValues[0:-1] - return finalValues - -def dataArchive(): - sourseConn = None - archiveConn = None - try: - - dbparam = config() - - print('Connecting to the PostgreSQL database...') - sourseConn = psycopg2.connect(user=dbparam["source_db_uname"], - password=dbparam["source_db_pass"], - host=dbparam["source_db_serverip"], - port=dbparam["source_db_port"], - database=dbparam["source_db_name"]) - archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"], - password=dbparam["archive_db_pass"], - host=dbparam["archive_db_serverip"], - port=dbparam["archive_db_port"], - database=dbparam["archive_db_name"]) - - sourceCur = sourseConn.cursor() - archiveCur = archiveConn.cursor() - - tableName=dbparam["archive_table3"] - sschemaName = dbparam["source_schema_name"] - aschemaName = dbparam["archive_schema_name"] - oldDays = dbparam["archive_older_than_days"] - - print(tableName) - select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'" 
- sourceCur.execute(select_query) - rows = sourceCur.fetchall() - select_count = sourceCur.rowcount - print(select_count, ": Record selected for archive from ", tableName) - if select_count > 0: - for row in rows: - rowValues = getValues(row) - insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")" - archiveCur.execute(insert_query) - archiveConn.commit() - insert_count = archiveCur.rowcount - print(insert_count, ": Record inserted successfully ") - if insert_count > 0: - delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE event_id ='"+row[0]+"'" - sourceCur.execute(delete_query) - sourseConn.commit() - delete_count = sourceCur.rowcount - print(delete_count, ": Record deleted successfully") - - except (Exception, psycopg2.DatabaseError) as error: - print(error) - finally: - if sourseConn is not None: - sourceCur.close() - sourseConn.close() - print('Database sourse connection closed.') - if archiveConn is not None: - archiveCur.close() - archiveConn.close() - print('Database archive connection closed.') - -if __name__ == '__main__': - dataArchive() diff --git a/data-archive/archive-jobs/mosip_ida/mosip_archive_job_ida.sh b/data-archive/archive-jobs/mosip_ida/mosip_archive_job_ida.sh deleted file mode 100644 index 1ccd8beb..00000000 --- a/data-archive/archive-jobs/mosip_ida/mosip_archive_job_ida.sh +++ /dev/null @@ -1,24 +0,0 @@ -### -- --------------------------------------------------------------------------------------------------------- -### -- Script Name : IDA Archive Job -### -- Deploy Module : IDA -### -- Purpose : To Archive IDA tables which are marked for archive. -### -- Create By : Sadanandegowda DM -### -- Created Date : Dec-2020 -### -- -### -- Modified Date Modified By Comments / Remarks -### -- ---------------------------------------------------------------------------------------- -### -- Oct-2021 Ram Bhatt Added archival scripts for credential_event_store -### -- -------------------------------------------------------------------------------------------- - - - -python mosip_archive_ida_table1.py & -sleep 5m - -python mosip_archive_ida_table2.py & -sleep 5m - -python mosip_archive_ida_table3.py & - - -#=============================================================================================== diff --git a/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo.ini b/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo.ini deleted file mode 100644 index ec62c41d..00000000 --- a/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo.ini +++ /dev/null @@ -1,20 +0,0 @@ -[MOSIP-DB-SECTION] -source_db_serverip=13.233.223.29 -source_db_port=30090 -source_db_name=mosip_idrepo -source_schema_name=idrepo -source_db_uname=idrepouser -source_db_pass=Mosip@dev123 -archive_table1=uin_h -archive_table2=uin_biometric_h -archive_table3=uin_document_h - - -archive_db_serverip=13.233.223.29 -archive_db_port=30090 -archive_db_name=mosip_archive -archive_schema_name=archive -archive_db_uname=archiveuser -archive_db_pass=Mosip@dev123 - -archive_older_than_days = 2 \ No newline at end of file diff --git a/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table1.py b/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table1.py deleted file mode 100644 index 0f51685b..00000000 --- a/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table1.py +++ /dev/null @@ -1,107 +0,0 @@ -#-- ------------------------------------------------------------------------------------------------- -#-- Job Name : ID Repository DB 
Tables Archive -#-- DB Name : mosip_idrepo -#-- Table Names : uin_h -#-- Purpose : Job to Archive Data in ID Repository DB for above mentioned tables -#-- Create By : Sadanandegowda DM -#-- Created Date : Dec-2020 -#-- -#-- Modified Date Modified By Comments / Remarks -#-- ------------------------------------------------------------------------------------------ -#-- -#-- ------------------------------------------------------------------------------------------ - -#!/usr/bin/python -# -*- coding: utf-8 -*- -import sys - -import configparser -import psycopg2 -import datetime - -from configparser import ConfigParser -from datetime import datetime - -def config(filename='mosip_archive_prereg.ini', section='MOSIP-DB-SECTION'): - parser = ConfigParser() - parser.read(filename) - dbparam = {} - if parser.has_section(section): - params = parser.items(section) - for param in params: - dbparam[param[0]] = param[1] - else: - raise Exception('Section {0} not found in the {1} file'.format(section, filename)) - - return dbparam - -def getValues(row): - finalValues ="" - for values in row: - finalValues = finalValues+"'"+str(values)+"'," - - finalValues = finalValues[0:-1] - return finalValues - -def dataArchive(): - sourseConn = None - archiveConn = None - try: - - dbparam = config() - - print('Connecting to the PostgreSQL database...') - sourseConn = psycopg2.connect(user=dbparam["source_db_uname"], - password=dbparam["source_db_pass"], - host=dbparam["source_db_serverip"], - port=dbparam["source_db_port"], - database=dbparam["source_db_name"]) - archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"], - password=dbparam["archive_db_pass"], - host=dbparam["archive_db_serverip"], - port=dbparam["archive_db_port"], - database=dbparam["archive_db_name"]) - - sourceCur = sourseConn.cursor() - archiveCur = archiveConn.cursor() - - tableName=dbparam["archive_table1"] - sschemaName = dbparam["source_schema_name"] - aschemaName = dbparam["archive_schema_name"] - oldDays = dbparam["archive_older_than_days"] - - print(tableName) - select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'" - sourceCur.execute(select_query) - rows = sourceCur.fetchall() - select_count = sourceCur.rowcount - print(select_count, ": Record selected for archive from ", tableName) - if select_count > 0: - for row in rows: - rowValues = getValues(row) - insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")" - archiveCur.execute(insert_query) - archiveConn.commit() - insert_count = archiveCur.rowcount - print(insert_count, ": Record inserted successfully ") - if insert_count > 0: - delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE uin_ref_id ='"+row[0]+"'AND eff_dtimes='"+row[1]+"'" - sourceCur.execute(delete_query) - sourseConn.commit() - delete_count = sourceCur.rowcount - print(delete_count, ": Record deleted successfully") - - except (Exception, psycopg2.DatabaseError) as error: - print(error) - finally: - if sourseConn is not None: - sourceCur.close() - sourseConn.close() - print('Database sourse connection closed.') - if archiveConn is not None: - archiveCur.close() - archiveConn.close() - print('Database archive connection closed.') - -if __name__ == '__main__': - dataArchive() diff --git a/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table2.py b/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table2.py deleted file mode 100644 index ba6f98f7..00000000 --- 
a/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table2.py +++ /dev/null @@ -1,107 +0,0 @@ -#-- ------------------------------------------------------------------------------------------------- -#-- Job Name : ID Repository DB Tables Archive -#-- DB Name : mosip_idrepo -#-- Table Names : uin_biometric_h -#-- Purpose : Job to Archive Data in ID Repository DB for above mentioned tables -#-- Create By : Sadanandegowda DM -#-- Created Date : Dec-2020 -#-- -#-- Modified Date Modified By Comments / Remarks -#-- ------------------------------------------------------------------------------------------ -#-- -#-- ------------------------------------------------------------------------------------------ - -#!/usr/bin/python -# -*- coding: utf-8 -*- -import sys - -import configparser -import psycopg2 -import datetime - -from configparser import ConfigParser -from datetime import datetime - -def config(filename='mosip_archive_prereg.ini', section='MOSIP-DB-SECTION'): - parser = ConfigParser() - parser.read(filename) - dbparam = {} - if parser.has_section(section): - params = parser.items(section) - for param in params: - dbparam[param[0]] = param[1] - else: - raise Exception('Section {0} not found in the {1} file'.format(section, filename)) - - return dbparam - -def getValues(row): - finalValues ="" - for values in row: - finalValues = finalValues+"'"+str(values)+"'," - - finalValues = finalValues[0:-1] - return finalValues - -def dataArchive(): - sourseConn = None - archiveConn = None - try: - - dbparam = config() - - print('Connecting to the PostgreSQL database...') - sourseConn = psycopg2.connect(user=dbparam["source_db_uname"], - password=dbparam["source_db_pass"], - host=dbparam["source_db_serverip"], - port=dbparam["source_db_port"], - database=dbparam["source_db_name"]) - archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"], - password=dbparam["archive_db_pass"], - host=dbparam["archive_db_serverip"], - port=dbparam["archive_db_port"], - database=dbparam["archive_db_name"]) - - sourceCur = sourseConn.cursor() - archiveCur = archiveConn.cursor() - - tableName=dbparam["archive_table2"] - sschemaName = dbparam["source_schema_name"] - aschemaName = dbparam["archive_schema_name"] - oldDays = dbparam["archive_older_than_days"] - - print(tableName) - select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'" - sourceCur.execute(select_query) - rows = sourceCur.fetchall() - select_count = sourceCur.rowcount - print(select_count, ": Record selected for archive from ", tableName) - if select_count > 0: - for row in rows: - rowValues = getValues(row) - insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")" - archiveCur.execute(insert_query) - archiveConn.commit() - insert_count = archiveCur.rowcount - print(insert_count, ": Record inserted successfully ") - if insert_count > 0: - delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE uin_ref_id ='"+row[0]+"'AND biometric_file_type='"+row[1]+"'AND eff_dtimes='"+row[2]+"'" - sourceCur.execute(delete_query) - sourseConn.commit() - delete_count = sourceCur.rowcount - print(delete_count, ": Record deleted successfully") - - except (Exception, psycopg2.DatabaseError) as error: - print(error) - finally: - if sourseConn is not None: - sourceCur.close() - sourseConn.close() - print('Database sourse connection closed.') - if archiveConn is not None: - archiveCur.close() - archiveConn.close() - print('Database archive connection closed.') - -if 
__name__ == '__main__': - dataArchive() diff --git a/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table3.py b/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table3.py deleted file mode 100644 index e73ed331..00000000 --- a/data-archive/archive-jobs/mosip_idrepo/mosip_archive_idrepo_table3.py +++ /dev/null @@ -1,107 +0,0 @@ -#-- ------------------------------------------------------------------------------------------------- -#-- Job Name : ID Repository DB Tables Archive -#-- DB Name : mosip_idrepo -#-- Table Names : uin_document_h -#-- Purpose : Job to Archive Data in ID Repository DB for above mentioned tables -#-- Create By : Sadanandegowda DM -#-- Created Date : Dec-2020 -#-- -#-- Modified Date Modified By Comments / Remarks -#-- ------------------------------------------------------------------------------------------ -#-- -#-- ------------------------------------------------------------------------------------------ - -#!/usr/bin/python -# -*- coding: utf-8 -*- -import sys - -import configparser -import psycopg2 -import datetime - -from configparser import ConfigParser -from datetime import datetime - -def config(filename='mosip_archive_prereg.ini', section='MOSIP-DB-SECTION'): - parser = ConfigParser() - parser.read(filename) - dbparam = {} - if parser.has_section(section): - params = parser.items(section) - for param in params: - dbparam[param[0]] = param[1] - else: - raise Exception('Section {0} not found in the {1} file'.format(section, filename)) - - return dbparam - -def getValues(row): - finalValues ="" - for values in row: - finalValues = finalValues+"'"+str(values)+"'," - - finalValues = finalValues[0:-1] - return finalValues - -def dataArchive(): - sourseConn = None - archiveConn = None - try: - - dbparam = config() - - print('Connecting to the PostgreSQL database...') - sourseConn = psycopg2.connect(user=dbparam["source_db_uname"], - password=dbparam["source_db_pass"], - host=dbparam["source_db_serverip"], - port=dbparam["source_db_port"], - database=dbparam["source_db_name"]) - archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"], - password=dbparam["archive_db_pass"], - host=dbparam["archive_db_serverip"], - port=dbparam["archive_db_port"], - database=dbparam["archive_db_name"]) - - sourceCur = sourseConn.cursor() - archiveCur = archiveConn.cursor() - - tableName=dbparam["archive_table3"] - sschemaName = dbparam["source_schema_name"] - aschemaName = dbparam["archive_schema_name"] - oldDays = dbparam["archive_older_than_days"] - - print(tableName) - select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'" - sourceCur.execute(select_query) - rows = sourceCur.fetchall() - select_count = sourceCur.rowcount - print(select_count, ": Record selected for archive from ", tableName) - if select_count > 0: - for row in rows: - rowValues = getValues(row) - insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")" - archiveCur.execute(insert_query) - archiveConn.commit() - insert_count = archiveCur.rowcount - print(insert_count, ": Record inserted successfully ") - if insert_count > 0: - delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE uin_ref_id ='"+row[0]+"'AND doccat_code='"+row[1]+"'AND eff_dtimes='"+row[3]+"'" - sourceCur.execute(delete_query) - sourseConn.commit() - delete_count = sourceCur.rowcount - print(delete_count, ": Record deleted successfully") - - except (Exception, psycopg2.DatabaseError) as error: - print(error) - finally: - if 
sourseConn is not None: - sourceCur.close() - sourseConn.close() - print('Database sourse connection closed.') - if archiveConn is not None: - archiveCur.close() - archiveConn.close() - print('Database archive connection closed.') - -if __name__ == '__main__': - dataArchive() diff --git a/data-archive/archive-jobs/mosip_idrepo/mosip_archive_job_idrepo.sh b/data-archive/archive-jobs/mosip_idrepo/mosip_archive_job_idrepo.sh deleted file mode 100644 index 53433df9..00000000 --- a/data-archive/archive-jobs/mosip_idrepo/mosip_archive_job_idrepo.sh +++ /dev/null @@ -1,19 +0,0 @@ -### -- --------------------------------------------------------------------------------------------------------- -### -- Script Name : ID Repository Archive Job -### -- Deploy Module : Pre registration -### -- Purpose : To Archive ID Repository tables which are marked for archive. -### -- Create By : Sadanandegowda DM -### -- Created Date : Dec-2020 -### -- -### -- Modified Date Modified By Comments / Remarks -### -- ---------------------------------------------------------------------------------------- - -python mosip_archive_idrepo_table1.py & -sleep 5m - -python mosip_archive_idrepo_table2.py & -sleep 5m - -python mosip_archive_idrepo_table3.py & - -#=============================================================================================== diff --git a/data-archive/archive-jobs/mosip_prereg/mosip_archive_job_prereg.sh b/data-archive/archive-jobs/mosip_prereg/mosip_archive_job_prereg.sh deleted file mode 100644 index 31b35123..00000000 --- a/data-archive/archive-jobs/mosip_prereg/mosip_archive_job_prereg.sh +++ /dev/null @@ -1,22 +0,0 @@ -### -- --------------------------------------------------------------------------------------------------------- -### -- Script Name : Pre Registration Archive Job -### -- Deploy Module : Pre registration -### -- Purpose : To Archive Pre Registration tables which are marked for archive. 
-### -- Create By : Sadanandegowda DM -### -- Created Date : Dec-2020 -### -- -### -- Modified Date Modified By Comments / Remarks -### -- ---------------------------------------------------------------------------------------- - -python mosip_archive_prereg_table1.py & -sleep 5m - -python mosip_archive_prereg_table2.py & -sleep 5m - -python mosip_archive_prereg_table3.py & -sleep 5m - -python mosip_archive_prereg_table4.py & - -#=============================================================================================== diff --git a/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg.ini b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg.ini deleted file mode 100644 index 054377ec..00000000 --- a/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg.ini +++ /dev/null @@ -1,23 +0,0 @@ -[MOSIP-DB-SECTION] -source_db_serverip=13.233.223.29 -source_db_port=30090 -source_db_name=mosip_prereg -source_schema_name=prereg -source_db_uname=prereguser -source_db_pass=Mosip@dev123 -archive_table1=applicant_demographic_consumed -archive_table2=applicant_document_consumed -archive_table3=reg_appointment_consumed -archive_table4=processed_prereg_list - -archive_table5=applications -archive_table6=otp_transaction - -archive_db_serverip=13.233.223.29 -archive_db_port=30090 -archive_db_name=mosip_archive -archive_schema_name=archive -archive_db_uname=archiveuser -archive_db_pass=Mosip@dev123 - -archive_older_than_days = 2 diff --git a/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table1.py b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table1.py deleted file mode 100644 index ebf1bbe7..00000000 --- a/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table1.py +++ /dev/null @@ -1,107 +0,0 @@ -#-- ------------------------------------------------------------------------------------------------- -#-- Job Name : Pre Registration DB Tables Archive -#-- DB Name : mosip_prereg -#-- Table Names : applicant_demographic_consumed -#-- Purpose : Job to Archive Data in pre registration DB for above mentioned tables -#-- Create By : Sadanandegowda DM -#-- Created Date : Dec-2020 -#-- -#-- Modified Date Modified By Comments / Remarks -#-- ------------------------------------------------------------------------------------------ -#-- -#-- ------------------------------------------------------------------------------------------ - -#!/usr/bin/python -# -*- coding: utf-8 -*- -import sys - -import configparser -import psycopg2 -import datetime - -from configparser import ConfigParser -from datetime import datetime - -def config(filename='mosip_archive_prereg.ini', section='MOSIP-DB-SECTION'): - parser = ConfigParser() - parser.read(filename) - dbparam = {} - if parser.has_section(section): - params = parser.items(section) - for param in params: - dbparam[param[0]] = param[1] - else: - raise Exception('Section {0} not found in the {1} file'.format(section, filename)) - - return dbparam - -def getValues(row): - finalValues ="" - for values in row: - finalValues = finalValues+"'"+str(values)+"'," - - finalValues = finalValues[0:-1] - return finalValues - -def dataArchive(): - sourseConn = None - archiveConn = None - try: - - dbparam = config() - - print('Connecting to the PostgreSQL database...') - sourseConn = psycopg2.connect(user=dbparam["source_db_uname"], - password=dbparam["source_db_pass"], - host=dbparam["source_db_serverip"], - port=dbparam["source_db_port"], - database=dbparam["source_db_name"]) - archiveConn = 
psycopg2.connect(user=dbparam["archive_db_uname"], - password=dbparam["archive_db_pass"], - host=dbparam["archive_db_serverip"], - port=dbparam["archive_db_port"], - database=dbparam["archive_db_name"]) - - sourceCur = sourseConn.cursor() - archiveCur = archiveConn.cursor() - - tableName=dbparam["archive_table1"] - sschemaName = dbparam["source_schema_name"] - aschemaName = dbparam["archive_schema_name"] - oldDays = dbparam["archive_older_than_days"] - - print(tableName) - select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'" - sourceCur.execute(select_query) - rows = sourceCur.fetchall() - select_count = sourceCur.rowcount - print(select_count, ": Record selected for archive from ", tableName) - if select_count > 0: - for row in rows: - rowValues = getValues(row) - insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")" - archiveCur.execute(insert_query) - archiveConn.commit() - insert_count = archiveCur.rowcount - print(insert_count, ": Record inserted successfully ") - if insert_count > 0: - delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE prereg_id ='"+row[0]+"'" - sourceCur.execute(delete_query) - sourseConn.commit() - delete_count = sourceCur.rowcount - print(delete_count, ": Record deleted successfully") - - except (Exception, psycopg2.DatabaseError) as error: - print(error) - finally: - if sourseConn is not None: - sourceCur.close() - sourseConn.close() - print('Database sourse connection closed.') - if archiveConn is not None: - archiveCur.close() - archiveConn.close() - print('Database archive connection closed.') - -if __name__ == '__main__': - dataArchive() diff --git a/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table2.py b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table2.py deleted file mode 100644 index db2fca1e..00000000 --- a/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table2.py +++ /dev/null @@ -1,107 +0,0 @@ -#-- ------------------------------------------------------------------------------------------------- -#-- Job Name : Pre Registration DB Tables Archive -#-- DB Name : mosip_prereg -#-- Table Names : applicant_document_consumed -#-- Purpose : Job to Archive Data in pre registration DB for above mentioned tables -#-- Create By : Sadanandegowda DM -#-- Created Date : Dec-2020 -#-- -#-- Modified Date Modified By Comments / Remarks -#-- ------------------------------------------------------------------------------------------ -#-- -#-- ------------------------------------------------------------------------------------------ - -#!/usr/bin/python -# -*- coding: utf-8 -*- -import sys - -import configparser -import psycopg2 -import datetime - -from configparser import ConfigParser -from datetime import datetime - -def config(filename='mosip_archive_prereg.ini', section='MOSIP-DB-SECTION'): - parser = ConfigParser() - parser.read(filename) - dbparam = {} - if parser.has_section(section): - params = parser.items(section) - for param in params: - dbparam[param[0]] = param[1] - else: - raise Exception('Section {0} not found in the {1} file'.format(section, filename)) - - return dbparam - -def getValues(row): - finalValues ="" - for values in row: - finalValues = finalValues+"'"+str(values)+"'," - - finalValues = finalValues[0:-1] - return finalValues - -def dataArchive(): - sourseConn = None - archiveConn = None - try: - - dbparam = config() - - print('Connecting to the PostgreSQL database...') - sourseConn = 
psycopg2.connect(user=dbparam["source_db_uname"], - password=dbparam["source_db_pass"], - host=dbparam["source_db_serverip"], - port=dbparam["source_db_port"], - database=dbparam["source_db_name"]) - archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"], - password=dbparam["archive_db_pass"], - host=dbparam["archive_db_serverip"], - port=dbparam["archive_db_port"], - database=dbparam["archive_db_name"]) - - sourceCur = sourseConn.cursor() - archiveCur = archiveConn.cursor() - - tableName=dbparam["archive_table2"] - sschemaName = dbparam["source_schema_name"] - aschemaName = dbparam["archive_schema_name"] - oldDays = dbparam["archive_older_than_days"] - - print(tableName) - select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'" - sourceCur.execute(select_query) - rows = sourceCur.fetchall() - select_count = sourceCur.rowcount - print(select_count, ": Record selected for archive from ", tableName) - if select_count > 0: - for row in rows: - rowValues = getValues(row) - insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")" - archiveCur.execute(insert_query) - archiveConn.commit() - insert_count = archiveCur.rowcount - print(insert_count, ": Record inserted successfully ") - if insert_count > 0: - delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE id ='"+row[0]+"'" - sourceCur.execute(delete_query) - sourseConn.commit() - delete_count = sourceCur.rowcount - print(delete_count, ": Record deleted successfully") - - except (Exception, psycopg2.DatabaseError) as error: - print(error) - finally: - if sourseConn is not None: - sourceCur.close() - sourseConn.close() - print('Database sourse connection closed.') - if archiveConn is not None: - archiveCur.close() - archiveConn.close() - print('Database archive connection closed.') - -if __name__ == '__main__': - dataArchive() diff --git a/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table3.py b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table3.py deleted file mode 100644 index d1422594..00000000 --- a/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table3.py +++ /dev/null @@ -1,107 +0,0 @@ -#-- ------------------------------------------------------------------------------------------------- -#-- Job Name : Pre Registration DB Tables Archive -#-- DB Name : mosip_prereg -#-- Table Names : applicant_appointment_consumed -#-- Purpose : Job to Archive Data in pre registration DB for above mentioned tables -#-- Create By : Sadanandegowda DM -#-- Created Date : Dec-2020 -#-- -#-- Modified Date Modified By Comments / Remarks -#-- ------------------------------------------------------------------------------------------ -#-- -#-- ------------------------------------------------------------------------------------------ - -#!/usr/bin/python -# -*- coding: utf-8 -*- -import sys - -import configparser -import psycopg2 -import datetime - -from configparser import ConfigParser -from datetime import datetime - -def config(filename='mosip_archive_prereg.ini', section='MOSIP-DB-SECTION'): - parser = ConfigParser() - parser.read(filename) - dbparam = {} - if parser.has_section(section): - params = parser.items(section) - for param in params: - dbparam[param[0]] = param[1] - else: - raise Exception('Section {0} not found in the {1} file'.format(section, filename)) - - return dbparam - -def getValues(row): - finalValues ="" - for values in row: - finalValues = finalValues+"'"+str(values)+"'," - - finalValues = 
finalValues[0:-1] - return finalValues - -def dataArchive(): - sourseConn = None - archiveConn = None - try: - - dbparam = config() - - print('Connecting to the PostgreSQL database...') - sourseConn = psycopg2.connect(user=dbparam["source_db_uname"], - password=dbparam["source_db_pass"], - host=dbparam["source_db_serverip"], - port=dbparam["source_db_port"], - database=dbparam["source_db_name"]) - archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"], - password=dbparam["archive_db_pass"], - host=dbparam["archive_db_serverip"], - port=dbparam["archive_db_port"], - database=dbparam["archive_db_name"]) - - sourceCur = sourseConn.cursor() - archiveCur = archiveConn.cursor() - - tableName=dbparam["archive_table3"] - sschemaName = dbparam["source_schema_name"] - aschemaName = dbparam["archive_schema_name"] - oldDays = dbparam["archive_older_than_days"] - - print(tableName) - select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'" - sourceCur.execute(select_query) - rows = sourceCur.fetchall() - select_count = sourceCur.rowcount - print(select_count, ": Record selected for archive from ", tableName) - if select_count > 0: - for row in rows: - rowValues = getValues(row) - insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")" - archiveCur.execute(insert_query) - archiveConn.commit() - insert_count = archiveCur.rowcount - print(insert_count, ": Record inserted successfully ") - if insert_count > 0: - delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE id ='"+row[0]+"'" - sourceCur.execute(delete_query) - sourseConn.commit() - delete_count = sourceCur.rowcount - print(delete_count, ": Record deleted successfully") - - except (Exception, psycopg2.DatabaseError) as error: - print(error) - finally: - if sourseConn is not None: - sourceCur.close() - sourseConn.close() - print('Database sourse connection closed.') - if archiveConn is not None: - archiveCur.close() - archiveConn.close() - print('Database archive connection closed.') - -if __name__ == '__main__': - dataArchive() diff --git a/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table4.py b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table4.py deleted file mode 100644 index ebf1bbe7..00000000 --- a/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table4.py +++ /dev/null @@ -1,107 +0,0 @@ -#-- ------------------------------------------------------------------------------------------------- -#-- Job Name : Pre Registration DB Tables Archive -#-- DB Name : mosip_prereg -#-- Table Names : applicant_demographic_consumed -#-- Purpose : Job to Archive Data in pre registration DB for above mentioned tables -#-- Create By : Sadanandegowda DM -#-- Created Date : Dec-2020 -#-- -#-- Modified Date Modified By Comments / Remarks -#-- ------------------------------------------------------------------------------------------ -#-- -#-- ------------------------------------------------------------------------------------------ - -#!/usr/bin/python -# -*- coding: utf-8 -*- -import sys - -import configparser -import psycopg2 -import datetime - -from configparser import ConfigParser -from datetime import datetime - -def config(filename='mosip_archive_prereg.ini', section='MOSIP-DB-SECTION'): - parser = ConfigParser() - parser.read(filename) - dbparam = {} - if parser.has_section(section): - params = parser.items(section) - for param in params: - dbparam[param[0]] = param[1] - else: - raise Exception('Section {0} not found 
in the {1} file'.format(section, filename)) - - return dbparam - -def getValues(row): - finalValues ="" - for values in row: - finalValues = finalValues+"'"+str(values)+"'," - - finalValues = finalValues[0:-1] - return finalValues - -def dataArchive(): - sourseConn = None - archiveConn = None - try: - - dbparam = config() - - print('Connecting to the PostgreSQL database...') - sourseConn = psycopg2.connect(user=dbparam["source_db_uname"], - password=dbparam["source_db_pass"], - host=dbparam["source_db_serverip"], - port=dbparam["source_db_port"], - database=dbparam["source_db_name"]) - archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"], - password=dbparam["archive_db_pass"], - host=dbparam["archive_db_serverip"], - port=dbparam["archive_db_port"], - database=dbparam["archive_db_name"]) - - sourceCur = sourseConn.cursor() - archiveCur = archiveConn.cursor() - - tableName=dbparam["archive_table1"] - sschemaName = dbparam["source_schema_name"] - aschemaName = dbparam["archive_schema_name"] - oldDays = dbparam["archive_older_than_days"] - - print(tableName) - select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'" - sourceCur.execute(select_query) - rows = sourceCur.fetchall() - select_count = sourceCur.rowcount - print(select_count, ": Record selected for archive from ", tableName) - if select_count > 0: - for row in rows: - rowValues = getValues(row) - insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")" - archiveCur.execute(insert_query) - archiveConn.commit() - insert_count = archiveCur.rowcount - print(insert_count, ": Record inserted successfully ") - if insert_count > 0: - delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE prereg_id ='"+row[0]+"'" - sourceCur.execute(delete_query) - sourseConn.commit() - delete_count = sourceCur.rowcount - print(delete_count, ": Record deleted successfully") - - except (Exception, psycopg2.DatabaseError) as error: - print(error) - finally: - if sourseConn is not None: - sourceCur.close() - sourseConn.close() - print('Database sourse connection closed.') - if archiveConn is not None: - archiveCur.close() - archiveConn.close() - print('Database archive connection closed.') - -if __name__ == '__main__': - dataArchive() diff --git a/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table5.py b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table5.py deleted file mode 100644 index bb2cc66a..00000000 --- a/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table5.py +++ /dev/null @@ -1,107 +0,0 @@ -#-- ------------------------------------------------------------------------------------------------- -#-- Job Name : Pre Registration DB Tables Archive -#-- DB Name : mosip_prereg -#-- Table Names : applications -#-- Purpose : Job to Archive Data in pre registration DB for above mentioned tables -#-- Create By : Ram Bhatt -#-- Created Date : Oct-2021 -#-- -#-- Modified Date Modified By Comments / Remarks -#-- ------------------------------------------------------------------------------------------ -#-- -#-- ------------------------------------------------------------------------------------------ - -#!/usr/bin/python -# -*- coding: utf-8 -*- -import sys - -import configparser -import psycopg2 -import datetime - -from configparser import ConfigParser -from datetime import datetime - -def config(filename='mosip_archive_prereg.ini', section='MOSIP-DB-SECTION'): - parser = ConfigParser() - parser.read(filename) - dbparam = 
{} - if parser.has_section(section): - params = parser.items(section) - for param in params: - dbparam[param[0]] = param[1] - else: - raise Exception('Section {0} not found in the {1} file'.format(section, filename)) - - return dbparam - -def getValues(row): - finalValues ="" - for values in row: - finalValues = finalValues+"'"+str(values)+"'," - - finalValues = finalValues[0:-1] - return finalValues - -def dataArchive(): - sourseConn = None - archiveConn = None - try: - - dbparam = config() - - print('Connecting to the PostgreSQL database...') - sourseConn = psycopg2.connect(user=dbparam["source_db_uname"], - password=dbparam["source_db_pass"], - host=dbparam["source_db_serverip"], - port=dbparam["source_db_port"], - database=dbparam["source_db_name"]) - archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"], - password=dbparam["archive_db_pass"], - host=dbparam["archive_db_serverip"], - port=dbparam["archive_db_port"], - database=dbparam["archive_db_name"]) - - sourceCur = sourseConn.cursor() - archiveCur = archiveConn.cursor() - - tableName=dbparam["archive_table5"] - sschemaName = dbparam["source_schema_name"] - aschemaName = dbparam["archive_schema_name"] - oldDays = dbparam["archive_older_than_days"] - - print(tableName) - select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE application_status_code = 'BOOKED' AND cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'" - sourceCur.execute(select_query) - rows = sourceCur.fetchall() - select_count = sourceCur.rowcount - print(select_count, ": Record selected for archive from ", tableName) - if select_count > 0: - for row in rows: - rowValues = getValues(row) - insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")" - archiveCur.execute(insert_query) - archiveConn.commit() - insert_count = archiveCur.rowcount - print(insert_count, ": Record inserted successfully ") - if insert_count > 0: - delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE prereg_id ='"+row[0]+"'" - sourceCur.execute(delete_query) - sourseConn.commit() - delete_count = sourceCur.rowcount - print(delete_count, ": Record deleted successfully") - - except (Exception, psycopg2.DatabaseError) as error: - print(error) - finally: - if sourseConn is not None: - sourceCur.close() - sourseConn.close() - print('Database sourse connection closed.') - if archiveConn is not None: - archiveCur.close() - archiveConn.close() - print('Database archive connection closed.') - -if __name__ == '__main__': - dataArchive() diff --git a/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table6.py b/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table6.py deleted file mode 100644 index 48b39b2f..00000000 --- a/data-archive/archive-jobs/mosip_prereg/mosip_archive_prereg_table6.py +++ /dev/null @@ -1,107 +0,0 @@ -#-- ------------------------------------------------------------------------------------------------- -#-- Job Name : Pre Registration DB Tables Archive -#-- DB Name : mosip_prereg -#-- Table Names : otp_transaction -#-- Purpose : Job to Archive Data in pre registration DB for above mentioned tables -#-- Create By : Ram Bhatt -#-- Created Date : Oct-2021 -#-- -#-- Modified Date Modified By Comments / Remarks -#-- ------------------------------------------------------------------------------------------ -#-- -#-- ------------------------------------------------------------------------------------------ - -#!/usr/bin/python -# -*- coding: utf-8 -*- -import sys - -import configparser -import psycopg2 -import datetime 
- -from configparser import ConfigParser -from datetime import datetime - -def config(filename='mosip_archive_prereg.ini', section='MOSIP-DB-SECTION'): - parser = ConfigParser() - parser.read(filename) - dbparam = {} - if parser.has_section(section): - params = parser.items(section) - for param in params: - dbparam[param[0]] = param[1] - else: - raise Exception('Section {0} not found in the {1} file'.format(section, filename)) - - return dbparam - -def getValues(row): - finalValues ="" - for values in row: - finalValues = finalValues+"'"+str(values)+"'," - - finalValues = finalValues[0:-1] - return finalValues - -def dataArchive(): - sourseConn = None - archiveConn = None - try: - - dbparam = config() - - print('Connecting to the PostgreSQL database...') - sourseConn = psycopg2.connect(user=dbparam["source_db_uname"], - password=dbparam["source_db_pass"], - host=dbparam["source_db_serverip"], - port=dbparam["source_db_port"], - database=dbparam["source_db_name"]) - archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"], - password=dbparam["archive_db_pass"], - host=dbparam["archive_db_serverip"], - port=dbparam["archive_db_port"], - database=dbparam["archive_db_name"]) - - sourceCur = sourseConn.cursor() - archiveCur = archiveConn.cursor() - - tableName=dbparam["archive_table6"] - sschemaName = dbparam["source_schema_name"] - aschemaName = dbparam["archive_schema_name"] - oldDays = dbparam["archive_older_than_days"] - - print(tableName) - select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'" - sourceCur.execute(select_query) - rows = sourceCur.fetchall() - select_count = sourceCur.rowcount - print(select_count, ": Record selected for archive from ", tableName) - if select_count > 0: - for row in rows: - rowValues = getValues(row) - insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")" - archiveCur.execute(insert_query) - archiveConn.commit() - insert_count = archiveCur.rowcount - print(insert_count, ": Record inserted successfully ") - if insert_count > 0: - delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE prereg_id ='"+row[0]+"'" - sourceCur.execute(delete_query) - sourseConn.commit() - delete_count = sourceCur.rowcount - print(delete_count, ": Record deleted successfully") - - except (Exception, psycopg2.DatabaseError) as error: - print(error) - finally: - if sourseConn is not None: - sourceCur.close() - sourseConn.close() - print('Database sourse connection closed.') - if archiveConn is not None: - archiveCur.close() - archiveConn.close() - print('Database archive connection closed.') - -if __name__ == '__main__': - dataArchive() diff --git a/data-archive/archive-jobs/mosip_regprc/mosip_archive_job_regprc.sh b/data-archive/archive-jobs/mosip_regprc/mosip_archive_job_regprc.sh deleted file mode 100644 index cfa6acbf..00000000 --- a/data-archive/archive-jobs/mosip_regprc/mosip_archive_job_regprc.sh +++ /dev/null @@ -1,13 +0,0 @@ -### -- --------------------------------------------------------------------------------------------------------- -### -- Script Name : Registration Processor Archive Job -### -- Deploy Module : Pre registration -### -- Purpose : To Archive Registration Processor tables which are marked for archive. 
-### -- Create By : Sadanandegowda DM -### -- Created Date : Dec-2020 -### -- -### -- Modified Date Modified By Comments / Remarks -### -- ---------------------------------------------------------------------------------------- - -python mosip_archive_regprc_table1.py & - -#=============================================================================================== diff --git a/data-archive/archive-jobs/mosip_regprc/mosip_archive_regprc.ini b/data-archive/archive-jobs/mosip_regprc/mosip_archive_regprc.ini deleted file mode 100644 index ff68deda..00000000 --- a/data-archive/archive-jobs/mosip_regprc/mosip_archive_regprc.ini +++ /dev/null @@ -1,17 +0,0 @@ -[MOSIP-DB-SECTION] -source_db_serverip=13.233.223.29 -source_db_port=30090 -source_db_name=mosip_regprc -source_schema_name=regprc -source_db_uname=regprcuser -source_db_pass=Mosip@dev123 -archive_table1=registration_transaction - -archive_db_serverip=13.233.223.29 -archive_db_port=30090 -archive_db_name=mosip_archive -archive_schema_name=archive -archive_db_uname=archiveuser -archive_db_pass=Mosip@dev123 - -archive_older_than_days = 2 \ No newline at end of file diff --git a/data-archive/archive-jobs/mosip_regprc/mosip_archive_regprc_table1.py b/data-archive/archive-jobs/mosip_regprc/mosip_archive_regprc_table1.py deleted file mode 100644 index a58aabb2..00000000 --- a/data-archive/archive-jobs/mosip_regprc/mosip_archive_regprc_table1.py +++ /dev/null @@ -1,107 +0,0 @@ -#-- ------------------------------------------------------------------------------------------------- -#-- Job Name : Registration Processor DB Tables Archive -#-- DB Name : mosip_regprc -#-- Table Names : registration_transaction -#-- Purpose : Job to Archive Data in registration processor DB for above mentioned tables -#-- Create By : Sadanandegowda DM -#-- Created Date : Dec-2020 -#-- -#-- Modified Date Modified By Comments / Remarks -#-- ------------------------------------------------------------------------------------------ -#-- -#-- ------------------------------------------------------------------------------------------ - -#!/usr/bin/python -# -*- coding: utf-8 -*- -import sys - -import configparser -import psycopg2 -import datetime - -from configparser import ConfigParser -from datetime import datetime - -def config(filename='mosip_archive_prereg.ini', section='MOSIP-DB-SECTION'): - parser = ConfigParser() - parser.read(filename) - dbparam = {} - if parser.has_section(section): - params = parser.items(section) - for param in params: - dbparam[param[0]] = param[1] - else: - raise Exception('Section {0} not found in the {1} file'.format(section, filename)) - - return dbparam - -def getValues(row): - finalValues ="" - for values in row: - finalValues = finalValues+"'"+str(values)+"'," - - finalValues = finalValues[0:-1] - return finalValues - -def dataArchive(): - sourseConn = None - archiveConn = None - try: - - dbparam = config() - - print('Connecting to the PostgreSQL database...') - sourseConn = psycopg2.connect(user=dbparam["source_db_uname"], - password=dbparam["source_db_pass"], - host=dbparam["source_db_serverip"], - port=dbparam["source_db_port"], - database=dbparam["source_db_name"]) - archiveConn = psycopg2.connect(user=dbparam["archive_db_uname"], - password=dbparam["archive_db_pass"], - host=dbparam["archive_db_serverip"], - port=dbparam["archive_db_port"], - database=dbparam["archive_db_name"]) - - sourceCur = sourseConn.cursor() - archiveCur = archiveConn.cursor() - - tableName=dbparam["archive_table1"] - sschemaName = 
dbparam["source_schema_name"] - aschemaName = dbparam["archive_schema_name"] - oldDays = dbparam["archive_older_than_days"] - - print(tableName) - select_query = "SELECT * FROM "+sschemaName+"."+tableName+" WHERE cr_dtimes < NOW() - INTERVAL '"+oldDays+" days'" - sourceCur.execute(select_query) - rows = sourceCur.fetchall() - select_count = sourceCur.rowcount - print(select_count, ": Record selected for archive from ", tableName) - if select_count > 0: - for row in rows: - rowValues = getValues(row) - insert_query = "INSERT INTO "+aschemaName+"."+tableName+" VALUES ("+rowValues+")" - archiveCur.execute(insert_query) - archiveConn.commit() - insert_count = archiveCur.rowcount - print(insert_count, ": Record inserted successfully ") - if insert_count > 0: - delete_query = "DELETE FROM "+sschemaName+"."+tableName+" WHERE id ='"+row[0]+"'" - sourceCur.execute(delete_query) - sourseConn.commit() - delete_count = sourceCur.rowcount - print(delete_count, ": Record deleted successfully") - - except (Exception, psycopg2.DatabaseError) as error: - print(error) - finally: - if sourseConn is not None: - sourceCur.close() - sourseConn.close() - print('Database sourse connection closed.') - if archiveConn is not None: - archiveCur.close() - archiveConn.close() - print('Database archive connection closed.') - -if __name__ == '__main__': - dataArchive() diff --git a/data-archive/archive-jobs/pms_archive_table_info.json b/data-archive/archive-jobs/pms_archive_table_info.json new file mode 100644 index 00000000..ca1fef84 --- /dev/null +++ b/data-archive/archive-jobs/pms_archive_table_info.json @@ -0,0 +1,25 @@ +{ + "tables_info": [ + { + "source_table": "auth_policy_h", + "archive_table": "mosip_pms_auth_policy_h", + "id_column": "id", + "date_column": "cr_dtimes", + "older_than_days": 183 + }, + { + "source_table": "secure_biometric_interface_h", + "archive_table": "mosip_pms_secure_biometric_interface_h", + "id_column": "id", + "date_column": "cr_dtimes", + "older_than_days": 183 + }, + { + "source_table": "partner_h", + "archive_table": "mosip_pms_partner_h", + "id_column": "id", + "date_column": "cr_dtimes", + "older_than_days": 183 + } + ] +} diff --git a/data-archive/archive-jobs/regprc_archive_table_info.json b/data-archive/archive-jobs/regprc_archive_table_info.json new file mode 100644 index 00000000..870d6669 --- /dev/null +++ b/data-archive/archive-jobs/regprc_archive_table_info.json @@ -0,0 +1,39 @@ +{ + "tables_info": [ + { + "source_table": "abis_response_det", + "archive_table": "mosip_regprc_abis_response_det", + "id_column": "abis_resp_id", + "date_column": "cr_dtimes", + "older_than_days": 183 + }, + { + "source_table": "abis_response", + "archive_table": "mosip_regprc_abis_response", + "id_column": "id", + "date_column": "cr_dtimes", + "older_than_days": 183 + }, + { + "source_table": "abis_request", + "archive_table": "mosip_regprc_abis_request", + "id_column": "id", + "date_column": "cr_dtimes", + "older_than_days": 183 + }, + { + "source_table": "reg_demo_dedupe_list", + "archive_table": "mosip_regprc_reg_demo_dedupe_list", + "id_column": "id", + "date_column": "cr_dtimes", + "older_than_days": 183 + }, + { + "source_table": "registration_transaction", + "archive_table": "mosip_regprc_registration_transaction", + "id_column": "regtrn_id", + "date_column": "cr_dtimes", + "older_than_days": 183 + } + ] +} diff --git a/data-archive/archive-jobs/resident_archive_table_info.json b/data-archive/archive-jobs/resident_archive_table_info.json new file mode 100644 index 00000000..1b1df885 --- 
/dev/null
+++ b/data-archive/archive-jobs/resident_archive_table_info.json
@@ -0,0 +1,39 @@
+{
+    "tables_info": [
+        {
+            "source_table": "otp_transaction",
+            "archive_table": "mosip_resident_otp_transaction",
+            "id_column": "id",
+            "date_column": "cr_dtimes",
+            "older_than_days": 30
+        },
+        {
+            "source_table": "resident_grievance_ticket",
+            "archive_table": "mosip_resident_grievance_ticket",
+            "id_column": "id",
+            "date_column": "cr_dtimes",
+            "older_than_days": 365
+        },
+        {
+            "source_table": "resident_session",
+            "archive_table": "mosip_resident_session",
+            "id_column": "session_id",
+            "date_column": "login_dtimes",
+            "older_than_days": 30
+        },
+        {
+            "source_table": "resident_transaction",
+            "archive_table": "mosip_resident_transaction",
+            "id_column": "id",
+            "date_column": "cr_dtimes",
+            "older_than_days": 365
+        },
+        {
+            "source_table": "resident_user_actions",
+            "archive_table": "mosip_resident_user_actions",
+            "id_column": "ida_token",
+            "date_column": "last_bell_notif_click_dtimes",
+            "older_than_days": 365
+        }
+    ]
+}
diff --git a/data-archive/archive.sh b/data-archive/archive.sh
new file mode 100755
index 00000000..3f3857e2
--- /dev/null
+++ b/data-archive/archive.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+
+# Exit immediately if a command exits with a non-zero status
+set -e
+
+# Function to handle errors
+handle_error() {
+    local exit_code="$?"
+    echo "Error occurred in script at line $BASH_LINENO with exit code $exit_code"
+    # Add additional error handling or cleanup here if needed
+    exit $exit_code
+}
+
+# Trap errors and call the handle_error function
+trap 'handle_error' ERR
+
+echo "Executing archive-jobs sequentially for the databases listed in DB_NAMES"
+
+cd archive-jobs
+
+python3 mosip_archive_main.py
+
+echo "Executed archive-jobs successfully"
diff --git a/data-archive/db.sh b/data-archive/db.sh
new file mode 100755
index 00000000..81d2fe0f
--- /dev/null
+++ b/data-archive/db.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+# entrypoint.sh
+
+# Function to handle errors
+handle_error() {
+    echo "Error occurred in script at line $1. Exiting."
+    exit 1
+}
+
+# Trap errors and execute the handle_error function
+trap 'handle_error $LINENO' ERR
+
+# Exit immediately if any command exits with a non-zero status
+set -e
+
+echo "Executing db scripts"
+
+cd db_scripts/mosip_archive
+
+# Execute the deployment script without the properties file; it is supplied through arguments
+bash deploy.sh
+
+echo "Executed successfully"
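The `*_archive_table_info.json` descriptors above replace the hard-coded per-table jobs deleted earlier in this patch: each entry names a source table, its archive counterpart, the id and date columns, and a retention window in days. `mosip_archive_main.py`, which `archive.sh` invokes, is not part of this hunk, so the following is only a minimal sketch of how a generic pass could consume one of these files with psycopg2 — the function name, connection handling, and printouts are illustrative assumptions, not the actual implementation:

```python
import json

import psycopg2
from psycopg2 import sql


def archive_pass(source_conn, archive_conn, info_file):
    """Move rows older than each table's retention window into the archive DB."""
    with open(info_file) as f:
        tables_info = json.load(f)["tables_info"]
    src = source_conn.cursor()
    arc = archive_conn.cursor()
    for t in tables_info:
        # Table/column names cannot be bound as query parameters, so they are
        # quoted via psycopg2.sql; the day count is bound as a normal parameter.
        src.execute(
            sql.SQL("SELECT * FROM {tbl} WHERE {col} < NOW() - make_interval(days => %s)")
               .format(tbl=sql.Identifier(t["source_table"]),
                       col=sql.Identifier(t["date_column"])),
            (t["older_than_days"],))
        rows = src.fetchall()
        print(len(rows), ": records selected for archive from", t["source_table"])
        if not rows:
            continue
        # Locate the configured id column in the result set.
        id_idx = [col.name for col in src.description].index(t["id_column"])
        insert = sql.SQL("INSERT INTO {tbl} VALUES ({vals})").format(
            tbl=sql.Identifier(t["archive_table"]),
            vals=sql.SQL(", ").join([sql.Placeholder()] * len(rows[0])))
        delete = sql.SQL("DELETE FROM {tbl} WHERE {col} = %s").format(
            tbl=sql.Identifier(t["source_table"]),
            col=sql.Identifier(t["id_column"]))
        for row in rows:
            arc.execute(insert, row)
            src.execute(delete, (row[id_idx],))
        # The archive side is committed first: a crash in between leaves a
        # duplicate archive row rather than a lost source row.
        archive_conn.commit()
        source_conn.commit()
```

Like the deleted per-table scripts, this sketch assumes the archive table has the same column order as its source. Quoting identifiers through `psycopg2.sql` and binding the day count avoids the string concatenation the old jobs used to build their SQL.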
diff --git a/data-archive/db_scripts/README.MD b/data-archive/db_scripts/README.MD
index 4d2ff543..90735ba5 100644
--- a/data-archive/db_scripts/README.MD
+++ b/data-archive/db_scripts/README.MD
@@ -1,178 +1,8 @@
-## MOSIP Commons module Databases (**mosip_master, mosip_kernel, mosip_idrepo, mosip_idmap, mosip_iam, mosip_audit**) scripts inventory and deployment guidelines on postgresql database.
+# Data Archive Database
-#### The details disclosed below gives a clear information on complete database script structure with the instructions for database scripts deployments.
+## Overview
+This folder contains various SQL scripts to create the database and tables in Postgres. The tables are described under `/ddl/`. Default data that's populated in the tables is present under the `/dml` folder.
-## Prerequisities
+
+These scripts are run automatically as part of DB initialisation in [Sandbox Deployment](https://docs.mosip.io/1.2.0/deployment/sandbox-deployment)
-
-* DB Server and access details
-
-* Postgres client (psql) has to be installed on the deployment servers.
-
-* Copy latest database scripts(DDL, DML, .SH ... etc) from git/repository on to the DB deployment server.
-
-* Necessary details to be updated in peoperties file against to the releavnt variables being used (details listed below).
-
-* Database objects related to MOSIP modules are placed in "**mosip_base_directory**>>db_scripts>>mosip_ folder on git/repository
-
-**Example:** the commons module script folder is /**mosip_base_directory**>>db_scripts>>mosip_kernel where all the database scripts related to kernel are available.
-
-* Create a log file directory on DB deployment server before updating the properties file. Please follow the steps to create the same:
-
-    bash-4.2$mkdir /mosip_base_directory/
-
-* If we wish to place the log files under different directory other than the above mentioned then we need to create directory and specify the path of the directory in the properties file.
-
-* Pull the DB deployment scripts from Git repository to the deployment server and start deploying OR
-
-* If are pulling to local system from Git repository and pushing them back to deployment server using WinSCP then make a note to modify the following encoding settings in WinSCP before pushing the files to deployment server --> Open WinSCP --> Options --> Preferences --> Transfer --> Edit --> In "Transfer mode" section --> select "Text" --> Click Ok --> Click Ok
-
-## Each database folder has the following files / folders
-
-* **ddl folder:** This folder contains all the database data definition language (DDL) scripts to create or alter a database object of this module.
-
-* **dml folder:** This folder contains the scripts (insert/update/delete scripts) to create seed data / metadata needed to run this module.
-
-* **mosip__db.sql:** This file contains the database creation script of this module
-
-* **mosip__grants.sql:** The needed privilege / grants scripts assigned to database user / role to access database objects are described in this file.
-
-* **mosip_role_user.sql:** The role creation script that will be used by the application to perform DML operations is defined here.
-
-* **mosip_role_common.sql:** This file contains the common roles creation script that are needed to manage the database.
-
-* **mosip__ddl_deploy.sql:** This is a wrapper script used to **deploy the DDL scripts available in ddl folder**. This will also be used to prepare the script run sequence to manage all the needed dependency across DB objects being created.
-
-* **mosip__dml_deploy.sql:** This is a wrapper script used to **deploy the DML scripts available in dml folder**. This will also used to prepare the script run sequence to manage all the needed dependency across DB objects.
-
-* **mosip__db_deploy.sh:** This is the shell script available and present in each database folders/directories.
-
-* **mosip__deploy.properties:** This is the properties file name and present in each database.
-
-* **mosip_commons_db_deployment.sh:** This is the .sh file which is present in /home/madmin/database directory and which will be executed for all commons database deployment in single command execution.
-
-**Note :** Not all Modules will have dml scripts. Make necessary changes in the properties file with dml variables for the modules where dml exists.
- -**Note :** No need to change anything in the shell script unless it is really causing any problem or any further implementation is being introduced. - -Once we complete with sourcing the database files, we need to follow the below DB deployment process with the modifying the properties file according the requirement. - -## Deployment can be performed in two ways based on the requirement and they are as follows: -1) DB Deployment for all common module databases -2) DB Deployment for single or selected databases - -### Properties file variable details and description: Properties file has to be updated with the required details before proceeding with deployment steps for each databases. - -**DB_SERVERIP:** Contains details of Destination DB SERVER_IP(Ex:10.0.0.1) where the deployment is targeted - -**DB_PORT:** Contains the postgres server port details where the postgres is allowed to connect. Ex: 5433 - -**SU_USER:** Contains the postgres super user name to connect to the postgres database i.e. postgres - -**SU_USER_PWD:** Contains the password for postgres super user - -**DEFAULT_DB_NAME:** Default database name to connect with respective postgres server i.e. ex: postgres - -**MOSIP_DB_NAME:** MOSIP Database name for which the deployment is scheduled. - -**SYSADMIN_USER:** This variable contains the mosip_common_role which indeed is going to be the super user for the remaining actions going to be performed by shell script. - -**SYSADMIN_PWD:** Contains the credential details for SYSADMIN_USER. - -**DBADMIN_PWD:** Contains the credential details for DBADMIN_USER. - -**APPADMIN_PWD:** Contains the credential details for APPADMIN_USER. - -**DBUSER_PWD:** Contains the credential details for dbuserpwd. - -**BASE_PATH:** Path for DB scrips which are kept in the Deployment server. - -**LOG_PATH:** Path where deployment log file will be created - -**COMMON_ROLE_FILENAME:** Contains the common roles creation filename, ex: mosip_role_common.sql - -**APP_ROLE_FILENAME:** Contains specific DB user role creation filename, ex: mosip_role_databaseuser.sql - -**DB_CREATION_FILENAME:** Contains specific DB creation script name, ex: mosip_database_db.sql. - -**ACCESS_GRANT_FILENAME:** This variable contains file name of access provisioning script details for the above created users, ex: mosip__grants.sql. - -**DDL_FILENAME:** DDL script file name, ex:mosip__ddl_deploy.sql. - -**DML_FLAG:** Its a flag variable which contains value as 0 or 1 for any DML existance for the particular DB. if flag=0 then no DML else flag=1. - -**DML_FILENAME:** DML cript file name only if the flag=1, else it will be empty or null, ex: mosip__dml_deploy.sql. - -**Note - Make sure, There is a single empty line at end of the .properties files content and No spaces in beggining and end of the parameter values** - -## DB Deployment for all common module databases with single click deployment: - -**Step 1** -> Make prior modification to all the respective database properties files **(mosip__deploy.properties)** in the respective database directories. Path of properties file and variables list remains same as explained above. Once the properties files are ready then access the directory where the deployment script is kept. - -**Step 2** -> Deployment on all common module databases, run the **"mosip_commons_db_deployment.sh"** script which is avialble in the /database directory. 
To access **"mosip_commons_db_deployment.sh"** script, follow the below given commands: - - **Enter:-bash-4.2$** cd /home/madmin/database/ - - **Enter:-bash-4.2$** bash mosip_commons_db_deployment.sh - -**Step 3** -> Please observe Post Deployment Validation steps below - -**No modification required to be done on any of the <>.sql files in the database folder. If it is required to be modified then please reach out to database team and have it modified.** - -## DB_Deployment for single or selected databases - -**Step 1:** update the properties(.properties) file with the required parameter values for single or selected databases. - -All these .sh and properties files are kept in each database directories. Please follow the below steps: - -**Step 2** -> Login into Deployment server/VM - -**Step 3** -> check the pwd(present working directory). Make sure we are inside the right database folder/directory to run the deployment for that specific database. - -**Enter:-bash-4.2$** pwd -This should be the path if we are performing deployment for the database name **mosip_schema_name** : /home/madmin/database/mosip_ - -**Step 4** -> Please move all the necessary files from local directory to the deployment server directory under respective databases. - -**Step 5** -> After prior modifications to the properties file, run the below deployment shell script as given: - -**Enter:-bash-4.2$** bash mosip__db_deploy.sh mosip__deploy.properties - -**Step 6** -> Please observe Post Deployment Validation steps below - -**No modification required to be done on any of the <>.sql files in the database folder. If it is required to be modified then please reach out to database team and have it modified.** - -### Post Deployment Validation - -**Note:** If you encounter the following messages then please recheck the details(ip address, port number, database name, password) entered in the properties file, the message would be as follows: - -. - - - - - -**Key points during or after the script execution:** - - * Properties file found message - - * Server status - - * Accessing the right path for DB deploy - - * Creates respective roles - - * Check for any active connections - - * Creates roles, creating Database, schemas, granting access, creating respective tables. - - * Loading data or DML operations valid only for those DB's which carries DML actions. - - * End of sourcing or deployment process. - -**Post deployment process, look out for database deployment log file which captures all stages of deployment. Log file path is defined in the properties file of the databases.** - -**During all the above stages please watch out for any errors which will be capture in the log file.** - -Kindly ignore **NOTICE** or **SKIPPING** messages. As these messages states that particular action is already in place hence sql script ignore performing again. - -### Post deployment process, look out for each database deployment log files which captures all stages of deployment. Log file path is defined in the properties file of the respective databases. +Developers may run the SQLs using `/deploy.sh` script. 
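The properties-driven deployment described in the old README above is replaced by `deploy.sh`, which `db.sh` calls with its parameters passed as arguments. `deploy.sh` itself is not shown in this patch; purely as an illustration, the `db.sql` and `ddl.sql` files introduced below can be applied directly with psql — the host, port, and superuser password here are placeholders:

```python
import os
import subprocess

# Assumed connection details; deploy.sh presumably reads the real values
# from a properties file or the environment.
PSQL = ["psql", "-h", "localhost", "-p", "5432", "-U", "postgres",
        "-v", "ON_ERROR_STOP=1"]
env = dict(os.environ, PGPASSWORD="<postgres-password>")

# Run from db_scripts/mosip_archive: db.sql creates the mosip_archive
# database and archive schema, then \c's into it; ddl.sql re-connects
# and \ir's every table script in order.
for script in ("db.sql", "ddl.sql"):
    subprocess.run(PSQL + ["-d", "postgres", "-f", script], env=env, check=True)

# Sanity check: the database-level search_path set in db.sql lets clients
# reference archive tables without a schema prefix.
out = subprocess.run(PSQL + ["-d", "mosip_archive", "-t", "-c", "SHOW search_path"],
                     env=env, check=True, capture_output=True, text=True)
print(out.stdout.strip())   # expected: archive, pg_catalog, public
```

The `-v ON_ERROR_STOP=1` flag makes psql fail fast on the first error, mirroring the `set -e` behaviour of `archive.sh` and `db.sh`.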
diff --git a/data-archive/db_scripts/mosip_archive/db.sql b/data-archive/db_scripts/mosip_archive/db.sql
new file mode 100644
index 00000000..66b85033
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/db.sql
@@ -0,0 +1,14 @@
+CREATE DATABASE mosip_archive
+	ENCODING = 'UTF8'
+	LC_COLLATE = 'en_US.UTF-8'
+	LC_CTYPE = 'en_US.UTF-8'
+	TABLESPACE = pg_default
+	OWNER = postgres
+	TEMPLATE = template0;
+COMMENT ON DATABASE mosip_archive IS 'Archive database to store data archived from the various MOSIP module databases';
+
+\c mosip_archive
+
+CREATE SCHEMA archive;
+ALTER SCHEMA archive OWNER TO postgres;
+ALTER DATABASE mosip_archive SET search_path TO archive,pg_catalog,public;
diff --git a/data-archive/db_scripts/mosip_archive/ddl.sql b/data-archive/db_scripts/mosip_archive/ddl.sql
new file mode 100644
index 00000000..43df85d5
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl.sql
@@ -0,0 +1,43 @@
+\c mosip_archive
+\ir ddl/archive-audit-app_audit_log.sql
+\ir ddl/archive-credential-batch_job_execution.sql
+\ir ddl/archive-credential-batch_job_execution_context.sql
+\ir ddl/archive-credential-batch_job_execution_params.sql
+\ir ddl/archive-credential-batch_job_instance.sql
+\ir ddl/archive-credential-batch_step_execution.sql
+\ir ddl/archive-credential-batch_step_execution_context.sql
+\ir ddl/archive-ida-credential_event_store.sql
+\ir ddl/archive-credential-credential_transaction.sql
+\ir ddl/archive-esignet-consent_history.sql
+\ir ddl/archive-ida-anonymous_profile.sql
+\ir ddl/archive-ida-auth_transaction.sql
+\ir ddl/archive-ida-batch_job_execution_context.sql
+\ir ddl/archive-ida-batch_job_execution_params.sql
+\ir ddl/archive-ida-batch_job_execution.sql
+\ir ddl/archive-ida-batch_job_instance.sql
+\ir ddl/archive-ida-batch_step_execution_context.sql
+\ir ddl/archive-ida-batch_step_execution.sql
+\ir ddl/archive-ida-otp_transaction.sql
+\ir ddl/archive-idrepo-anonymous_profile.sql
+\ir ddl/archive-idrepo-credential_request_status.sql
+\ir ddl/archive-idrepo-uin_draft.sql
+\ir ddl/archive-kernel-otp_transaction.sql
+\ir ddl/archive-master-bulkupload_transaction.sql
+\ir ddl/archive-master-device_master_h.sql
+\ir ddl/archive-master-machine_master_h.sql
+\ir ddl/archive-master-registration_center_h.sql
+\ir ddl/archive-master-user_detail_h.sql
+\ir ddl/archive-master-zone_user_h.sql
+\ir ddl/archive-pms-auth_policy_h.sql
+\ir ddl/archive-pms-partner_h.sql
+\ir ddl/archive-pms-secure_biometric_interface_h.sql
+\ir ddl/archive-resident_grievance_ticket.sql
+\ir ddl/archive-resident-otp_transaction.sql
+\ir ddl/archive-resident_session.sql
+\ir ddl/archive-resident_transaction.sql
+\ir ddl/archive-resident_user_actions.sql
+\ir ddl/archive-regprc-reg_demo_dedupe_list.sql
+\ir ddl/archive-regprc-registration_transaction.sql
+\ir ddl/archive-regprc-abis_response_det.sql
+\ir ddl/archive-regprc-abis_response.sql
+\ir ddl/archive-regprc-abis_request.sql
\ No newline at end of file
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-app_audit_log.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-app_audit_log.sql
deleted file mode 100644
index 001596ae..00000000
--- a/data-archive/db_scripts/mosip_archive/ddl/archive-app_audit_log.sql
+++ /dev/null
@@ -1,75 +0,0 @@
-- -------------------------------------------------------------------------------------------------
--- Database Name: mosip_archive
--- Table Name : archive.app_audit_log
--- Purpose : Application Audit Log : To track application related audit details for analysing, auditing and
reporting purposes --- Create By : Sadanandegowda --- Created Date : Dec-2020 --- --- Modified Date Modified By Comments / Remarks --- ------------------------------------------------------------------------------------------ --- --- ------------------------------------------------------------------------------------------ - --- object: archive.app_audit_log | type: TABLE -- --- DROP TABLE IF EXISTS archive.app_audit_log CASCADE; -CREATE TABLE archive.app_audit_log( - log_id character varying(64) NOT NULL, - log_dtimes timestamp NOT NULL, - log_desc character varying(2048), - event_id character varying(64) NOT NULL, - event_type character varying(64) NOT NULL, - event_name character varying(128) NOT NULL, - action_dtimes timestamp NOT NULL, - host_name character varying(128) NOT NULL, - host_ip character varying(16) NOT NULL, - session_user_id character varying(256) NOT NULL, - session_user_name character varying(128), - app_id character varying(64) NOT NULL, - app_name character varying(128) NOT NULL, - module_id character varying(64), - module_name character varying(128), - ref_id character varying(64), - ref_id_type character varying(64), - cr_by character varying(256) NOT NULL, - CONSTRAINT pk_audlog_log_id PRIMARY KEY (log_id) - -); --- ddl-end -- -COMMENT ON TABLE archive.app_audit_log IS 'Application Audit Log : To track application related audit details for analysing, auditing and reporting purposes'; --- ddl-end -- -COMMENT ON COLUMN archive.app_audit_log.log_id IS 'Log Id: Unique audit log id for each audit event log entry across the system.'; --- ddl-end -- -COMMENT ON COLUMN archive.app_audit_log.log_dtimes IS 'Log DateTimestamp: Audit Log Datetimestamp'; --- ddl-end -- -COMMENT ON COLUMN archive.app_audit_log.log_desc IS 'Log Description: Detailed description of the audit event'; --- ddl-end -- -COMMENT ON COLUMN archive.app_audit_log.event_id IS 'Event Id: Event ID that triggered for which the audit action happend'; --- ddl-end -- -COMMENT ON COLUMN archive.app_audit_log.event_type IS 'Event Type: Type of event that triggered the audit log, like, SYSTEM, USER, APPLICATION, BATCH etc.'; --- ddl-end -- -COMMENT ON COLUMN archive.app_audit_log.event_name IS 'Event Name: Event Name of the Event Id captured'; --- ddl-end -- -COMMENT ON COLUMN archive.app_audit_log.action_dtimes IS 'Action DateTimestamp: Timestamp of an application action happend.'; --- ddl-end -- -COMMENT ON COLUMN archive.app_audit_log.host_name IS 'Host Name: Host Name of the Host ID captured, if any.'; --- ddl-end -- -COMMENT ON COLUMN archive.app_audit_log.host_ip IS 'Host Ip: Machine or device host Ip address of audit action event that happend/triggered'; --- ddl-end -- -COMMENT ON COLUMN archive.app_audit_log.session_user_id IS 'Session user Id: Active User ID of the person who is logged in to the system and performing any action that triggered the audit log.'; --- ddl-end -- -COMMENT ON COLUMN archive.app_audit_log.session_user_name IS 'Session user Name: User Name of the Session User ID.'; --- ddl-end -- -COMMENT ON COLUMN archive.app_audit_log.app_id IS 'Application Id: Application Id of audit action happened and logged.'; --- ddl-end -- -COMMENT ON COLUMN archive.app_audit_log.app_name IS 'Application Name: Application Name'; --- ddl-end -- -COMMENT ON COLUMN archive.app_audit_log.module_id IS 'Module Id: Application Module ID that triggered audit trigger log.'; --- ddl-end -- -COMMENT ON COLUMN archive.app_audit_log.module_name IS 'Module Name: Application Module Name of the Module ID captured.'; --- 
ddl-end -- -COMMENT ON COLUMN archive.app_audit_log.ref_id IS 'Reference Id: Reference ID for any cross reference purpose relevant for audit tracking, user id, app id, app or module id, etc.'; --- ddl-end -- -COMMENT ON COLUMN archive.app_audit_log.ref_id_type IS 'Reference Id Type: Type of reference id entered'; --- ddl-end -- -COMMENT ON COLUMN archive.app_audit_log.cr_by IS 'Created By : ID or name of the user who create / insert record'; --- ddl-end -- diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-applicant_demographic_consumed.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-applicant_demographic_consumed.sql deleted file mode 100644 index 04a6d805..00000000 --- a/data-archive/db_scripts/mosip_archive/ddl/archive-applicant_demographic_consumed.sql +++ /dev/null @@ -1,54 +0,0 @@ --- ------------------------------------------------------------------------------------------------- --- Database Name: mosip_archive --- Table Name : archive. --- Purpose : --- Create By : Sadanandegowda --- Created Date : Dec-2020 --- --- Modified Date Modified By Comments / Remarks --- ------------------------------------------------------------------------------------------ --- --- ------------------------------------------------------------------------------------------ --- object: archive.applicant_demographic_consumed | type: TABLE -- --- DROP TABLE IF EXISTS archive.applicant_demographic_consumed CASCADE; -CREATE TABLE archive.applicant_demographic_consumed( - prereg_id character varying(36) NOT NULL, - demog_detail bytea NOT NULL, - demog_detail_hash character varying(64) NOT NULL, - encrypted_dtimes timestamp NOT NULL, - status_code character varying(36) NOT NULL, - lang_code character varying(3) NOT NULL, - cr_appuser_id character varying(256) NOT NULL, - cr_by character varying(256) NOT NULL, - cr_dtimes timestamp NOT NULL, - upd_by character varying(256), - upd_dtimes timestamp, - CONSTRAINT pk_appldemc_prereg_id PRIMARY KEY (prereg_id) - -); --- ddl-end -- -COMMENT ON TABLE archive.applicant_demographic_consumed IS 'Applicant Demographic Consumed: Stores demographic details of an applicant that was comsumed.'; --- ddl-end -- -COMMENT ON COLUMN archive.applicant_demographic_consumed.prereg_id IS 'Pre Registration ID: Unique Id generated for an individual during the pre-registration process which will be referenced during registration process at a registration center.'; --- ddl-end -- -COMMENT ON COLUMN archive.applicant_demographic_consumed.demog_detail IS 'Demographic Detail: Demographic details of an individual, stored in json format.'; --- ddl-end -- -COMMENT ON COLUMN archive.applicant_demographic_consumed.demog_detail_hash IS 'Demographic Detail Hash: Hash value of the demographic details stored in json format in a separate column. This will be used to make sure that nobody has tampered the data.'; --- ddl-end -- -COMMENT ON COLUMN archive.applicant_demographic_consumed.encrypted_dtimes IS 'Encrypted Data Time: Date and time when the data was encrypted. This will also be used get the key for decrypting the data.'; --- ddl-end -- -COMMENT ON COLUMN archive.applicant_demographic_consumed.status_code IS 'Status Code: Status of the pre-registration application. The application can be in draft / pending state or submitted state'; --- ddl-end -- -COMMENT ON COLUMN archive.applicant_demographic_consumed.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. 
The value of some of the attributes in current record is stored in this respective language.'; --- ddl-end -- -COMMENT ON COLUMN archive.applicant_demographic_consumed.cr_appuser_id IS 'Applciation Created User Id: User ID of the individual who is submitting the pre-registration application. It can be for self or for others like family members.'; --- ddl-end -- -COMMENT ON COLUMN archive.applicant_demographic_consumed.cr_by IS 'Created By : ID or name of the user who create / insert record.'; --- ddl-end -- -COMMENT ON COLUMN archive.applicant_demographic_consumed.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; --- ddl-end -- -COMMENT ON COLUMN archive.applicant_demographic_consumed.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; --- ddl-end -- -COMMENT ON COLUMN archive.applicant_demographic_consumed.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; --- ddl-end -- - diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-applicant_document_consumed.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-applicant_document_consumed.sql deleted file mode 100644 index 696e7ee0..00000000 --- a/data-archive/db_scripts/mosip_archive/ddl/archive-applicant_document_consumed.sql +++ /dev/null @@ -1,75 +0,0 @@ --- ------------------------------------------------------------------------------------------------- --- Database Name: mosip_archive --- Table Name : archive.applicant_document_consumed --- Purpose : Applicant Document Consumed: Documents that are uploaded as part of pre-registration process which was consumed is maintained here. --- Create By : Sadanandegowda --- Created Date : Dec-2020 --- --- Modified Date Modified By Comments / Remarks --- ------------------------------------------------------------------------------------------ --- --- ------------------------------------------------------------------------------------------ --- object: archive.applicant_document_consumed | type: TABLE -- --- DROP TABLE IF EXISTS archive.applicant_document_consumed CASCADE; -CREATE TABLE archive.applicant_document_consumed( - id character varying(36) NOT NULL, - prereg_id character varying(36) NOT NULL, - doc_name character varying(128) NOT NULL, - doc_cat_code character varying(36) NOT NULL, - doc_typ_code character varying(36) NOT NULL, - doc_file_format character varying(36) NOT NULL, - doc_id character varying(128) NOT NULL, - doc_hash character varying(64) NOT NULL, - encrypted_dtimes timestamp NOT NULL, - status_code character varying(36) NOT NULL, - lang_code character varying(3) NOT NULL, - cr_by character varying(256), - cr_dtimes timestamp, - upd_by character varying(256), - upd_dtimes timestamp, - CONSTRAINT pk_appldocc_prereg_id PRIMARY KEY (id) - -); --- indexes section ------------------------------------------------- -create unique index idx_appldocc_prereg_id on archive.applicant_document_consumed (prereg_id, doc_cat_code, doc_typ_code) ; - --- ddl-end -- -COMMENT ON TABLE archive.applicant_document_consumed IS 'Applicant Document Consumed: Documents that are uploaded as part of pre-registration process which was consumed is maintained here. 
'; --- ddl-end -- -COMMENT ON COLUMN archive.applicant_document_consumed.id IS 'Id: Unique id generated for the documents being uploaded as part of pre-registration process.'; --- ddl-end -- -COMMENT ON COLUMN archive.applicant_document_consumed.prereg_id IS 'Pre Registration Id: Id of the pre-registration application for which the documents are being uploaded.'; --- ddl-end -- -COMMENT ON COLUMN archive.applicant_document_consumed.doc_name IS 'Document Name: Name of the document that is uploaded'; --- ddl-end -- -COMMENT ON COLUMN archive.applicant_document_consumed.doc_cat_code IS 'Document Category Code: Document category code under which the document is being uploaded. Refers to master.document_category.code'; --- ddl-end -- -COMMENT ON COLUMN archive.applicant_document_consumed.doc_typ_code IS 'Document Type Code: Document type code under which the document is being uploaded. Refers to master.document_type.code'; --- ddl-end -- -COMMENT ON COLUMN archive.applicant_document_consumed.doc_file_format IS 'Documenet File Format: Format in which the document is being uploaded. Refers to master.document_file_format.code'; --- ddl-end -- -COMMENT ON COLUMN archive.applicant_document_consumed.doc_id IS 'Document Id: ID of the document being uploaded'; --- ddl-end -- -COMMENT ON COLUMN archive.applicant_document_consumed.doc_hash IS 'Document Hash: Hash value of the document being uploaded in document store. This will be used to make sure that nobody has tampered the document stored in a separate store. '; --- ddl-end -- -COMMENT ON COLUMN archive.applicant_document_consumed.encrypted_dtimes IS 'Encrypted Data Time: Date and time when the document was encrypted before uploading it on document store. This will also be used get the key for decrypting the data.'; --- ddl-end -- -COMMENT ON COLUMN archive.applicant_document_consumed.status_code IS 'Status Code: Status of the document that is being uploaded.'; --- ddl-end -- -COMMENT ON COLUMN archive.applicant_document_consumed.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. 
The value of some of the attributes in current record is stored in this respective language.'; --- ddl-end -- -COMMENT ON COLUMN archive.applicant_document_consumed.cr_by IS 'Created By : ID or name of the user who create / insert record.'; --- ddl-end -- -COMMENT ON COLUMN archive.applicant_document_consumed.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; --- ddl-end -- -COMMENT ON COLUMN archive.applicant_document_consumed.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; --- ddl-end -- -COMMENT ON COLUMN archive.applicant_document_consumed.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; --- ddl-end -- - - - - - - - diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-applications.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-applications.sql deleted file mode 100644 index 6196d359..00000000 --- a/data-archive/db_scripts/mosip_archive/ddl/archive-applications.sql +++ /dev/null @@ -1,33 +0,0 @@ --- ------------------------------------------------------------------------------------------------- --- Database Name: mosip_archive --- Table Name : archive.applications --- Purpose : Applications: --- --- Create By : Ram Bhatt --- Created Date : Oct-2021 --- --- Modified Date Modified By Comments / Remarks --- ------------------------------------------------------------------------------------------ --- --- ------------------------------------------------------------------------------------------ --- object: archive.applications | type: TABLE -- --- DROP TABLE IF EXISTS archive.applications CASCADE; -CREATE TABLE archive.applications( - application_id character varying(36) NOT NULL, - booking_type character varying(256) NOT NULL, - booking_status_code character varying(256), - application_status_code character varying(256), - regcntr_id character varying(10), - appointment_date date, - booking_date date, - slot_from_time time without time zone, - slot_to_time time without time zone, - contact_info character varying(256), - cr_by character varying(256) NOT NULL, - cr_dtimes timestamp without time zone NOT NULL, - upd_by character varying(256), - upd_dtimes timestamp without time zone, - CONSTRAINT appid_pk PRIMARY KEY (application_id) - -); --- ddl-end -- diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-audit-app_audit_log.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-audit-app_audit_log.sql new file mode 100644 index 00000000..22c7dff3 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-audit-app_audit_log.sql @@ -0,0 +1,63 @@ +-- object: archive.mosip_audit_app_audit_log | type: TABLE -- +-- DROP TABLE IF EXISTS archive.mosip_audit_app_audit_log CASCADE; +CREATE TABLE archive.mosip_audit_app_audit_log( + log_id character varying(64) NOT NULL, + log_dtimes timestamp NOT NULL, + log_desc character varying(2048), + event_id character varying(64) NOT NULL, + event_type character varying(64) NOT NULL, + event_name character varying(128) NOT NULL, + action_dtimes timestamp NOT NULL, + host_name character varying(128) NOT NULL, + host_ip character varying(256) NOT NULL, + session_user_id character varying(256) NOT NULL, + session_user_name character varying(128), + app_id character varying(64) NOT NULL, + app_name character varying(128) NOT NULL, + module_id character varying(64), + module_name character varying(128), + ref_id character varying(64), + ref_id_type character 
varying(64),
+	cr_by character varying(256) NOT NULL,
+	CONSTRAINT pk_audlog_log_id PRIMARY KEY (log_id)
+
+);
+-- ddl-end --
+COMMENT ON TABLE archive.mosip_audit_app_audit_log IS 'Application Audit Log : To track application related audit details for analysing, auditing and reporting purposes';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_audit_app_audit_log.log_id IS 'Log Id: Unique audit log id for each audit event log entry across the system.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_audit_app_audit_log.log_dtimes IS 'Log DateTimestamp: Audit Log Datetimestamp';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_audit_app_audit_log.log_desc IS 'Log Description: Detailed description of the audit event';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_audit_app_audit_log.event_id IS 'Event Id: Event ID for which the audit action happened';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_audit_app_audit_log.event_type IS 'Event Type: Type of event that triggered the audit log, like, SYSTEM, USER, APPLICATION, BATCH etc.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_audit_app_audit_log.event_name IS 'Event Name: Event Name of the Event Id captured';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_audit_app_audit_log.action_dtimes IS 'Action DateTimestamp: Timestamp when the application action happened.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_audit_app_audit_log.host_name IS 'Host Name: Host Name of the Host ID captured, if any.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_audit_app_audit_log.host_ip IS 'Host Ip: Machine or device host Ip address of the audit action event that happened/triggered';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_audit_app_audit_log.session_user_id IS 'Session user Id: Active User ID of the person who is logged in to the system and performing any action that triggered the audit log.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_audit_app_audit_log.session_user_name IS 'Session user Name: User Name of the Session User ID.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_audit_app_audit_log.app_id IS 'Application Id: Application Id of audit action happened and logged.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_audit_app_audit_log.app_name IS 'Application Name: Application Name';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_audit_app_audit_log.module_id IS 'Module Id: Application Module ID that triggered audit trigger log.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_audit_app_audit_log.module_name IS 'Module Name: Application Module Name of the Module ID captured.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_audit_app_audit_log.ref_id IS 'Reference Id: Reference ID for any cross reference purpose relevant for audit tracking, user id, app id, app or module id, etc.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_audit_app_audit_log.ref_id_type IS 'Reference Id Type: Type of reference id entered';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_audit_app_audit_log.cr_by IS 'Created By : ID or name of the user who create / insert record';
+-- ddl-end --
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-auth_transaction.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-auth_transaction.sql
deleted file mode 100644
index 24200af5..00000000
--- a/data-archive/db_scripts/mosip_archive/ddl/archive-auth_transaction.sql
+++ /dev/null
@@ -1,90 +0,0 @@
-- -------------------------------------------------------------------------------------------------
--- Database Name: mosip_archive
--- Table
Name : archive.auth_transaction --- Purpose : Authentication Transaction : To track all authentication transactions steps / stages in the process --- Create By : Sadanandegowda --- Created Date : Dec-2020 --- --- Modified Date Modified By Comments / Remarks --- ------------------------------------------------------------------------------------------ --- --- ------------------------------------------------------------------------------------------ - --- object: archive.auth_transaction | type: TABLE -- --- DROP TABLE IF EXISTS archive.auth_transaction CASCADE; -CREATE TABLE archive.auth_transaction( - id character varying(36) NOT NULL, - request_dtimes timestamp NOT NULL, - response_dtimes timestamp NOT NULL, - request_trn_id character varying(64), - auth_type_code character varying(36) NOT NULL, - status_code character varying(36) NOT NULL, - status_comment character varying(1024), - lang_code character varying(3) NOT NULL, - ref_id_type character varying(36), - ref_id character varying(64), - token_id character varying(128) NOT NULL, - requested_entity_type character varying(64), - requested_entity_id character varying(36), - requested_entity_name character varying(128), - static_tkn_id character varying(64), - request_signature character varying, - response_signature character varying, - cr_by character varying(256) NOT NULL, - cr_dtimes timestamp NOT NULL, - upd_by character varying(256), - upd_dtimes timestamp, - is_deleted boolean, - del_dtimes timestamp, - CONSTRAINT pk_authtrn_id PRIMARY KEY (id) - -); --- ddl-end -- -COMMENT ON TABLE archive.auth_transaction IS 'Authentication Transaction : To track all authentication transactions steps / stages in the process flow.'; --- ddl-end -- -COMMENT ON COLUMN archive.auth_transaction.id IS 'ID: This is unique transaction id assigned for each authentication transaction'; --- ddl-end -- -COMMENT ON COLUMN archive.auth_transaction.request_dtimes IS 'Request Datetimestamp : Timestamp of Authentication request received from client system.'; --- ddl-end -- -COMMENT ON COLUMN archive.auth_transaction.response_dtimes IS 'Response Datetimestamp : Date timestamp of response sent back to client system for the authentication request. '; --- ddl-end -- -COMMENT ON COLUMN archive.auth_transaction.request_trn_id IS 'Request Transaction Id : Unique Authentication request transaction id assigned for each request received from client system.'; --- ddl-end -- -COMMENT ON COLUMN archive.auth_transaction.auth_type_code IS 'Authentication Type Code : Type of authentication for the specific transaction, for ex., OTP, BIO, DEMO, etc'; --- ddl-end -- -COMMENT ON COLUMN archive.auth_transaction.status_code IS 'Status Code : Current Status code of the transaction in a process flow.'; --- ddl-end -- -COMMENT ON COLUMN archive.auth_transaction.status_comment IS 'Status Comment : Description for the status entered/updated by user or system assigned for the specific transaction.'; --- ddl-end -- -COMMENT ON COLUMN archive.auth_transaction.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language. 
'; --- ddl-end -- -COMMENT ON COLUMN archive.auth_transaction.ref_id_type IS 'Reference Id Type: Type of reference id entered in reference id column for ex., USER, VIRTUALID, UIN, PREREG, etc.'; --- ddl-end -- -COMMENT ON COLUMN archive.auth_transaction.ref_id IS 'Reference Id: Reference ID for any cross reference purpose relevant for tracking, for ex., user id, uin, vid, prereg id, rid etc.'; --- ddl-end -- -COMMENT ON COLUMN archive.auth_transaction.token_id IS 'Token ID : Token ID generated in reference with UIN/VID'; --- ddl-end -- -COMMENT ON COLUMN archive.auth_transaction.requested_entity_type IS 'Requested Entity Type: Type of entity through which the authentication request was initiated. It can from a partner, internal authenticaition, etc.'; --- ddl-end -- -COMMENT ON COLUMN archive.auth_transaction.requested_entity_id IS 'Requested Entity Id: ID of the entity through which the authentication request was initiated.'; --- ddl-end -- -COMMENT ON COLUMN archive.auth_transaction.requested_entity_name IS 'Requested Entity Name: Name of the entity through which the authentication request was initiated.'; --- ddl-end -- -COMMENT ON COLUMN archive.auth_transaction.static_tkn_id IS 'Static Token Id : This is a static token id assigned for each authentication request. Static token id is combination of TSPID + UIN generated for any TSP or Individuls and sent back in response. End user can use this id while authenticating themselves. '; --- ddl-end -- -COMMENT ON COLUMN archive.auth_transaction.request_signature IS 'Request Signature: Request body information stored with signed'; --- ddl-end -- -COMMENT ON COLUMN archive.auth_transaction.response_signature IS 'Response Signature: Response body stored with signed'; --- ddl-end -- -COMMENT ON COLUMN archive.auth_transaction.cr_by IS 'Created By : ID or name of the user who create / insert record'; --- ddl-end -- -COMMENT ON COLUMN archive.auth_transaction.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; --- ddl-end -- -COMMENT ON COLUMN archive.auth_transaction.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; --- ddl-end -- -COMMENT ON COLUMN archive.auth_transaction.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; --- ddl-end -- -COMMENT ON COLUMN archive.auth_transaction.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.'; --- ddl-end -- -COMMENT ON COLUMN archive.auth_transaction.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE'; --- ddl-end -- \ No newline at end of file diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-credential-batch_job_execution.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-credential-batch_job_execution.sql new file mode 100644 index 00000000..0d928d22 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-credential-batch_job_execution.sql @@ -0,0 +1,20 @@ +-- Table: archive.mosip_credential_batch_job_execution + +-- DROP TABLE archive.mosip_credential_batch_job_execution; + +CREATE TABLE archive.mosip_credential_batch_job_execution ( + JOB_EXECUTION_ID BIGINT PRIMARY KEY , + VERSION BIGINT, + JOB_INSTANCE_ID BIGINT NOT NULL, + CREATE_TIME TIMESTAMP NOT NULL, + START_TIME TIMESTAMP DEFAULT NULL, + END_TIME TIMESTAMP DEFAULT NULL, + STATUS VARCHAR(10), + EXIT_CODE VARCHAR(20), + EXIT_MESSAGE VARCHAR(2500), + LAST_UPDATED TIMESTAMP, + 
JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL +) +WITH ( + OIDS = FALSE +); diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-credential-batch_job_execution_context.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-credential-batch_job_execution_context.sql new file mode 100644 index 00000000..d66df155 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-credential-batch_job_execution_context.sql @@ -0,0 +1,14 @@ +-- Table: archive.mosip_credential_batch_job_execution_context + +-- DROP TABLE archive.mosip_credential_batch_job_execution_context; + +CREATE TABLE archive.mosip_credential_batch_job_execution_context +( + job_execution_id bigint NOT NULL, + short_context character varying(2500) COLLATE pg_catalog."default" NOT NULL, + serialized_context text COLLATE pg_catalog."default", + CONSTRAINT mosip_credential_batch_job_execution_context_pkey PRIMARY KEY (job_execution_id) +) +WITH ( + OIDS = FALSE +); diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-credential-batch_job_execution_params.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-credential-batch_job_execution_params.sql new file mode 100644 index 00000000..9d0e0f64 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-credential-batch_job_execution_params.sql @@ -0,0 +1,17 @@ +-- Table: archive.mosip_credential_batch_job_execution_params + +-- DROP TABLE archive.mosip_credential_batch_job_execution_params; + +CREATE TABLE archive.mosip_credential_batch_job_execution_params ( + JOB_EXECUTION_ID BIGINT NOT NULL , + TYPE_CD VARCHAR(6) NOT NULL , + KEY_NAME VARCHAR(100) NOT NULL , + STRING_VAL VARCHAR(250) , + DATE_VAL TIMESTAMP DEFAULT NULL , + LONG_VAL BIGINT , + DOUBLE_VAL DOUBLE PRECISION , + IDENTIFYING CHAR(1) NOT NULL +) +WITH ( + OIDS = FALSE +); diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-credential-batch_job_instance.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-credential-batch_job_instance.sql new file mode 100644 index 00000000..7fce7c10 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-credential-batch_job_instance.sql @@ -0,0 +1,13 @@ +-- Table: archive.mosip_credential_batch_job_instance + +-- DROP TABLE archive.mosip_credential_batch_job_instance; + +CREATE TABLE archive.mosip_credential_batch_job_instance ( + JOB_INSTANCE_ID BIGINT PRIMARY KEY , + VERSION BIGINT, + JOB_NAME VARCHAR(100) NOT NULL , + JOB_KEY VARCHAR(2500) +) +WITH ( + OIDS = FALSE +); diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-credential-batch_step_execution.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-credential-batch_step_execution.sql new file mode 100644 index 00000000..810ed7f6 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-credential-batch_step_execution.sql @@ -0,0 +1,27 @@ +-- Table: archive.mosip_credential_batch_step_execution + +-- DROP TABLE archive.mosip_credential_batch_step_execution; + +CREATE TABLE archive.mosip_credential_batch_step_execution ( + STEP_EXECUTION_ID BIGINT PRIMARY KEY , + VERSION BIGINT NOT NULL, + STEP_NAME VARCHAR(100) NOT NULL, + JOB_EXECUTION_ID BIGINT NOT NULL, + START_TIME TIMESTAMP NOT NULL , + END_TIME TIMESTAMP DEFAULT NULL, + STATUS VARCHAR(10), + COMMIT_COUNT BIGINT , + READ_COUNT BIGINT , + FILTER_COUNT BIGINT , + WRITE_COUNT BIGINT , + READ_SKIP_COUNT BIGINT , + WRITE_SKIP_COUNT BIGINT , + PROCESS_SKIP_COUNT BIGINT , + ROLLBACK_COUNT BIGINT , + EXIT_CODE VARCHAR(20) , + EXIT_MESSAGE VARCHAR(2500) , + LAST_UPDATED TIMESTAMP +) +WITH 
( + OIDS = FALSE +); diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-credential-batch_step_execution_context.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-credential-batch_step_execution_context.sql new file mode 100644 index 00000000..100f62ce --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-credential-batch_step_execution_context.sql @@ -0,0 +1,15 @@ +-- Table: archive.mosip_credential_batch_step_execution_context + +-- DROP TABLE archive.mosip_credential_batch_step_execution_context; + +CREATE TABLE archive.mosip_credential_batch_step_execution_context +( + step_execution_id bigint NOT NULL, + short_context character varying(2500) COLLATE pg_catalog."default" NOT NULL, + serialized_context text COLLATE pg_catalog."default", + CONSTRAINT mosip_credential_batch_step_execution_context_pkey PRIMARY KEY (step_execution_id) + +) +WITH ( + OIDS = FALSE +); diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-credential-credential_transaction.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-credential-credential_transaction.sql new file mode 100644 index 00000000..b09a39da --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-credential-credential_transaction.sql @@ -0,0 +1,69 @@ +-- ------------------------------------------------------------------------------------------------- +-- Database Name: mosip_credential +-- Table Name : archive.mosip_credential_credential_transaction +-- Purpose : Credential: The credential share is a functional service that interacts with the ID Repository and collects the user attributes for printing. +-- +-- Create By : Sadanandegowda DM +-- Created Date : Aug-2020 +-- +-- Modified Date Modified By Comments / Remarks +-- ------------------------------------------------------------------------------------------ +-- Jan-2021 Ram Bhatt Set is_deleted flag to not null and default false +-- Mar-2021 Ram Bhatt Reverting is_deleted not null changes +-- Apr-2021 Ram Bhatt status_comment added +-- ------------------------------------------------------------------------------------------ +-- object: archive.mosip_credential_credential_transaction | type: TABLE -- +-- DROP TABLE IF EXISTS archive.mosip_credential_credential_transaction CASCADE; +CREATE TABLE archive.mosip_credential_credential_transaction( + id character varying(36) NOT NULL, + credential_id character varying(36), + request character varying, + status_code character varying(32) NOT NULL, + datashareurl character varying(256), + issuancedate timestamp, + signature character varying, + trn_retry_count smallint, + cr_by character varying(256) NOT NULL, + cr_dtimes timestamp NOT NULL, + upd_by character varying(256), + upd_dtimes timestamp, + is_deleted boolean DEFAULT FALSE, + del_dtimes timestamp, + status_comment character varying(512), + CONSTRAINT pk_credtrn_id PRIMARY KEY (id) + +); + +CREATE INDEX cred_tran_NEW_status_cr_dtimes ON archive.mosip_credential_credential_transaction USING btree (cr_dtimes) WHERE status_code = 'NEW'; + +-- ddl-end -- +COMMENT ON TABLE archive.mosip_credential_credential_transaction IS 'Credential: The credential share is a functional service that interacts with the ID Repository and collects the user attributes for printing'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_credential_credential_transaction.id IS 'ID: Unique id generated by the system for each credentials generated'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_credential_credential_transaction.credential_id IS 'Credential Id: 
Credential ID generated when the credential is distributed';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_credential_credential_transaction.request IS 'Request: Request json of credential request generator';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_credential_credential_transaction.status_code IS 'Status Code: Contains status of request';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_credential_credential_transaction.datashareurl IS 'Datashare URL: Credential data url';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_credential_credential_transaction.issuancedate IS 'Issuance Date: Credential issue date';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_credential_credential_transaction.signature IS 'Signature: Signature of credential data';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_credential_credential_transaction.trn_retry_count IS 'Retry Count: Request retry count';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_credential_credential_transaction.cr_by IS 'Created By : ID or name of the user who create / insert record';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_credential_credential_transaction.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_credential_credential_transaction.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_credential_credential_transaction.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_credential_credential_transaction.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_credential_credential_transaction.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
+-- ddl-end --
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-credential_event_store.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-credential_event_store.sql
deleted file mode 100644
index 42baf9de..00000000
--- a/data-archive/db_scripts/mosip_archive/ddl/archive-credential_event_store.sql
+++ /dev/null
@@ -1,67 +0,0 @@
--- -------------------------------------------------------------------------------------------------
--- Database Name: mosip_archive
--- Table Name : archive.credential_event_store
--- Purpose :
---
--- Create By : Ram Bhatt
--- Created Date : Oct-2021
---
--- Modified Date Modified By Comments / Remarks
--- ------------------------------------------------------------------------------------------
--- ------------------------------------------------------------------------------------------
-
--- object: archive.credential_event_store | type: TABLE --
--- DROP TABLE IF EXISTS archive.credential_event_store CASCADE;
-CREATE TABLE archive.credential_event_store(
-    event_id character varying(36) NOT NULL,
-    event_topic character varying(256) NOT NULL,
-    credential_transaction_id character varying(36) NOT NULL,
-    publisher character varying(128),
-    published_on_dtimes timestamp,
-    event_object character varying,
-    status_code character varying(36),
-    retry_count smallint,
-    cr_by character varying(256) NOT NULL,
-    cr_dtimes timestamp NOT NULL,
-    upd_by character varying(256),
-    upd_dtimes timestamp,
-    is_deleted boolean DEFAULT FALSE,
-    del_dtimes timestamp,
-    CONSTRAINT pk_ces_id PRIMARY KEY (event_id)
-
-);
--- ddl-end --
---index section starts---- -CREATE INDEX ind_ces_id ON archive.credential_event_store (cr_dtimes); ---index section ends------ -COMMENT ON TABLE archive.credential_event_store IS 'Credential Event Store: Store all credential request in IDA and their status, Retry request incase of failure'; --- ddl-end -- -COMMENT ON COLUMN archive.credential_event_store.event_id IS 'Event ID: Event id of the credential request'; --- ddl-end -- -COMMENT ON COLUMN archive.credential_event_store.event_topic IS 'Event Topic: Topic of the credential request where message is requested through websub'; --- ddl-end -- -COMMENT ON COLUMN archive.credential_event_store.credential_transaction_id IS 'Credential transaction id where credential request details are stored'; --- ddl-end -- -COMMENT ON COLUMN archive.credential_event_store.publisher IS 'Pusblisher of the messages'; --- ddl-end -- -COMMENT ON COLUMN archive.credential_event_store.published_on_dtimes IS 'Date and time of the message published'; --- ddl-end -- -COMMENT ON COLUMN archive.credential_event_store.event_object IS 'Credential event object details'; --- ddl-end -- -COMMENT ON COLUMN archive.credential_event_store.status_code IS 'Status of the envent ex: NEW, STORED, FAILED, FAILED_WITH_MAX_RETRIES'; --- ddl-end -- -COMMENT ON COLUMN archive.credential_event_store.retry_count IS 'Retry count of the credential request event incase of failure'; --- ddl-end -- -COMMENT ON COLUMN archive.credential_event_store.cr_by IS 'Created By : ID or name of the user who create / insert record'; --- ddl-end -- -COMMENT ON COLUMN archive.credential_event_store.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; --- ddl-end -- -COMMENT ON COLUMN archive.credential_event_store.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; --- ddl-end -- -COMMENT ON COLUMN archive.credential_event_store.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; --- ddl-end -- -COMMENT ON COLUMN archive.credential_event_store.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.'; --- ddl-end -- -COMMENT ON COLUMN archive.credential_event_store.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE'; --- ddl-end -- - diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-esignet-consent_history.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-esignet-consent_history.sql new file mode 100644 index 00000000..f4b4d3e7 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-esignet-consent_history.sql @@ -0,0 +1,44 @@ +-- This Source Code Form is subject to the terms of the Mozilla Public +-- License, v. 2.0. If a copy of the MPL was not distributed with this +-- file, You can obtain one at https://mozilla.org/MPL/2.0/. 
+-- -------------------------------------------------------------------------------------------------
+-- Database Name: mosip_esignet
+-- Table Name : archive.mosip_esignet_consent_history
+-- Purpose : To store user consent details
+--
+-- Create By : Hitesh C
+-- Created Date : May-2023
+--
+-- Modified Date Modified By Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+-- ------------------------------------------------------------------------------------------
+
+create table archive.mosip_esignet_consent_history (
+    id UUID NOT NULL,
+    client_id VARCHAR NOT NULL,
+    psu_token VARCHAR NOT NULL,
+    claims VARCHAR NOT NULL,
+    authorization_scopes VARCHAR NOT NULL,
+    cr_dtimes TIMESTAMP DEFAULT NOW() NOT NULL,
+    expire_dtimes TIMESTAMP,
+    signature VARCHAR,
+    hash VARCHAR,
+    accepted_claims VARCHAR,
+    permitted_scopes VARCHAR,
+    PRIMARY KEY (id)
+);
+CREATE INDEX IF NOT EXISTS idx_mosip_esignet_consent_history_psu_client ON archive.mosip_esignet_consent_history(psu_token, client_id);
+
+COMMENT ON TABLE archive.mosip_esignet_consent_history IS 'Contains user consent details';
+
+COMMENT ON COLUMN archive.mosip_esignet_consent_history.id IS 'UUID : Unique id associated with each consent';
+COMMENT ON COLUMN archive.mosip_esignet_consent_history.client_id IS 'Client ID: Client id associated with the relying party';
+COMMENT ON COLUMN archive.mosip_esignet_consent_history.psu_token IS 'PSU token associated with user consent';
+COMMENT ON COLUMN archive.mosip_esignet_consent_history.claims IS 'Json of requested and user accepted claims';
+COMMENT ON COLUMN archive.mosip_esignet_consent_history.authorization_scopes IS 'Json string of requested authorization scopes';
+COMMENT ON COLUMN archive.mosip_esignet_consent_history.cr_dtimes IS 'Consent creation date';
+COMMENT ON COLUMN archive.mosip_esignet_consent_history.expire_dtimes IS 'Expiration date';
+COMMENT ON COLUMN archive.mosip_esignet_consent_history.signature IS 'Signature of consent object';
+COMMENT ON COLUMN archive.mosip_esignet_consent_history.hash IS 'Hash of consent object';
+COMMENT ON COLUMN archive.mosip_esignet_consent_history.accepted_claims IS 'Accepted Claims by the user';
+COMMENT ON COLUMN archive.mosip_esignet_consent_history.permitted_scopes IS 'Accepted Scopes by the user';
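-- The composite index above serves lookups of a subject's latest consent for a given
-- relying party. A minimal sketch of such a query (illustrative only; the :psu_token
-- and :client_id placeholders are assumptions, not part of these scripts):
SELECT id, claims, accepted_claims, permitted_scopes, cr_dtimes
FROM archive.mosip_esignet_consent_history
WHERE psu_token = :psu_token
  AND client_id = :client_id
ORDER BY cr_dtimes DESC
LIMIT 1;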
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-ida-anonymous_profile.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-ida-anonymous_profile.sql
new file mode 100644
index 00000000..2c2863be
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-ida-anonymous_profile.sql
@@ -0,0 +1,45 @@
+-- -------------------------------------------------------------------------------------------------
+-- Database Name: mosip_ida
+-- Table Name : mosip_ida_anonymous_profile
+-- Purpose : anonymous_profile: Anonymous profiling information for reporting purpose.
+--
+-- Create By : Loganathan Sekar
+-- Created Date : 10-Sep-2021
+--
+-- Modified Date Modified By Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+-- Sep-2021 Loganathan Sekar Created anonymous_profile table
+-- ------------------------------------------------------------------------------------------
+
+-- object: archive.mosip_ida_anonymous_profile | type: TABLE --
+-- DROP TABLE IF EXISTS archive.mosip_ida_anonymous_profile CASCADE;
+CREATE TABLE archive.mosip_ida_anonymous_profile(
+    id character varying(36) NOT NULL,
+    profile character varying NOT NULL,
+    cr_by character varying(256) NOT NULL,
+    cr_dtimes timestamp NOT NULL,
+    upd_by character varying(256),
+    upd_dtimes timestamp,
+    is_deleted boolean DEFAULT FALSE,
+    del_dtimes timestamp,
+    CONSTRAINT pk_profile PRIMARY KEY (id)
+);
+-- ddl-end --
+COMMENT ON TABLE archive.mosip_ida_anonymous_profile IS 'anonymous_profile: Anonymous profiling information for reporting purpose.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_anonymous_profile.id IS 'Reference ID: System generated id for references in the system.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_anonymous_profile.profile IS 'Profile : Contains complete anonymous profile data generated by ID-Repository and stored in plain json text format.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_anonymous_profile.cr_by IS 'Created By : ID or name of the user who create / insert record';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_anonymous_profile.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_anonymous_profile.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_anonymous_profile.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_anonymous_profile.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_anonymous_profile.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
+-- ddl-end --
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-ida-auth_transaction.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-ida-auth_transaction.sql
new file mode 100644
index 00000000..ff701c3d
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-ida-auth_transaction.sql
@@ -0,0 +1,97 @@
+-- -------------------------------------------------------------------------------------------------
+-- Database Name: mosip_ida
+-- Table Name : mosip_ida_auth_transaction
+-- Purpose : Authentication Transaction : To track all authentication transactions steps / stages in the process flow.
+-- +-- Create By : Nasir Khan / Sadanandegowda +-- Created Date : 15-Jul-2019 +-- +-- Modified Date Modified By Comments / Remarks +-- ------------------------------------------------------------------------------------------ +-- Sep-2020 Sadanandegowda DM Removed uin and uin_hash attribute and added token_id +-- Jan-2021 Ram Bhatt Set is_deleted flag to not null and default false +-- Feb-2021 Ram Bhatt Changed size of auth_type_code from 32 to 128 +-- Sep-2021 Ram Bhatt Added index to request_trn_id, request_dtimes, token_id columns +-- ------------------------------------------------------------------------------------------ + +-- object: archive.mosip_ida_auth_transaction | type: TABLE -- +-- DROP TABLE IF EXISTS archive.mosip_ida_auth_transaction CASCADE; +CREATE TABLE archive.mosip_ida_auth_transaction( + id character varying(36) NOT NULL, + request_dtimes timestamp NOT NULL, + response_dtimes timestamp NOT NULL, + request_trn_id character varying(64), + auth_type_code character varying(128) NOT NULL, + status_code character varying(36) NOT NULL, + status_comment character varying(1024), + lang_code character varying(3) NOT NULL, + ref_id_type character varying(36), + ref_id character varying(64), + token_id character varying(128) NOT NULL, + requested_entity_type character varying(64), + requested_entity_id character varying(36), + requested_entity_name character varying(128), + static_tkn_id character varying(64), + request_signature character varying, + response_signature character varying, + cr_by character varying(256) NOT NULL, + cr_dtimes timestamp NOT NULL, + upd_by character varying(256), + upd_dtimes timestamp, + is_deleted boolean NOT NULL DEFAULT FALSE, + del_dtimes timestamp, + CONSTRAINT pk_authtrn_id PRIMARY KEY (id) + +); +-- ddl-end -- +--index section starts---- +CREATE INDEX ind_reqtrnid_dtimes_tknid ON archive.mosip_ida_auth_transaction (request_trn_id, request_dtimes, token_id,cr_dtimes, auth_type_code); +--index section ends------ +COMMENT ON TABLE archive.mosip_ida_auth_transaction IS 'Authentication Transaction : To track all authentication transactions steps / stages in the process flow.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_ida_auth_transaction.id IS 'ID: This is unique transaction id assigned for each authentication transaction'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_ida_auth_transaction.request_dtimes IS 'Request Datetimestamp : Timestamp of Authentication request received from client system.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_ida_auth_transaction.response_dtimes IS 'Response Datetimestamp : Date timestamp of response sent back to client system for the authentication request. 
';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_auth_transaction.request_trn_id IS 'Request Transaction Id : Unique Authentication request transaction id assigned for each request received from client system.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_auth_transaction.auth_type_code IS 'Authentication Type Code : Type of authentication for the specific transaction, for ex., OTP, BIO, DEMO, etc';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_auth_transaction.status_code IS 'Status Code : Current Status code of the transaction in a process flow.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_auth_transaction.status_comment IS 'Status Comment : Description for the status entered/updated by user or system assigned for the specific transaction.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_auth_transaction.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language. ';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_auth_transaction.ref_id_type IS 'Reference Id Type: Type of reference id entered in reference id column for ex., USER, VIRTUALID, UIN, PREREG, etc.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_auth_transaction.ref_id IS 'Reference Id: Reference ID for any cross reference purpose relevant for tracking, for ex., user id, uin, vid, prereg id, rid etc.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_auth_transaction.token_id IS 'Token ID : Token ID generated in reference with UIN/VID';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_auth_transaction.requested_entity_type IS 'Requested Entity Type: Type of entity through which the authentication request was initiated. It can be from a partner, internal authentication, etc.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_auth_transaction.requested_entity_id IS 'Requested Entity Id: ID of the entity through which the authentication request was initiated.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_auth_transaction.requested_entity_name IS 'Requested Entity Name: Name of the entity through which the authentication request was initiated.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_auth_transaction.static_tkn_id IS 'Static Token Id : This is a static token id assigned for each authentication request. Static token id is a combination of TSPID + UIN generated for any TSP or Individuals and sent back in response. End user can use this id while authenticating themselves. ';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_auth_transaction.request_signature IS 'Request Signature: Request body information stored in signed form';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_auth_transaction.response_signature IS 'Response Signature: Response body stored in signed form';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_auth_transaction.cr_by IS 'Created By : ID or name of the user who create / insert record';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_auth_transaction.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_auth_transaction.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_auth_transaction.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_auth_transaction.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_auth_transaction.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
+-- ddl-end --
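-- The archive-jobs scripts move rows like these from the source schema into the
-- archive schema once they are older than the configured archive_older_than_days.
-- A single-statement sketch of that move for auth_transaction (assumptions: the
-- source table ida.auth_transaction has the same column layout as the archive
-- table, and the 2-day cutoff mirrors the sample .ini; the data-modifying CTE is
-- illustrative and not the Python job's exact mechanics):
WITH moved AS (
    DELETE FROM ida.auth_transaction
    WHERE cr_dtimes < now() - interval '2 days'
    RETURNING *
)
INSERT INTO archive.mosip_ida_auth_transaction
SELECT * FROM moved;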
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-ida-batch_job_execution.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-ida-batch_job_execution.sql
new file mode 100644
index 00000000..4fd45e2b
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-ida-batch_job_execution.sql
@@ -0,0 +1,20 @@
+-- Table: archive.mosip_ida_batch_job_execution
+
+-- DROP TABLE archive.mosip_ida_batch_job_execution;
+
+CREATE TABLE archive.mosip_ida_batch_job_execution (
+    JOB_EXECUTION_ID BIGINT PRIMARY KEY ,
+    VERSION BIGINT,
+    JOB_INSTANCE_ID BIGINT NOT NULL,
+    CREATE_TIME TIMESTAMP NOT NULL,
+    START_TIME TIMESTAMP DEFAULT NULL,
+    END_TIME TIMESTAMP DEFAULT NULL,
+    STATUS VARCHAR(10),
+    EXIT_CODE VARCHAR(20),
+    EXIT_MESSAGE VARCHAR(2500),
+    LAST_UPDATED TIMESTAMP,
+    JOB_CONFIGURATION_LOCATION VARCHAR(2500) NULL
+)
+WITH (
+    OIDS = FALSE
+);
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-ida-batch_job_execution_context.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-ida-batch_job_execution_context.sql
new file mode 100644
index 00000000..49c75fff
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-ida-batch_job_execution_context.sql
@@ -0,0 +1,14 @@
+-- Table: archive.mosip_ida_batch_job_execution_context
+
+-- DROP TABLE archive.mosip_ida_batch_job_execution_context;
+
+CREATE TABLE archive.mosip_ida_batch_job_execution_context
+(
+    job_execution_id bigint NOT NULL,
+    short_context character varying(2500) COLLATE pg_catalog."default" NOT NULL,
+    serialized_context text COLLATE pg_catalog."default",
+    CONSTRAINT batch_job_execution_context_pkey PRIMARY KEY (job_execution_id)
+)
+WITH (
+    OIDS = FALSE
+);
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-ida-batch_job_execution_params.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-ida-batch_job_execution_params.sql
new file mode 100644
index 00000000..8732698d
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-ida-batch_job_execution_params.sql
@@ -0,0 +1,17 @@
+-- Table: archive.mosip_ida_batch_job_execution_params
+
+-- DROP TABLE archive.mosip_ida_batch_job_execution_params;
+
+CREATE TABLE archive.mosip_ida_batch_job_execution_params (
+    JOB_EXECUTION_ID BIGINT NOT NULL ,
+    TYPE_CD VARCHAR(6) NOT NULL ,
+    KEY_NAME VARCHAR(100) NOT NULL ,
+    STRING_VAL VARCHAR(250) ,
+    DATE_VAL TIMESTAMP DEFAULT NULL ,
+    LONG_VAL BIGINT ,
+    DOUBLE_VAL DOUBLE PRECISION ,
+    IDENTIFYING CHAR(1) NOT NULL
+)
+WITH (
+    OIDS = FALSE
+);
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-ida-batch_job_instance.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-ida-batch_job_instance.sql
new file mode 100644
index 00000000..15bab9aa
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-ida-batch_job_instance.sql
@@ -0,0 +1,13 @@
+-- Table: archive.mosip_ida_batch_job_instance
+
+-- DROP TABLE archive.mosip_ida_batch_job_instance;
+
+CREATE TABLE archive.mosip_ida_batch_job_instance (
+    JOB_INSTANCE_ID BIGINT PRIMARY KEY ,
+    VERSION BIGINT,
+    JOB_NAME VARCHAR(100) NOT NULL ,
+    JOB_KEY VARCHAR(2500)
+)
+WITH (
+    OIDS = FALSE
+);
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-ida-batch_step_execution.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-ida-batch_step_execution.sql
new file mode 100644
index 00000000..347e787b
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-ida-batch_step_execution.sql
@@ -0,0 +1,27 @@
+-- Table: archive.mosip_ida_batch_step_execution
+
+-- DROP TABLE archive.mosip_ida_batch_step_execution;
+
+CREATE TABLE archive.mosip_ida_batch_step_execution (
+    STEP_EXECUTION_ID BIGINT PRIMARY KEY ,
+    VERSION BIGINT NOT NULL,
+    STEP_NAME VARCHAR(100) NOT NULL,
+    JOB_EXECUTION_ID BIGINT NOT NULL,
+    START_TIME TIMESTAMP NOT NULL ,
+    END_TIME TIMESTAMP DEFAULT NULL,
+    STATUS VARCHAR(10),
+    COMMIT_COUNT BIGINT ,
+    READ_COUNT BIGINT ,
+    FILTER_COUNT BIGINT ,
+    WRITE_COUNT BIGINT ,
+    READ_SKIP_COUNT BIGINT ,
+    WRITE_SKIP_COUNT BIGINT ,
+    PROCESS_SKIP_COUNT BIGINT ,
+    ROLLBACK_COUNT BIGINT ,
+    EXIT_CODE VARCHAR(20) ,
+    EXIT_MESSAGE VARCHAR(2500) ,
+    LAST_UPDATED TIMESTAMP
+)
+WITH (
+    OIDS = FALSE
+);
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-ida-batch_step_execution_context.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-ida-batch_step_execution_context.sql
new file mode 100644
index 00000000..9497f5b5
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-ida-batch_step_execution_context.sql
@@ -0,0 +1,15 @@
+-- Table: archive.mosip_ida_batch_step_execution_context
+
+-- DROP TABLE archive.mosip_ida_batch_step_execution_context;
+
+CREATE TABLE archive.mosip_ida_batch_step_execution_context
+(
+    step_execution_id bigint NOT NULL,
+    short_context character varying(2500) COLLATE pg_catalog."default" NOT NULL,
+    serialized_context text COLLATE pg_catalog."default",
+    CONSTRAINT batch_step_execution_context_pkey PRIMARY KEY (step_execution_id)
+
+)
+WITH (
+    OIDS = FALSE
+);
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-ida-credential_event_store.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-ida-credential_event_store.sql
new file mode 100644
index 00000000..ffd1c9cb
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-ida-credential_event_store.sql
@@ -0,0 +1,69 @@
+-- -------------------------------------------------------------------------------------------------
+-- Database Name: mosip_ida
+-- Table Name : archive.mosip_ida_credential_event_store
+-- Purpose :
+--
+-- Create By : Sadanandegowda DM
+-- Created Date : Jan-2021
+--
+-- Modified Date Modified By Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+-- Jan-2021 Ram Bhatt Set is_deleted flag to not null and default false
+-- Mar-2021 Ram Bhatt Reverting is_deleted not null changes
+-- Sep-2021 Ram Bhatt Added index to cr_dtimes column
+-- ------------------------------------------------------------------------------------------
+
+-- object: archive.mosip_ida_credential_event_store | type: TABLE --
+-- DROP TABLE IF EXISTS archive.mosip_ida_credential_event_store CASCADE;
+CREATE TABLE archive.mosip_ida_credential_event_store(
+    event_id character varying(36) NOT NULL,
+    event_topic character varying(256) NOT NULL,
+    credential_transaction_id character varying(36) NOT NULL,
+    publisher character varying(128),
+    published_on_dtimes timestamp,
+    event_object character varying,
+    status_code character varying(36),
+    retry_count smallint,
+    cr_by character varying(256) NOT NULL,
+    cr_dtimes timestamp NOT NULL,
+    upd_by character varying(256),
+    upd_dtimes timestamp,
+    is_deleted boolean DEFAULT FALSE,
+    del_dtimes timestamp,
+    CONSTRAINT pk_ces_id PRIMARY KEY (event_id)
+
+);
+-- ddl-end --
+--index section starts----
+CREATE INDEX ind_ces_id ON archive.mosip_ida_credential_event_store (cr_dtimes);
+--index section ends------
+COMMENT ON TABLE archive.mosip_ida_credential_event_store IS 'Credential Event Store: Store all credential requests in IDA and their status; retry requests in case of failure';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_credential_event_store.event_id IS 'Event ID: Event id of the credential request';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_credential_event_store.event_topic IS 'Event Topic: Topic of the credential request where message is requested through websub';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_credential_event_store.credential_transaction_id IS 'Credential transaction id where credential request details are stored';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_credential_event_store.publisher IS 'Publisher of the messages';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_credential_event_store.published_on_dtimes IS 'Date and time of the message published';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_credential_event_store.event_object IS 'Credential event object details';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_credential_event_store.status_code IS 'Status of the event ex: NEW, STORED, FAILED, FAILED_WITH_MAX_RETRIES';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_credential_event_store.retry_count IS 'Retry count of the credential request event in case of failure';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_credential_event_store.cr_by IS 'Created By : ID or name of the user who create / insert record';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_credential_event_store.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_credential_event_store.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_credential_event_store.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_credential_event_store.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_credential_event_store.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
+-- ddl-end --
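-- status_code and retry_count drive the retry behaviour described in the table
-- comment; rows landing here are archival copies, so queries against this table
-- are for reporting rather than live retries. A minimal sketch summarising
-- archived events by status:
SELECT status_code, count(*) AS events, max(retry_count) AS max_retries
FROM archive.mosip_ida_credential_event_store
GROUP BY status_code
ORDER BY events DESC;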
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-ida-otp_transaction.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-ida-otp_transaction.sql
new file mode 100644
index 00000000..c8bbb4da
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-ida-otp_transaction.sql
@@ -0,0 +1,66 @@
+-- -------------------------------------------------------------------------------------------------
+-- Database Name: mosip_ida
+-- Table Name : mosip_ida_otp_transaction
+-- Purpose : OTP Transaction: All OTP related data and validation details are maintained here for ID Authentication.
+--
+-- Create By : Sadanandegowda DM
+-- Created Date : Sep-2020
+--
+-- Modified Date Modified By Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+
+-- ------------------------------------------------------------------------------------------
+-- object: archive.mosip_ida_otp_transaction | type: TABLE --
+-- DROP TABLE IF EXISTS archive.mosip_ida_otp_transaction CASCADE;
+CREATE TABLE archive.mosip_ida_otp_transaction(
+    id character varying(36) NOT NULL,
+    ref_id character varying(64) NOT NULL,
+    otp_hash character varying(512) NOT NULL,
+    generated_dtimes timestamp,
+    expiry_dtimes timestamp,
+    validation_retry_count smallint,
+    status_code character varying(36),
+    lang_code character varying(3),
+    cr_by character varying(256) NOT NULL,
+    cr_dtimes timestamp NOT NULL,
+    upd_by character varying(256),
+    upd_dtimes timestamp,
+    is_deleted boolean DEFAULT FALSE,
+    del_dtimes timestamp,
+    CONSTRAINT pk_otpt_id PRIMARY KEY (id)
+
+);
+-- ddl-end --
+--index section starts----
+CREATE INDEX ind_otphsh ON archive.mosip_ida_otp_transaction (otp_hash,status_code);
+--index section ends------
+COMMENT ON TABLE archive.mosip_ida_otp_transaction IS 'OTP Transaction: All OTP related data and validation details are maintained here for ID Authentication module.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_otp_transaction.id IS 'ID: Unique transaction id for each OTP transaction request.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_otp_transaction.ref_id IS 'Reference ID: Reference ID is a reference information received from OTP requester which can be used while validating the OTP. AM: please give examples of ref_id';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_otp_transaction.otp_hash IS 'OTP Hash: Hash of id, ref_id and otp which is generated based on the configuration setup and sent to the requester application / module.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_otp_transaction.generated_dtimes IS 'Generated Date Time: Date and Time when the OTP was generated';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_otp_transaction.expiry_dtimes IS 'Expiry Date Time: Date Time when the OTP will be expired';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_otp_transaction.validation_retry_count IS 'Validation Retry Count: Validation retry counts of this OTP request. If the validation retry crosses the threshold limit, then the OTP will be de-activated.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_ida_otp_transaction.status_code IS 'Status Code: Status of the OTP whether it is active or expired. AM: please enumerate the status types.
They are only a few, not infinite'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_ida_otp_transaction.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_ida_otp_transaction.cr_by IS 'Created By : ID or name of the user who create / insert record'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_ida_otp_transaction.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_ida_otp_transaction.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_ida_otp_transaction.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_ida_otp_transaction.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_ida_otp_transaction.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE'; +-- ddl-end -- diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-idrepo-anonymous_profile.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-idrepo-anonymous_profile.sql new file mode 100644 index 00000000..c404d8df --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-idrepo-anonymous_profile.sql @@ -0,0 +1,45 @@ +-- ------------------------------------------------------------------------------------------------- +-- Database Name: mosip_idrepo +-- Table Name : archive.mosip_idrepo_anonymous_profile +-- Purpose : anonymous_profile: Anonymous profiling information for reporting purpose. 
+--
+-- Create By : Manoj SP
+-- Created Date : 10-Sep-2021
+--
+-- Modified Date Modified By Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+-- Sep-2021 Manoj SP Created anonymous_profile table
+-- ------------------------------------------------------------------------------------------
+
+-- object: archive.mosip_idrepo_anonymous_profile | type: TABLE --
+-- DROP TABLE IF EXISTS archive.mosip_idrepo_anonymous_profile CASCADE;
+CREATE TABLE archive.mosip_idrepo_anonymous_profile(
+    id character varying(36) NOT NULL,
+    profile character varying NOT NULL,
+    cr_by character varying(256) NOT NULL,
+    cr_dtimes timestamp NOT NULL,
+    upd_by character varying(256),
+    upd_dtimes timestamp,
+    is_deleted boolean DEFAULT FALSE,
+    del_dtimes timestamp,
+    CONSTRAINT pk_profile_idrepo PRIMARY KEY (id)
+);
+-- ddl-end --
+COMMENT ON TABLE archive.mosip_idrepo_anonymous_profile IS 'anonymous_profile: Anonymous profiling information for reporting purpose.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_idrepo_anonymous_profile.id IS 'Reference ID: System generated id for references in the system.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_idrepo_anonymous_profile.profile IS 'Profile : Contains complete anonymous profile data generated by ID-Repository and stored in plain json text format.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_idrepo_anonymous_profile.cr_by IS 'Created By : ID or name of the user who create / insert record';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_idrepo_anonymous_profile.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_idrepo_anonymous_profile.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_idrepo_anonymous_profile.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_idrepo_anonymous_profile.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_idrepo_anonymous_profile.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
+-- ddl-end --
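-- The profile column holds the anonymous profile as plain JSON text (per its column
-- comment), so in PostgreSQL it can be cast to jsonb for reporting. A minimal sketch,
-- assuming a top-level yearOfBirth field (the field name is illustrative; the actual
-- profile layout is defined by ID-Repository, not by this DDL):
SELECT profile::jsonb ->> 'yearOfBirth' AS year_of_birth, count(*) AS profiles
FROM archive.mosip_idrepo_anonymous_profile
GROUP BY 1
ORDER BY 1;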
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-idrepo-credential_request_status.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-idrepo-credential_request_status.sql
new file mode 100644
index 00000000..63abd7cd
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-idrepo-credential_request_status.sql
@@ -0,0 +1,34 @@
+-- -------------------------------------------------------------------------------------------------
+-- Database Name: mosip_idrepo
+-- Table Name : archive.mosip_idrepo_credential_request_status
+-- Purpose : Credential Request Status:
+--
+-- Create By : Ram Bhatt
+-- Created Date : May-2021
+--
+-- Modified Date Modified By Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+--
+-- ------------------------------------------------------------------------------------------
+
+-- object: archive.mosip_idrepo_credential_request_status | type: TABLE --
+-- DROP TABLE IF EXISTS archive.mosip_idrepo_credential_request_status CASCADE;
+CREATE TABLE archive.mosip_idrepo_credential_request_status (
+    individual_id character varying(500) NOT NULL,
+    individual_id_hash character varying(128) NOT NULL,
+    partner_id character varying(36) NOT NULL,
+    request_id character varying(36),
+    token_id character varying(128),
+    status character varying(36) NOT NULL,
+    id_transaction_limit numeric,
+    id_expiry_timestamp timestamp,
+    cr_by character varying(256) NOT NULL,
+    cr_dtimes timestamp NOT NULL,
+    upd_by character varying(256),
+    upd_dtimes timestamp,
+    is_deleted bool DEFAULT false,
+    del_dtimes timestamp,
+    CONSTRAINT credential_request_status_pk PRIMARY KEY (individual_id_hash,partner_id)
+
+);
+-- ddl-end --
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-idrepo-uin_draft.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-idrepo-uin_draft.sql
new file mode 100644
index 00000000..c14838a8
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-idrepo-uin_draft.sql
@@ -0,0 +1,33 @@
+-- -------------------------------------------------------------------------------------------------
+-- Database Name: mosip_idrepo
+-- Table Name : archive.mosip_idrepo_uin_draft
+-- Purpose : UIN Draft:
+--
+-- Create By : Ram Bhatt
+-- Created Date : Jul-2021
+--
+-- Modified Date Modified By Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+-- Sep-2021 Manoj SP Removed Anonymous Profile column
+-- ------------------------------------------------------------------------------------------
+
+-- object: archive.mosip_idrepo_uin_draft | type: TABLE --
+-- DROP TABLE IF EXISTS archive.mosip_idrepo_uin_draft CASCADE;
+CREATE TABLE archive.mosip_idrepo_uin_draft(
+    reg_id character varying (39) NOT NULL,
+    uin character varying (500) NOT NULL,
+    uin_hash character varying (128) NOT NULL,
+    uin_data bytea,
+    uin_data_hash character varying (64),
+    status_code character varying (32) NOT NULL,
+    cr_by character varying (256) NOT NULL,
+    cr_dtimes timestamp NOT NULL,
+    upd_by character varying (256),
+    upd_dtimes timestamp,
+    is_deleted bool DEFAULT FALSE,
+    del_dtimes timestamp,
+    CONSTRAINT pk_uindft_id PRIMARY KEY (reg_id),
+    CONSTRAINT unq_uin UNIQUE (uin),
+    CONSTRAINT unq_uinhsh UNIQUE (uin_hash)
+);
+-- ddl-end --
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-kernel-otp_transaction.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-kernel-otp_transaction.sql
new file mode 100644
index 00000000..fe2a34e1
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-kernel-otp_transaction.sql
@@ -0,0 +1,56 @@
+
+
+-- object: archive.mosip_kernel_otp_transaction | type: TABLE --
+-- DROP TABLE IF EXISTS archive.mosip_kernel_otp_transaction CASCADE;
+CREATE TABLE archive.mosip_kernel_otp_transaction(
+    id character varying(64) NOT NULL,
+    ref_id character varying(64),
+    ref_id_type character varying(64),
+    otp character varying(8),
+    generated_dtimes timestamp,
+    expiry_dtimes timestamp,
+    validation_retry_count smallint,
+    status_code character varying(64),
+    lang_code character varying(3),
+    cr_by character varying(256),
+    cr_dtimes timestamp,
+    upd_by character varying(256),
+    upd_dtimes timestamp,
+    is_deleted boolean DEFAULT FALSE,
+    del_dtimes timestamp,
+    CONSTRAINT pk_otptrn_id PRIMARY KEY (id)
+
+);
+-- ddl-end --
+COMMENT ON TABLE archive.mosip_kernel_otp_transaction IS 'OTP Transaction: All OTP related data and validation details are maintained here.
'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_kernel_otp_transaction.id IS 'ID: Unique transaction id for each otp transaction request'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_kernel_otp_transaction.ref_id IS 'Reference ID: Reference ID is a reference information received from OTP requester which can be used while validating the OTP. AM: please give examples of ref_id'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_kernel_otp_transaction.ref_id_type IS 'Reference ID Type: Type of information in Reference ID field, used to reference this OTP request. AM: i guess only email & ph are the types. Please specify'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_kernel_otp_transaction.otp IS 'OTP: One Time Pin which is generated based on the configuration setup and sent to the requester application / module.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_kernel_otp_transaction.generated_dtimes IS 'Generated Date Time: Date and Time when the OTP was generated'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_kernel_otp_transaction.expiry_dtimes IS 'Expiry Date Time: Date Time when the OTP will be expired'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_kernel_otp_transaction.validation_retry_count IS 'Validation Retry Count: Validation retry counts of this OTP request. If the validation retry crosses the threshold limit, then the OTP will be de-activated.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_kernel_otp_transaction.status_code IS 'Status Code: Status of the OTP whether it is active or expired. AM: please enumerate the status types. They are only a few, not infinite'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_kernel_otp_transaction.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language. 
';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_kernel_otp_transaction.cr_by IS 'Created By : ID or name of the user who create / insert record';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_kernel_otp_transaction.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_kernel_otp_transaction.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_kernel_otp_transaction.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_kernel_otp_transaction.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_kernel_otp_transaction.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
+-- ddl-end --
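-- The comments above describe the checks an OTP must pass: an unexpired
-- expiry_dtimes and a validation_retry_count under the threshold. A sketch of that
-- check (the :otp placeholder, the ACTIVE literal and the retry limit of 3 are
-- assumptions for illustration; actual values come from kernel configuration, and
-- live validation would run against the source table, not this archive copy):
SELECT id
FROM archive.mosip_kernel_otp_transaction
WHERE otp = :otp
  AND status_code = 'ACTIVE'
  AND expiry_dtimes > now()
  AND validation_retry_count < 3;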
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-master-bulkupload_transaction.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-master-bulkupload_transaction.sql
new file mode 100644
index 00000000..7d8149e0
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-master-bulkupload_transaction.sql
@@ -0,0 +1,61 @@
+
+-- object: archive.mosip_master_bulkupload_transaction | type: TABLE --
+-- DROP TABLE IF EXISTS archive.mosip_master_bulkupload_transaction CASCADE;
+CREATE TABLE archive.mosip_master_bulkupload_transaction(
+    id character varying(36) NOT NULL,
+    entity_name character varying(64) NOT NULL,
+    upload_operation character varying(64) NOT NULL,
+    status_code character varying(36) NOT NULL,
+    record_count integer,
+    uploaded_by character varying(256) NOT NULL,
+    upload_category character varying(36),
+    uploaded_dtimes timestamp NOT NULL,
+    upload_description character varying,
+    lang_code character varying(3) NOT NULL,
+    is_active boolean NOT NULL,
+    cr_by character varying(256) NOT NULL,
+    cr_dtimes timestamp NOT NULL,
+    upd_by character varying(256),
+    upd_dtimes timestamp,
+    is_deleted boolean DEFAULT FALSE,
+    del_dtimes timestamp,
+    CONSTRAINT pk_butrn_id PRIMARY KEY (id)
+
+);
+-- ddl-end --
+COMMENT ON TABLE archive.mosip_master_bulkupload_transaction IS 'Bulk Upload Transaction: This table stores all bulk upload transactions. This includes master table data uploads as well as packet uploads to the registration processor.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_bulkupload_transaction.id IS 'ID: Unique ID assigned to each bulk data upload transaction. Each transaction is identified by this transaction id.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_bulkupload_transaction.entity_name IS 'Entity Name: Name of an entity for which data is being uploaded, This can be master data table name or packet in case of packet upload.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_bulkupload_transaction.upload_operation IS 'Upload Operation: Operation of the bulk upload example operations can be insert, update, delete and packet-upload.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_bulkupload_transaction.status_code IS 'Status Code: Status of the bulk upload transaction, for example in-progress, success or failed.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_bulkupload_transaction.record_count IS 'Record Count: Number of records or packets uploaded in a transaction.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_bulkupload_transaction.uploaded_by IS 'Uploaded By: The user detail of the person who is uploading the packets or master data.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_bulkupload_transaction.upload_category IS 'Upload Category: Upload category will be Master data csv or Packet';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_bulkupload_transaction.uploaded_dtimes IS 'Uploaded Date and Time: Date and time when the master data or packets were uploaded';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_bulkupload_transaction.upload_description IS 'Upload Description: Bulk data upload description, This will have all details about the transaction including upload failure or success messages.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_bulkupload_transaction.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_bulkupload_transaction.is_active IS 'IS_Active : Flag to mark whether the record/device is Active or In-active';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_bulkupload_transaction.cr_by IS 'Created By : ID or name of the user who create / insert record';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_bulkupload_transaction.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_bulkupload_transaction.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_bulkupload_transaction.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_bulkupload_transaction.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_bulkupload_transaction.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
+-- ddl-end --
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-master-device_master_h.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-master-device_master_h.sql
new file mode 100644
index 00000000..a675cc5b
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-master-device_master_h.sql
@@ -0,0 +1,65 @@
+
+
+-- object: archive.mosip_master_device_master_h | type: TABLE --
+-- DROP TABLE IF EXISTS archive.mosip_master_device_master_h CASCADE;
+CREATE TABLE archive.mosip_master_device_master_h(
+    id character varying(36) NOT NULL,
+    name character varying(64) NOT NULL,
+    mac_address character varying(64) NOT NULL,
+    serial_num character varying(64) NOT NULL,
+    ip_address character varying(17),
+    validity_end_dtimes timestamp,
+    dspec_id character varying(36) NOT NULL,
+    zone_code character varying(36) NOT NULL,
+    regcntr_id character varying(10),
+    lang_code character varying(3) ,
+    is_active boolean NOT NULL,
+    cr_by character varying(256) NOT NULL,
+    cr_dtimes timestamp NOT NULL,
+    upd_by character
varying(256), + upd_dtimes timestamp, + is_deleted boolean DEFAULT FALSE, + del_dtimes timestamp, + eff_dtimes timestamp NOT NULL, + CONSTRAINT pk_devicem_h_id PRIMARY KEY (id,eff_dtimes) + +); +-- ddl-end -- +COMMENT ON TABLE archive.mosip_master_device_master_h IS 'Device Master History : This to track changes to master record whenever there is an INSERT/UPDATE/DELETE ( soft delete ), Effective DateTimestamp is used for identifying latest or point in time information. Refer master.device_master table description for details. '; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_device_master_h.id IS 'Device ID : Unique ID generated / assigned for device'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_device_master_h.name IS 'Name : Device name'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_device_master_h.mac_address IS 'Mac Address: Mac address of the device'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_device_master_h.serial_num IS 'Serial Number: Serial number of the device'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_device_master_h.ip_address IS 'IP Address: IP address of the device'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_device_master_h.validity_end_dtimes IS 'Validity End Datetime: Device validity expiry date'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_device_master_h.dspec_id IS 'Device Specification ID : Device specification id refers to master.device_spec.id'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_device_master_h.zone_code IS 'Zone Code : Unique zone code generated or entered by admin while creating zones, It is referred to master.zone.code. '; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_device_master_h.regcntr_id IS 'Registration Center ID : registration center id refers to master.registration_center.id'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_device_master_h.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language. '; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_device_master_h.is_active IS 'IS_Active : Flag to mark whether the record is Active or In-active'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_device_master_h.cr_by IS 'Created By : ID or name of the user who create / insert record'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_device_master_h.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_device_master_h.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_device_master_h.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_device_master_h.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_device_master_h.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_device_master_h.eff_dtimes IS 'Effective Date Timestamp : This to track master record whenever there is an INSERT/UPDATE/DELETE ( soft delete ). The current record is effective from this date-time. 
';
+-- ddl-end --
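-- Each *_h history table here keys on (id, eff_dtimes) so the state of a record at
-- any past instant can be recovered, as the eff_dtimes comments note. A minimal
-- point-in-time lookup sketch (the :device_id and :as_of placeholders are illustrative):
SELECT *
FROM archive.mosip_master_device_master_h
WHERE id = :device_id
  AND eff_dtimes <= :as_of
ORDER BY eff_dtimes DESC
LIMIT 1;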
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-master-machine_master_h.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-master-machine_master_h.sql
new file mode 100644
index 00000000..4b41c0ef
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-master-machine_master_h.sql
@@ -0,0 +1,48 @@
+CREATE TABLE archive.mosip_master_machine_master_h(
+    id character varying(10) NOT NULL,
+    name character varying(64) NOT NULL,
+    mac_address character varying(64),
+    serial_num character varying(64),
+    ip_address character varying(17),
+    validity_end_dtimes timestamp,
+    mspec_id character varying(36) NOT NULL,
+    public_key character varying(1024),
+    key_index character varying(128),
+    sign_public_key character varying(1024),
+    sign_key_index character varying(128),
+    zone_code character varying(36) NOT NULL,
+    regcntr_id character varying(10),
+    lang_code character varying(3) ,
+    is_active boolean NOT NULL,
+    cr_by character varying(256) NOT NULL,
+    cr_dtimes timestamp NOT NULL,
+    upd_by character varying(256),
+    upd_dtimes timestamp,
+    is_deleted boolean DEFAULT FALSE,
+    del_dtimes timestamp,
+    eff_dtimes timestamp NOT NULL,
+    CONSTRAINT pk_machm_h_id PRIMARY KEY (id,eff_dtimes)
+);
+COMMENT ON TABLE archive.mosip_master_machine_master_h IS 'Machine Master History : This to track changes to master record whenever there is an INSERT/UPDATE/DELETE ( soft delete ), Effective DateTimestamp is used for identifying latest or point in time information. Refer master.machine_master table description for details. ';
+COMMENT ON COLUMN archive.mosip_master_machine_master_h.id IS 'Machine ID : Unique ID generated / assigned for machine';
+COMMENT ON COLUMN archive.mosip_master_machine_master_h.name IS 'Name : Machine name';
+COMMENT ON COLUMN archive.mosip_master_machine_master_h.mac_address IS 'Mac Address: Mac address of the machine';
+COMMENT ON COLUMN archive.mosip_master_machine_master_h.serial_num IS 'Serial Number: Serial number of the machine';
+COMMENT ON COLUMN archive.mosip_master_machine_master_h.ip_address IS 'IP Address: IP address of the machine';
+COMMENT ON COLUMN archive.mosip_master_machine_master_h.validity_end_dtimes IS 'Validity End Datetime: Machine validity expiry date';
+COMMENT ON COLUMN archive.mosip_master_machine_master_h.mspec_id IS 'Machine Specification ID : Machine specification id refers to master.machine_spec.id';
+COMMENT ON COLUMN archive.mosip_master_machine_master_h.public_key IS 'Public Key: Public key of the machine, This will be Machine Identification TPM Endorsement key';
+COMMENT ON COLUMN archive.mosip_master_machine_master_h.key_index IS 'Key Index: Fingerprint[Unique Hash ] for the TPM public key';
+COMMENT ON COLUMN archive.mosip_master_machine_master_h.sign_public_key IS 'Signed Public Key: Field for signature verification publicKey';
+COMMENT ON COLUMN archive.mosip_master_machine_master_h.sign_key_index IS 'Signed Key Index: Field for signature verification public key fingerprint';
+COMMENT ON COLUMN archive.mosip_master_machine_master_h.zone_code IS 'Zone Code : Unique zone code generated or entered by admin while creating zones, It is referred to master.zone.code. ';
+COMMENT ON COLUMN archive.mosip_master_machine_master_h.regcntr_id IS 'Registration Center ID : registration center id refers to master.registration_center.id';
+COMMENT ON COLUMN archive.mosip_master_machine_master_h.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language. ';
+COMMENT ON COLUMN archive.mosip_master_machine_master_h.is_active IS 'IS_Active : Flag to mark whether the record is Active or In-active';
+COMMENT ON COLUMN archive.mosip_master_machine_master_h.cr_by IS 'Created By : ID or name of the user who create / insert record';
+COMMENT ON COLUMN archive.mosip_master_machine_master_h.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+COMMENT ON COLUMN archive.mosip_master_machine_master_h.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+COMMENT ON COLUMN archive.mosip_master_machine_master_h.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+COMMENT ON COLUMN archive.mosip_master_machine_master_h.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
+COMMENT ON COLUMN archive.mosip_master_machine_master_h.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
+COMMENT ON COLUMN archive.mosip_master_machine_master_h.eff_dtimes IS 'Effective Date Timestamp : This to track master record whenever there is an INSERT/UPDATE/DELETE ( soft delete ). The current record is effective from this date-time. ';
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-master-registration_center_h.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-master-registration_center_h.sql
new file mode 100644
index 00000000..28774a9f
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-master-registration_center_h.sql
@@ -0,0 +1,101 @@
+
+
+-- object: archive.mosip_master_registration_center_h | type: TABLE --
+-- DROP TABLE IF EXISTS archive.mosip_master_registration_center_h CASCADE;
+CREATE TABLE archive.mosip_master_registration_center_h(
+    id character varying(10) NOT NULL,
+    name character varying(128) NOT NULL,
+    cntrtyp_code character varying(36),
+    addr_line1 character varying(256),
+    addr_line2 character varying(256),
+    addr_line3 character varying(256),
+    latitude character varying(32),
+    longitude character varying(32),
+    location_code character varying(36) NOT NULL,
+    contact_phone character varying(16),
+    contact_person character varying(128),
+    number_of_kiosks smallint,
+    working_hours character varying(32),
+    per_kiosk_process_time time,
+    center_start_time time,
+    center_end_time time,
+    lunch_start_time time,
+    lunch_end_time time,
+    time_zone character varying(64),
+    holiday_loc_code character varying(36),
+    zone_code character varying(36) NOT NULL,
+    lang_code character varying(3) NOT NULL,
+    is_active boolean NOT NULL,
+    cr_by character varying(256) NOT NULL,
+    cr_dtimes timestamp NOT NULL,
+    upd_by character varying(256),
+    upd_dtimes timestamp,
+    is_deleted boolean DEFAULT FALSE ,
+    del_dtimes timestamp,
+    eff_dtimes timestamp NOT NULL,
+    CONSTRAINT pk_regcntr_h_code PRIMARY KEY (id,lang_code,eff_dtimes)
+
+);
+-- ddl-end --
+COMMENT ON TABLE archive.mosip_master_registration_center_h IS 'Registration Center History : This to track changes to master record whenever there is an INSERT/UPDATE/DELETE ( soft delete ), Effective DateTimestamp is used for identifying latest or point in time information. Refer master.registration_center table description for details.';
Refer master.registration_center table description for details.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.id IS 'Registration Center ID : Unique ID generated / assigned for a registration center';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.name IS 'Name : Registration center name';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.cntrtyp_code IS 'Center Type Code : different types of registration centers. Refers master.reg_center_type.code';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.addr_line1 IS 'Registration Center Address Line1 : for ex. Number, street name, locality, etc.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.addr_line2 IS 'Registration Center Address Line2 : for ex. Number, street name, locality, etc.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.addr_line3 IS 'Registration Center Address Line3 : for ex. locality, landmark, area etc.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.latitude IS 'Latitude: Latitude of the registration center location as per GPS standards / format';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.longitude IS 'Longitude: Longitude of the registration center location as per GPS standards / format';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.location_code IS 'Location Code: Location code of the place where the registration center is located. Refers to master.location.code';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.contact_phone IS 'Contact Phone : Phone number of the person to be contacted for any additional details.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.contact_person IS 'Contact Person : Name of the person to be contacted for any additional details.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.number_of_kiosks IS 'Number of Kiosks: Total number of kiosks available at a registration center';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.working_hours IS 'Working hours: Working hours of a registration center (8.00 AM - 6.00 PM) ';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.per_kiosk_process_time IS 'Process Time Per Registration: Average process time for registration process per kiosk';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.center_start_time IS 'Center Start Time : registration center working opening hour / start time.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.center_end_time IS 'Center End Time : registration center working closing hour / end time.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.lunch_start_time IS 'Lunch Start Time: Registration center''s lunch break start time';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.lunch_end_time IS 'Lunch End Time: Registration center''s lunch break end time';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.time_zone IS 'Time Zone: Registration center''s local timezone, e.g. GMT, PST, IST';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.holiday_loc_code IS 'Holiday Location Code: Location code at which holidays are defined';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.zone_code IS 'Zone Code : Unique zone code generated or entered by admin while creating zones. It is referred to master.zone.code. ';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language. ';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.is_active IS 'IS_Active : Flag to mark whether the record is Active or In-active';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.cr_by IS 'Created By : ID or name of the user who create / insert record';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_registration_center_h.eff_dtimes IS 'Effective Date Timestamp : This to track master record whenever there is an INSERT/UPDATE/DELETE ( soft delete ). The current record is effective from this date-time. ';
+-- ddl-end --
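+-- Example (illustrative only): the latest effective record per center and
+-- language can be read with PostgreSQL's DISTINCT ON.
+-- SELECT DISTINCT ON (id, lang_code) *
+--   FROM archive.mosip_master_registration_center_h
+--  ORDER BY id, lang_code, eff_dtimes DESC;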
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-master-user_detail_h.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-master-user_detail_h.sql
new file mode 100644
index 00000000..7d3f161a
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-master-user_detail_h.sql
@@ -0,0 +1,56 @@
+
+
+-- object: archive.mosip_master_user_detail_h | type: TABLE --
+-- DROP TABLE IF EXISTS archive.mosip_master_user_detail_h CASCADE;
+CREATE TABLE archive.mosip_master_user_detail_h(
+	id character varying(256) NOT NULL,
+	name character varying(64) NOT NULL,
+	status_code character varying(36),
+	regcntr_id character varying(10),
+	lang_code character varying(3),
+	last_login_dtimes timestamp,
+	last_login_method character varying(64),
+	is_active boolean NOT NULL,
+	cr_by character varying(256) NOT NULL,
+	cr_dtimes timestamp NOT NULL,
+	upd_by character varying(256),
+	upd_dtimes timestamp,
+	is_deleted boolean DEFAULT FALSE,
+	del_dtimes timestamp,
+	eff_dtimes timestamp NOT NULL,
+	CONSTRAINT pk_usrdtl_h_id PRIMARY KEY (id,eff_dtimes)
+
+);
+-- ddl-end --
+COMMENT ON TABLE archive.mosip_master_user_detail_h IS 'User Detail History : This to track changes to master record whenever there is an INSERT/UPDATE/DELETE ( soft delete ), Effective DateTimestamp is used for identifying latest or point in time information. 
Refer master.user_detail table description for details.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_user_detail_h.id IS 'User ID : Unique ID generated / assigned for a user'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_user_detail_h.name IS 'Name : User name'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_user_detail_h.status_code IS 'Status Code: User status. Refers to master.status_master.code'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_user_detail_h.regcntr_id IS 'Registration Center ID : registration center id refers to master.registration_center.id'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_user_detail_h.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language. '; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_user_detail_h.last_login_dtimes IS 'Last Login Datetime: Date and time of the last login by the user'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_user_detail_h.last_login_method IS 'Last Login Method: Previous login method in which the user logged into the system'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_user_detail_h.is_active IS 'IS_Active : Flag to mark whether the record is Active or In-active'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_user_detail_h.cr_by IS 'Created By : ID or name of the user who create / insert record'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_user_detail_h.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_user_detail_h.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_user_detail_h.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_user_detail_h.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_user_detail_h.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_master_user_detail_h.eff_dtimes IS 'Effective Date Timestamp : This to track master record whenever there is an INSERT/UPDATE/DELETE ( soft delete ). The current record is effective from this date-time. 
';
+-- ddl-end --
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-master-zone_user_h.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-master-zone_user_h.sql
new file mode 100644
index 00000000..052275b5
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-master-zone_user_h.sql
@@ -0,0 +1,44 @@
+
+
+-- object: archive.mosip_master_zone_user_h | type: TABLE --
+-- DROP TABLE IF EXISTS archive.mosip_master_zone_user_h CASCADE;
+CREATE TABLE archive.mosip_master_zone_user_h(
+	zone_code character varying(36) NOT NULL,
+	usr_id character varying(256) NOT NULL,
+	lang_code character varying(3),
+	is_active boolean NOT NULL,
+	cr_by character varying(256) NOT NULL,
+	cr_dtimes timestamp NOT NULL,
+	upd_by character varying(256),
+	upd_dtimes timestamp,
+	is_deleted boolean DEFAULT FALSE,
+	del_dtimes timestamp,
+	eff_dtimes timestamp NOT NULL,
+	CONSTRAINT pk_zoneuserh PRIMARY KEY (zone_code,usr_id,eff_dtimes)
+
+);
+-- ddl-end --
+COMMENT ON TABLE archive.mosip_master_zone_user_h IS 'Zone User History : This to track changes to master record whenever there is an INSERT/UPDATE/DELETE ( soft delete ), Effective DateTimestamp is used for identifying latest or point in time information. Refer master.zone_user table description for details.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_zone_user_h.zone_code IS 'Code : Unique zone code generated or entered by admin while creating zones. ';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_zone_user_h.usr_id IS 'User ID : ID of the user which is mapped to the zone. This user will have defined roles, based on which the user is mapped to zones.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_zone_user_h.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language. ';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_zone_user_h.is_active IS 'IS_Active : Flag to mark whether the record is Active or In-active';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_zone_user_h.cr_by IS 'Created By : ID or name of the user who create / insert record';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_zone_user_h.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_zone_user_h.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_zone_user_h.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_zone_user_h.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_zone_user_h.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_master_zone_user_h.eff_dtimes IS 'Effective Date Timestamp : This to track master record whenever there is an INSERT/UPDATE/DELETE ( soft delete ). The current record is effective from this date-time. 
'; +-- ddl-end -- diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-otp_transaction.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-otp_transaction.sql deleted file mode 100644 index 7034abf6..00000000 --- a/data-archive/db_scripts/mosip_archive/ddl/archive-otp_transaction.sql +++ /dev/null @@ -1,62 +0,0 @@ --- ------------------------------------------------------------------------------------------------- --- Database Name: mosip_archive --- Table Name : archive.otp_transaction --- Purpose : OTP Transaction: All OTP related data and validation details are maintained here for ID Authentication --- Create By : Sadanandegowda --- Created Date : Dec-2020 --- --- Modified Date Modified By Comments / Remarks --- ------------------------------------------------------------------------------------------ --- --- ------------------------------------------------------------------------------------------ --- object: archive.otp_transaction | type: TABLE -- --- DROP TABLE IF EXISTS archive.otp_transaction CASCADE; -CREATE TABLE archive.otp_transaction( - id character varying(36) NOT NULL, - ref_id character varying(64) NOT NULL, - otp_hash character varying(512) NOT NULL, - generated_dtimes timestamp, - expiry_dtimes timestamp, - validation_retry_count smallint, - status_code character varying(36), - lang_code character varying(3), - cr_by character varying(256) NOT NULL, - cr_dtimes timestamp NOT NULL, - upd_by character varying(256), - upd_dtimes timestamp, - is_deleted boolean, - del_dtimes timestamp, - CONSTRAINT pk_otpt_id PRIMARY KEY (id) - -); --- ddl-end -- -COMMENT ON TABLE archive.otp_transaction IS 'OTP Transaction: All OTP related data and validation details are maintained here for ID Authentication module.'; --- ddl-end -- -COMMENT ON COLUMN archive.otp_transaction.id IS 'ID: Key alias id is a unique identifier (UUID) used as an alias of the encryption key stored in keystore like HSM (hardware security module).'; --- ddl-end -- -COMMENT ON COLUMN archive.otp_transaction.ref_id IS 'Reference ID: Reference ID is a reference information received from OTP requester which can be used while validating the OTP. AM: please give examples of ref_id'; --- ddl-end -- -COMMENT ON COLUMN archive.otp_transaction.otp_hash IS 'OTP Hash: Hash of id, ref_id and otp which is generated based on the configuration setup and sent to the requester application / module.'; --- ddl-end -- -COMMENT ON COLUMN archive.otp_transaction.generated_dtimes IS 'Generated Date Time: Date and Time when the OTP was generated'; --- ddl-end -- -COMMENT ON COLUMN archive.otp_transaction.expiry_dtimes IS 'Expiry Date Time: Date Time when the OTP will be expired'; --- ddl-end -- -COMMENT ON COLUMN archive.otp_transaction.validation_retry_count IS 'Validation Retry Count: Validation retry counts of this OTP request. If the validation retry crosses the threshold limit, then the OTP will be de-activated.'; --- ddl-end -- -COMMENT ON COLUMN archive.otp_transaction.status_code IS 'Status Code: Status of the OTP whether it is active or expired. AM: please enumerate the status types. They are only a few, not infinite'; --- ddl-end -- -COMMENT ON COLUMN archive.otp_transaction.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. 
The value of some of the attributes in current record is stored in this respective language.'; --- ddl-end -- -COMMENT ON COLUMN archive.otp_transaction.cr_by IS 'Created By : ID or name of the user who create / insert record'; --- ddl-end -- -COMMENT ON COLUMN archive.otp_transaction.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; --- ddl-end -- -COMMENT ON COLUMN archive.otp_transaction.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; --- ddl-end -- -COMMENT ON COLUMN archive.otp_transaction.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; --- ddl-end -- -COMMENT ON COLUMN archive.otp_transaction.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.'; --- ddl-end -- -COMMENT ON COLUMN archive.otp_transaction.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE'; --- ddl-end -- \ No newline at end of file diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-pms-auth_policy_h.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-pms-auth_policy_h.sql new file mode 100644 index 00000000..afe09ee6 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-pms-auth_policy_h.sql @@ -0,0 +1,81 @@ +-- ------------------------------------------------------------------------------------------------- +-- Database Name: mosip_pms +-- Table Name : archive.mosip_pms_auth_policy_h +-- Purpose : Authentication Policy History: This to track changes to master record whenever there is an INSERT/UPDATE/DELETE ( soft delete ), Effective DateTimestamp is used for identifying latest or point in time information. Refer pmp.auth_policy table description for details. 
+--
+-- Create By     : Sadanandegowda DM
+-- Created Date  : Aug-2020
+--
+-- Modified Date        Modified By         Comments / Remarks
+-- ------------------------------------------------------------------------------------------
+-- Aug-2020             Sadanandegowda      Added policy_type, version, schema and validity
+-- Aug-2020             Sadanandegowda      Updated Schema name
+-- Jan-2021             Ram Bhatt           Set is_deleted flag to not null and default false
+-- Mar-2021             Ram Bhatt           Reverting is_deleted flag not null changes for 1.1.5
+-- ------------------------------------------------------------------------------------------
+
+-- object: archive.mosip_pms_auth_policy_h | type: TABLE --
+-- DROP TABLE IF EXISTS archive.mosip_pms_auth_policy_h CASCADE;
+CREATE TABLE archive.mosip_pms_auth_policy_h(
+	id character varying(36) NOT NULL,
+	eff_dtimes timestamp NOT NULL,
+	policy_group_id character varying(36),
+	name character varying(128) NOT NULL,
+	descr character varying(256) NOT NULL,
+	policy_file_id character varying(5120) NOT NULL,
+	policy_type character varying(36) NOT NULL,
+	version character varying(8) NOT NULL,
+	policy_schema character varying(5120),
+	valid_from_date timestamp NOT NULL,
+	valid_to_date timestamp NOT NULL,
+	is_active boolean NOT NULL,
+	cr_by character varying(256) NOT NULL,
+	cr_dtimes timestamp NOT NULL,
+	upd_by character varying(256),
+	upd_dtimes timestamp,
+	is_deleted boolean DEFAULT FALSE,
+	del_dtimes timestamp,
+	CONSTRAINT pk_apolh PRIMARY KEY (id,eff_dtimes),
+	CONSTRAINT uk_apolh UNIQUE (eff_dtimes,policy_group_id,name)
+
+);
+-- ddl-end --
+COMMENT ON TABLE archive.mosip_pms_auth_policy_h IS 'Authentication Policy History: This to track changes to master record whenever there is an INSERT/UPDATE/DELETE ( soft delete ), Effective DateTimestamp is used for identifying latest or point in time information. Refer pmp.auth_policy table description for details.
+';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_pms_auth_policy_h.id IS 'ID: A unique identity ';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_pms_auth_policy_h.eff_dtimes IS 'Effective Date Timestamp : This to track master record whenever there is an INSERT/UPDATE/DELETE ( soft delete ). The current record is effective from this date-time. ';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_pms_auth_policy_h.policy_group_id IS 'Policy Group ID: Id of the policy group to which this policy belongs.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_pms_auth_policy_h.name IS 'Name: Name of the policy';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_pms_auth_policy_h.descr IS 'Description: Description of the policy';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_pms_auth_policy_h.policy_file_id IS 'Policy File ID: Policies defined by the Policy / Partner manager are stored in a file system or key-based storage like CEPH.
+The policy file details (location / id / key) are stored here.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_pms_auth_policy_h.policy_type IS 'Policy Type: Type of the policy, for example authentication, Data_Share, Credential_Issuance, etc.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_pms_auth_policy_h.version IS 'Policy Version : Version number of the policy; the version is upgraded based on changes to the policy';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_pms_auth_policy_h.policy_schema IS 'Policy Schema: Policy schema, which is populated based on the policy type';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_pms_auth_policy_h.valid_from_date IS 'Policy Valid From Date: Date and time from when the policy is valid';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_pms_auth_policy_h.valid_to_date IS 'Valid To Date: Date and time till when the policy is valid';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_pms_auth_policy_h.is_active IS 'IS_Active : Flag to mark whether the record is Active or In-active';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_pms_auth_policy_h.cr_by IS 'Created By : ID or name of the user who create / insert record';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_pms_auth_policy_h.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_pms_auth_policy_h.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_pms_auth_policy_h.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_pms_auth_policy_h.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
+-- ddl-end --
+COMMENT ON COLUMN archive.mosip_pms_auth_policy_h.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
+-- ddl-end --
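+-- Example (illustrative only; the id and timestamp values are hypothetical):
+-- resolve the policy record that was effective at a given instant.
+-- SELECT id, name, version, valid_from_date, valid_to_date
+--   FROM archive.mosip_pms_auth_policy_h
+--  WHERE id = 'policy-001'
+--    AND eff_dtimes <= TIMESTAMP '2020-12-31 23:59:59'
+--  ORDER BY eff_dtimes DESC
+--  LIMIT 1;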
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-pms-partner_h.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-pms-partner_h.sql
new file mode 100644
index 00000000..ca15094b
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-pms-partner_h.sql
@@ -0,0 +1,45 @@
+
+CREATE TABLE archive.mosip_pms_partner_h(
+	id character varying(36) NOT NULL,
+	eff_dtimes timestamp NOT NULL,
+	policy_group_id character varying(36),
+	name character varying(128) NOT NULL,
+	address character varying(2000),
+	contact_no character varying(16),
+	email_id character varying(254),
+	certificate_alias character varying(128),
+	user_id character varying(256) NOT NULL,
+	partner_type_code character varying(36) NOT NULL,
+	approval_status character varying(36) NOT NULL,
+	is_active boolean NOT NULL,
+	cr_by character varying(256) NOT NULL,
+	cr_dtimes timestamp NOT NULL,
+	upd_by character varying(256),
+	upd_dtimes timestamp,
+	is_deleted boolean DEFAULT FALSE,
+	del_dtimes timestamp,
+	lang_code character varying(36),
+	logo_url character varying(256),
+	addl_info character varying,
+	CONSTRAINT pk_parth PRIMARY KEY (id,eff_dtimes)
+);
+
+COMMENT ON TABLE archive.mosip_pms_partner_h IS 'Partner History: This to track changes to master record whenever there is an INSERT/UPDATE/DELETE ( soft delete ), Effective DateTimestamp is used for identifying latest or point in time information. Refer pmp.partner table description for details. ';
+COMMENT ON COLUMN archive.mosip_pms_partner_h.id IS 'Partner ID : Unique ID generated / assigned for partner';
+COMMENT ON COLUMN archive.mosip_pms_partner_h.eff_dtimes IS 'Effective Date Timestamp : This to track master record whenever there is an INSERT/UPDATE/DELETE ( soft delete ). The current record is effective from this date-time. ';
+COMMENT ON COLUMN archive.mosip_pms_partner_h.policy_group_id IS 'Policy Group ID: Policy group to which the partner registers to avail the auth services.';
+COMMENT ON COLUMN archive.mosip_pms_partner_h.name IS 'Name: Name of the Partner.';
+COMMENT ON COLUMN archive.mosip_pms_partner_h.address IS 'Address: Address of the partner organization';
+COMMENT ON COLUMN archive.mosip_pms_partner_h.contact_no IS 'Contact Number: Contact number of the partner organization or the contact person';
+COMMENT ON COLUMN archive.mosip_pms_partner_h.email_id IS 'Email ID: Email ID of the partner organization''s contact person';
+COMMENT ON COLUMN archive.mosip_pms_partner_h.certificate_alias IS 'Certificate Alias: Certificate alias provided by the partner to MOSIP to use its authentication request data.';
+COMMENT ON COLUMN archive.mosip_pms_partner_h.user_id IS 'Partner Admin: When a partner registers themselves to avail auth services, a user id is created for them to login to partner management portal to perform a few operational activities. Currently only one user is created per partner.';
+COMMENT ON COLUMN archive.mosip_pms_partner_h.partner_type_code IS 'Partner Type Code: Partner type code for different types of partners... Referenced from pmp.partner_type table';
+COMMENT ON COLUMN archive.mosip_pms_partner_h.approval_status IS 'Approval Status: Status of the partner, indicating whether the partner is pending, approved or rejected by the partner admin';
+COMMENT ON COLUMN archive.mosip_pms_partner_h.is_active IS 'IS_Active : Flag to mark whether the record is Active or In-active';
+COMMENT ON COLUMN archive.mosip_pms_partner_h.cr_by IS 'Created By : ID or name of the user who create / insert record';
+COMMENT ON COLUMN archive.mosip_pms_partner_h.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+COMMENT ON COLUMN archive.mosip_pms_partner_h.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+COMMENT ON COLUMN archive.mosip_pms_partner_h.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+COMMENT ON COLUMN archive.mosip_pms_partner_h.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
+COMMENT ON COLUMN archive.mosip_pms_partner_h.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
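+-- Example (illustrative only; the partner id is hypothetical): trace how a
+-- partner's approval status changed over time.
+-- SELECT eff_dtimes, approval_status, upd_by
+--   FROM archive.mosip_pms_partner_h
+--  WHERE id = 'P0001'
+--  ORDER BY eff_dtimes;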
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-pms-secure_biometric_interface_h.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-pms-secure_biometric_interface_h.sql
new file mode 100644
index 00000000..b7133732
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/ddl/archive-pms-secure_biometric_interface_h.sql
@@ -0,0 +1,37 @@
+-- TODO: check if provider_id non null constraint is required.
+CREATE TABLE archive.mosip_pms_secure_biometric_interface_h(
+	id character varying(36) NOT NULL,
+	sw_binary_hash bytea NOT NULL,
+	sw_version character varying(64) NOT NULL,
+	sw_cr_dtimes timestamp,
+	sw_expiry_dtimes timestamp,
+	approval_status character varying(36) NOT NULL,
+	is_active boolean NOT NULL,
+	cr_by character varying(256) NOT NULL,
+	cr_dtimes timestamp NOT NULL,
+	upd_by character varying(256),
+	upd_dtimes timestamp,
+	is_deleted boolean DEFAULT FALSE,
+	del_dtimes timestamp,
+	eff_dtimes timestamp NOT NULL,
+	-- provider_id character varying(36) NOT NULL,
+	provider_id character varying(36),
+	partner_org_name character varying(128),
+	CONSTRAINT pk_mdsh_id PRIMARY KEY (id,eff_dtimes)
+);
+
+COMMENT ON TABLE archive.mosip_pms_secure_biometric_interface_h IS 'MOSIP Secure Biometric Interface History : History of changes of any MOSIP secure biometric interface will be stored in history table to track any changes for future validations.';
+COMMENT ON COLUMN archive.mosip_pms_secure_biometric_interface_h.id IS 'ID: Unique service ID, generated by the MOSIP system';
+COMMENT ON COLUMN archive.mosip_pms_secure_biometric_interface_h.sw_binary_hash IS 'Software Binary Hash : It is the software binary stored in the MOSIP system for the devices';
+COMMENT ON COLUMN archive.mosip_pms_secure_biometric_interface_h.sw_version IS 'Software Version : Version of the stored software';
+COMMENT ON COLUMN archive.mosip_pms_secure_biometric_interface_h.sw_cr_dtimes IS 'Software Created Date Time: Date and Time on which this software is created';
+COMMENT ON COLUMN archive.mosip_pms_secure_biometric_interface_h.sw_expiry_dtimes IS 'Software Expiry Date Time: Expiry date and time of the device software';
+COMMENT ON COLUMN archive.mosip_pms_secure_biometric_interface_h.approval_status IS 'Approval Status';
+COMMENT ON COLUMN archive.mosip_pms_secure_biometric_interface_h.is_active IS 'IS_Active : Flag to mark whether the record/device is Active or In-active';
+COMMENT ON COLUMN archive.mosip_pms_secure_biometric_interface_h.cr_by IS 'Created By : ID or name of the user who create / insert record';
+COMMENT ON COLUMN archive.mosip_pms_secure_biometric_interface_h.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
+COMMENT ON COLUMN archive.mosip_pms_secure_biometric_interface_h.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
+COMMENT ON COLUMN archive.mosip_pms_secure_biometric_interface_h.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
+COMMENT ON COLUMN archive.mosip_pms_secure_biometric_interface_h.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
+COMMENT ON COLUMN archive.mosip_pms_secure_biometric_interface_h.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
+COMMENT ON COLUMN archive.mosip_pms_secure_biometric_interface_h.eff_dtimes IS 'Effective Date Timestamp : This to track master record whenever there is an INSERT/UPDATE/DELETE ( soft delete ). The current record is effective from this date-time.';
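+-- Example (illustrative only; the cutoff date is hypothetical): list archived
+-- interface versions whose software had already expired at a given date.
+-- SELECT id, sw_version, sw_expiry_dtimes
+--   FROM archive.mosip_pms_secure_biometric_interface_h
+--  WHERE sw_expiry_dtimes < TIMESTAMP '2021-01-01 00:00:00';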
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-processed_prereg_list.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-processed_prereg_list.sql
deleted file mode 100644
index fecc7d03..00000000
--- a/data-archive/db_scripts/mosip_archive/ddl/archive-processed_prereg_list.sql
+++ /dev/null
@@ -1,41 +0,0 @@
--- -------------------------------------------------------------------------------------------------
--- Database Name: mosip_archive
--- Table Name : archive.processed_prereg_list
--- Purpose : Table to store all the pre-registration list received from registration processor within pre-registration module
--- Create By : Sadanandegowda
--- Created Date : Dec-2020
---
--- Modified Date        Modified By         Comments / Remarks
--- ------------------------------------------------------------------------------------------
---
--- ------------------------------------------------------------------------------------------
--- object: archive.processed_prereg_list | type: TABLE --
--- DROP TABLE IF EXISTS archive.processed_prereg_list CASCADE;
-CREATE TABLE archive.processed_prereg_list(
-	prereg_id character varying(36) NOT NULL,
-	first_received_dtimes timestamp NOT NULL,
-	status_code character varying(36) NOT NULL,
-	status_comments character varying(1024),
-	prereg_trn_id character varying(36),
-	lang_code character varying(3) NOT NULL,
-	cr_by character varying(256) NOT NULL,
-	cr_dtimes timestamp NOT NULL,
-	upd_by character varying(256),
-	upd_dtimes timestamp,
-	is_deleted boolean,
-	del_dtimes timestamp,
-	CONSTRAINT pprlst_pk PRIMARY KEY (prereg_id)
-
-);
--- ddl-end --
-COMMENT ON TABLE archive.processed_prereg_list IS 'Table to store all the pre-registration list received from registration processor within pre-registration module';
--- ddl-end --
-COMMENT ON COLUMN archive.processed_prereg_list.prereg_id IS 'Pre-registration id that was consumed by registration processor to generate UIN';
--- ddl-end --
-COMMENT ON COLUMN archive.processed_prereg_list.first_received_dtimes IS 'Datetime when the pre-registration id was first recevied';
--- ddl-end --
-COMMENT ON COLUMN archive.processed_prereg_list.status_code IS 'status of the pre-registration status update into actual tables';
--- ddl-end --
-COMMENT ON COLUMN archive.processed_prereg_list.status_comments IS 'status comments of the pre-registration status update into actual tables';
--- ddl-end --
-
diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-reg_appointment_consumed.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-reg_appointment_consumed.sql
deleted file mode 100644
index 630f4e46..00000000
--- a/data-archive/db_scripts/mosip_archive/ddl/archive-reg_appointment_consumed.sql
+++ /dev/null
@@ -1,58 +0,0 @@
--- -------------------------------------------------------------------------------------------------
--- Database Name: mosip_archive
--- Table Name : archive.reg_appointment_consumed
--- Purpose : Registration Appointment Consumed: Stores all the appointment requests booked by an individual at a registration center that are consumed
--- Create By : Sadanandegowda
--- Created Date : Dec-2020
---
--- Modified Date        Modified By         Comments / Remarks
--- ------------------------------------------------------------------------------------------
---
--- ------------------------------------------------------------------------------------------
--- object: archive.reg_appointment_consumed | type: TABLE --
--- DROP TABLE IF EXISTS 
archive.reg_appointment_consumed CASCADE; -CREATE TABLE archive.reg_appointment_consumed( - id character varying(36) NOT NULL, - regcntr_id character varying(10) NOT NULL, - prereg_id character varying(36) NOT NULL, - booking_dtimes timestamp NOT NULL, - appointment_date date, - slot_from_time time, - slot_to_time time, - lang_code character varying(3) NOT NULL, - cr_by character varying(256) NOT NULL, - cr_dtimes timestamp NOT NULL, - upd_by character varying(256), - upd_dtimes timestamp, - CONSTRAINT pk_rappmntc_id PRIMARY KEY (id), - CONSTRAINT uk_rappmntc_id UNIQUE (prereg_id) - -); --- ddl-end -- -COMMENT ON TABLE archive.reg_appointment_consumed IS 'Registration Appointment Consumed: Stores all the appointment requests booked by an individual at a registration center that are consumed. '; --- ddl-end -- -COMMENT ON COLUMN archive.reg_appointment_consumed.id IS 'ID: Unique id generated for the registration appointment booking.'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_appointment_consumed.regcntr_id IS 'Registration Center ID: Id of the Registration Center where the appointment is taken. Refers to master.registration_center.id'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_appointment_consumed.prereg_id IS 'Pre-Registration Id: Pre-registration id for which registration appointment is taken.'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_appointment_consumed.booking_dtimes IS 'Booking Date Time: Date and Time when the appointment booking is done.'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_appointment_consumed.appointment_date IS 'Appointment Date: Date for which an individual has taken an aopointment for registration at a registration center'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_appointment_consumed.slot_from_time IS 'Slot From Time: Start time of the appointment slot.'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_appointment_consumed.slot_to_time IS 'Slot To Time: End time of the appointment slot.'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_appointment_consumed.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language.'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_appointment_consumed.cr_by IS 'Created By : ID or name of the user who create / insert record.'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_appointment_consumed.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_appointment_consumed.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_appointment_consumed.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; --- ddl-end -- - diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-reg_demo_dedupe_list.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-reg_demo_dedupe_list.sql deleted file mode 100644 index b2612231..00000000 --- a/data-archive/db_scripts/mosip_archive/ddl/archive-reg_demo_dedupe_list.sql +++ /dev/null @@ -1,48 +0,0 @@ --- ------------------------------------------------------------------------------------------------- --- Database Name: mosip_archive --- Table Name : archive.reg_demo_dedupe_list --- Purpose : Registration Demographic Deduplication List: List of matched UIN / RIDs, as part of demographic data. 
--- Create By : Sadanandegowda --- Created Date : Dec-2020 --- --- Modified Date Modified By Comments / Remarks --- ------------------------------------------------------------------------------------------ --- --- ------------------------------------------------------------------------------------------ - --- object: archive.reg_demo_dedupe_list | type: TABLE -- --- DROP TABLE IF EXISTS archive.reg_demo_dedupe_list CASCADE; -CREATE TABLE archive.reg_demo_dedupe_list( - regtrn_id character varying(36) NOT NULL, - matched_reg_id character varying(39) NOT NULL, - reg_id character varying(39) NOT NULL, - cr_by character varying(256) NOT NULL, - cr_dtimes timestamp NOT NULL, - upd_by character varying(256), - upd_dtimes timestamp, - is_deleted boolean, - del_dtimes timestamp, - CONSTRAINT pk_regded PRIMARY KEY (matched_reg_id,regtrn_id) - -); --- ddl-end -- -COMMENT ON TABLE archive.reg_demo_dedupe_list IS 'Registration Demographic Deduplication List: List of matched UIN / RIDs, as part of demographic data.'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_demo_dedupe_list.regtrn_id IS 'Registration Transaction ID: ID of the demo dedupe transaction, Refers to archive.registration_transaction.id'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_demo_dedupe_list.matched_reg_id IS 'Matched Registration ID: Registration ID of the individual matching with the host registration id. It can be RID or any other id related to an individual.'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_demo_dedupe_list.reg_id IS 'Registration ID: Registration ID for which the matches are found as part of the demographic dedupe process.'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_demo_dedupe_list.cr_by IS 'Created By : ID or name of the user who create / insert record.'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_demo_dedupe_list.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_demo_dedupe_list.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_demo_dedupe_list.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_demo_dedupe_list.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_demo_dedupe_list.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE'; --- ddl-end -- diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-reg_manual_verification.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-reg_manual_verification.sql deleted file mode 100644 index 69593e58..00000000 --- a/data-archive/db_scripts/mosip_archive/ddl/archive-reg_manual_verification.sql +++ /dev/null @@ -1,73 +0,0 @@ --- ------------------------------------------------------------------------------------------------- --- Database Name: mosip_archive --- Table Name : archive.reg_manual_verification --- Purpose : Manual Verification: Stores all the registration request which goes through manual verification process, registration can be assinged to single/multiple manual verifier as part of the verification process --- Create By : Sadanandegowda --- Created Date : Dec-2020 --- --- Modified Date Modified By Comments / Remarks --- ------------------------------------------------------------------------------------------ --- --- 
------------------------------------------------------------------------------------------ - --- object: archive.reg_manual_verification | type: TABLE -- --- DROP TABLE IF EXISTS archive.reg_manual_verification CASCADE; -CREATE TABLE archive.reg_manual_verification( - reg_id character varying(39) NOT NULL, - matched_ref_id character varying(39) NOT NULL, - matched_ref_type character varying(36) NOT NULL, - mv_usr_id character varying(256), - matched_score numeric(6,3), - status_code character varying(36), - reason_code character varying(36), - status_comment character varying(256), - trntyp_code character varying(36), - lang_code character varying(3) NOT NULL, - is_active boolean NOT NULL, - cr_by character varying(256) NOT NULL, - cr_dtimes timestamp NOT NULL, - upd_by character varying(256), - upd_dtimes timestamp, - is_deleted boolean, - del_dtimes timestamp, - CONSTRAINT pk_rmnlver_id PRIMARY KEY (reg_id,matched_ref_id,matched_ref_type) - -); --- ddl-end -- -COMMENT ON TABLE archive.reg_manual_verification IS 'Manual Verification: Stores all the registration request which goes through manual verification process, registration can be assinged to single/multiple manual verifier as part of the verification process'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_manual_verification.reg_id IS 'Registration ID: ID of the registration request'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_manual_verification.matched_ref_id IS 'Mached Reference ID: Reference ID of the mached registrations, This id can be RID'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_manual_verification.matched_ref_type IS 'Mached reference ID Type: Type of the Reference ID'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_manual_verification.mv_usr_id IS 'Manual Verifier ID: User ID of the manual verifier'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_manual_verification.matched_score IS 'Mached Score: Mached score as part deduplication process, This will be the combined score of multiple ABISapplications'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_manual_verification.status_code IS 'Status Code : Status of the manual verification'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_manual_verification.reason_code IS 'Reason Code : Reason code provided by the manual verifier on reason for approve or reject the registration request as part of the verification process'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_manual_verification.status_comment IS 'Status Comment: Comments captured as part of manual verification process'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_manual_verification.trntyp_code IS 'Transaction Type Code : Code of the transaction type'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_manual_verification.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. 
The value of some of the attributes in current record is stored in this respective language.'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_manual_verification.is_active IS 'IS_Active : Flag to mark whether the record is Active or In-active'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_manual_verification.cr_by IS 'Created By : ID or name of the user who create / insert record.'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_manual_verification.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_manual_verification.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_manual_verification.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_manual_verification.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.'; --- ddl-end -- -COMMENT ON COLUMN archive.reg_manual_verification.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE'; --- ddl-end -- - diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-registered_authdevice_master_h.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-registered_authdevice_master_h.sql deleted file mode 100644 index a50a5f00..00000000 --- a/data-archive/db_scripts/mosip_archive/ddl/archive-registered_authdevice_master_h.sql +++ /dev/null @@ -1,82 +0,0 @@ --- ------------------------------------------------------------------------------------------------- --- Database Name: mosip_archive --- Table Name : archive.registered_authdevice_master_h --- Purpose : --- Create By : Sadanandegowda --- Created Date : Dec-2020 --- --- Modified Date Modified By Comments / Remarks --- ------------------------------------------------------------------------------------------ --- --- ------------------------------------------------------------------------------------------ - --- object: archive.registered_authdevice_master_h | type: TABLE -- --- DROP TABLE IF EXISTS archive.registered_authdevice_master_h CASCADE; -CREATE TABLE archive.registered_authdevice_master_h( - code character varying(36) NOT NULL, - status_code character varying(64), - device_id character varying(256) NOT NULL, - device_sub_id character varying(1024), - digital_id character varying(1024) NOT NULL, - serial_number character varying(64) NOT NULL, - device_detail_id character varying(36) NOT NULL, - purpose character varying(64) NOT NULL, - firmware character varying(128), - expiry_date timestamp, - certification_level character varying(3), - foundational_trust_provider_id character varying(36), - hotlisted boolean, - is_active boolean NOT NULL, - cr_by character varying(256) NOT NULL, - cr_dtimes timestamp NOT NULL, - upd_by character varying(256), - upd_dtimes timestamp, - is_deleted boolean, - del_dtimes timestamp, - eff_dtimes timestamp NOT NULL, - CONSTRAINT pk_authdevicemh_code PRIMARY KEY (code,eff_dtimes) - -); --- ddl-end -- -COMMENT ON TABLE archive.registered_authdevice_master_h IS 'Registered Device History : History of changes of any MOSIP device registration will be stored in history table to track any chnages for future validations.'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_authdevice_master_h.code IS 'Registred Device Code : Unique ID generated / assigned for device which is registred in MOSIP system 
for the purpose'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_authdevice_master_h.status_code IS 'Status Code : Status of the registered devices, The status code can be Registered, De-Registered or Retired/Revoked.'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_authdevice_master_h.device_id IS 'Device ID: Device ID is the unigue id provided by device provider for each device'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_authdevice_master_h.device_sub_id IS 'Device Sub ID: Sub ID of the devices, Each device can have an array of sub IDs.'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_authdevice_master_h.digital_id IS 'Digital ID: Digital ID received as a Json value containing below values like Serial number of the device, make , model, type, provider details..etc'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_authdevice_master_h.serial_number IS 'Serial Number : Serial number of the device, This will be the Unique ID of the device by the provider'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_authdevice_master_h.device_detail_id IS 'Device Detail ID'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_authdevice_master_h.purpose IS 'Purpose : Purpose of these devices in the MOSIP system. ex. Registrations, Authentication, eKYC...etc'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_authdevice_master_h.firmware IS 'Firmware: Firmware used in devices'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_authdevice_master_h.expiry_date IS 'Expiry Date: expiry date of the device'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_authdevice_master_h.certification_level IS 'Certification Level: Certification level for the device, This can be L0 or L1 devices'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_authdevice_master_h.foundational_trust_provider_id IS 'Foundational Trust Provider ID: Foundational trust provider ID, This will be soft referenced from master.foundational_trust_provider.id. Required only for L1 devices.'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_authdevice_master_h.is_active IS 'IS_Active : Flag to mark whether the record is Active or In-active'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_authdevice_master_h.cr_by IS 'Created By : ID or name of the user who create / insert record'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_authdevice_master_h.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_authdevice_master_h.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_authdevice_master_h.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_authdevice_master_h.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_authdevice_master_h.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_authdevice_master_h.eff_dtimes IS 'Effective Date Timestamp : This to track master record whenever there is an INSERT/UPDATE/DELETE ( soft delete ). 
The current record is effective from this date-time.'; --- ddl-end -- \ No newline at end of file diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-registered_regdevice_master_h.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-registered_regdevice_master_h.sql deleted file mode 100644 index 96b89b95..00000000 --- a/data-archive/db_scripts/mosip_archive/ddl/archive-registered_regdevice_master_h.sql +++ /dev/null @@ -1,82 +0,0 @@ --- ------------------------------------------------------------------------------------------------- --- Database Name: mosip_archive --- Table Name : archive.registered_regdevice_master_h --- Purpose : Registered Device History : History of changes of any MOSIP device registration will be stored in history table to track any chnages for future validations. --- Create By : Sadanandegowda --- Created Date : Dec-2020 --- --- Modified Date Modified By Comments / Remarks --- ------------------------------------------------------------------------------------------ --- --- ------------------------------------------------------------------------------------------ - --- object: archive.registered_regdevice_master_h | type: TABLE -- --- DROP TABLE IF EXISTS archive.registered_regdevice_master_h CASCADE; -CREATE TABLE archive.registered_regdevice_master_h( - code character varying(36) NOT NULL, - status_code character varying(64), - device_id character varying(256) NOT NULL, - device_sub_id character varying(1024), - digital_id character varying(1024) NOT NULL, - serial_number character varying(64) NOT NULL, - device_detail_id character varying(36) NOT NULL, - purpose character varying(64) NOT NULL, - firmware character varying(128), - expiry_date timestamp, - certification_level character varying(3), - foundational_trust_provider_id character varying(36), - hotlisted boolean, - is_active boolean NOT NULL, - cr_by character varying(256) NOT NULL, - cr_dtimes timestamp NOT NULL, - upd_by character varying(256), - upd_dtimes timestamp, - is_deleted boolean, - del_dtimes timestamp, - eff_dtimes timestamp NOT NULL, - CONSTRAINT pk_regdevicemh_code PRIMARY KEY (code,eff_dtimes) - -); --- ddl-end -- -COMMENT ON TABLE archive.registered_regdevice_master_h IS 'Registered Device History : History of changes of any MOSIP device registration will be stored in history table to track any chnages for future validations.'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_regdevice_master_h.code IS 'Registred Device Code : Unique ID generated / assigned for device which is registred in MOSIP system for the purpose'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_regdevice_master_h.status_code IS 'Status Code : Status of the registered devices, The status code can be Registered, De-Registered or Retired/Revoked.'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_regdevice_master_h.device_id IS 'Device ID: Device ID is the unigue id provided by device provider for each device'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_regdevice_master_h.device_sub_id IS 'Device Sub ID: Sub ID of the devices, Each device can have an array of sub IDs.'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_regdevice_master_h.digital_id IS 'Digital ID: Digital ID received as a Json value containing below values like Serial number of the device, make , model, type, provider details..etc'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_regdevice_master_h.serial_number IS 'Serial Number : Serial number of the device, This will be the Unique ID of the device by the 
provider'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_regdevice_master_h.device_detail_id IS 'Device Detail ID'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_regdevice_master_h.purpose IS 'Purpose : Purpose of these devices in the MOSIP system. ex. Registrations, Authentication, eKYC...etc'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_regdevice_master_h.firmware IS 'Firmware: Firmware used in devices'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_regdevice_master_h.expiry_date IS 'Expiry Date: expiry date of the device'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_regdevice_master_h.certification_level IS 'Certification Level: Certification level for the device, This can be L0 or L1 devices'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_regdevice_master_h.foundational_trust_provider_id IS 'Foundational Trust Provider ID: Foundational trust provider ID, This will be soft referenced from master.foundational_trust_provider.id. Required only for L1 devices.'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_regdevice_master_h.is_active IS 'IS_Active : Flag to mark whether the record is Active or In-active'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_regdevice_master_h.cr_by IS 'Created By : ID or name of the user who create / insert record'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_regdevice_master_h.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_regdevice_master_h.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_regdevice_master_h.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_regdevice_master_h.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_regdevice_master_h.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE'; --- ddl-end -- -COMMENT ON COLUMN archive.registered_regdevice_master_h.eff_dtimes IS 'Effective Date Timestamp : This to track master record whenever there is an INSERT/UPDATE/DELETE ( soft delete ). 
The current record is effective from this date-time.'; --- ddl-end -- \ No newline at end of file diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-registration_transaction.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-registration_transaction.sql deleted file mode 100644 index 16ab0bf8..00000000 --- a/data-archive/db_scripts/mosip_archive/ddl/archive-registration_transaction.sql +++ /dev/null @@ -1,72 +0,0 @@ --- ------------------------------------------------------------------------------------------------- --- Database Name: mosip_archive --- Table Name : archive.registration_transaction --- Purpose : Registration Transaction: Registration Processor Transaction table is to store ALL Registration Processor packet processing/process transaction details for ID issuance --- Create By : Sadanandegowda --- Created Date : Dec-2020 --- --- Modified Date Modified By Comments / Remarks --- ------------------------------------------------------------------------------------------ --- --- ------------------------------------------------------------------------------------------ - --- object: archive.registration_transaction | type: TABLE -- --- DROP TABLE IF EXISTS archive.registration_transaction CASCADE; -CREATE TABLE archive.registration_transaction( - id character varying(36) NOT NULL, - reg_id character varying(39) NOT NULL, - trn_type_code character varying(64) NOT NULL, - remarks character varying(256), - parent_regtrn_id character varying(36), - ref_id character varying(64), - ref_id_type character varying(64), - status_code character varying(36) NOT NULL, - sub_status_code character varying(36) NOT NULL, - lang_code character varying(3) NOT NULL, - status_comment character varying(256), - cr_by character varying(256) NOT NULL, - cr_dtimes timestamp NOT NULL, - upd_by character varying(256), - upd_dtimes timestamp, - is_deleted boolean, - del_dtimes timestamp, - CONSTRAINT pk_regtrn_id PRIMARY KEY (id) - -); --- ddl-end -- -COMMENT ON TABLE archive.registration_transaction IS 'Registration Transaction: Registration Processor Transaction table is to store ALL Registration Processor packet processing/process transaction details for ID issuance'; --- ddl-end -- -COMMENT ON COLUMN archive.registration_transaction.id IS 'ID: Transaction id of the transactions that were recorded in registration module/application'; --- ddl-end -- -COMMENT ON COLUMN archive.registration_transaction.reg_id IS 'Registration ID: Registration id for which these transactions are carried out at the registration client application.'; --- ddl-end -- -COMMENT ON COLUMN archive.registration_transaction.trn_type_code IS 'Transaction Type Code: Type of transaction being processed. Refers to reg.transaction_type.code'; --- ddl-end -- -COMMENT ON COLUMN archive.registration_transaction.remarks IS 'Transaction Remarks: Current remarks/comments of the transaction'; --- ddl-end -- -COMMENT ON COLUMN archive.registration_transaction.parent_regtrn_id IS 'Parent Registration ID: Parent transaction id that has triggered this transaction (if any). Refers to reg.registration_transaction.id'; --- ddl-end -- -COMMENT ON COLUMN archive.registration_transaction.ref_id IS 'Reference ID: Reference id for the transaction if any'; --- ddl-end -- -COMMENT ON COLUMN archive.registration_transaction.ref_id_type IS 'reference ID Type: reference ID type of the transaction if any'; --- ddl-end -- -COMMENT ON COLUMN archive.registration_transaction.status_code IS 'Status Code: Current status of the transaction. 
Refers to code field of master.status_list table.'; --- ddl-end -- -COMMENT ON COLUMN archive.registration_transaction.sub_status_code IS 'Sub Status Code: Current sub status of the registration transaction. Refers to code field of master.status_list table.'; --- ddl-end -- -COMMENT ON COLUMN archive.registration_transaction.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language.'; --- ddl-end -- -COMMENT ON COLUMN archive.registration_transaction.status_comment IS 'Status Comment: Comments provided by the actor during the transaction processing.'; --- ddl-end -- -COMMENT ON COLUMN archive.registration_transaction.cr_by IS 'Created By : ID or name of the user who create / insert record.'; --- ddl-end -- -COMMENT ON COLUMN archive.registration_transaction.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; --- ddl-end -- -COMMENT ON COLUMN archive.registration_transaction.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; --- ddl-end -- -COMMENT ON COLUMN archive.registration_transaction.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; --- ddl-end -- -COMMENT ON COLUMN archive.registration_transaction.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.'; --- ddl-end -- -COMMENT ON COLUMN archive.registration_transaction.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE'; --- ddl-end -- diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-regprc-abis_request.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-regprc-abis_request.sql new file mode 100644 index 00000000..53d613a9 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-regprc-abis_request.sql @@ -0,0 +1,67 @@ + + +-- object: archive.mosip_regprc_abis_request | type: TABLE -- +-- DROP TABLE IF EXISTS archive.mosip_regprc_abis_request CASCADE; +CREATE TABLE archive.mosip_regprc_abis_request( + id character varying(36) NOT NULL, + req_batch_id character varying(36) NOT NULL, + abis_app_code character varying(36) NOT NULL, + request_type character varying(64) NOT NULL, + request_dtimes timestamp NOT NULL, + bio_ref_id character varying(36), + ref_regtrn_id character varying(36), + req_text bytea, + status_code character varying(36) NOT NULL, + status_comment character varying(256), + lang_code character varying(3) NOT NULL, + cr_by character varying(256) NOT NULL, + cr_dtimes timestamp NOT NULL, + upd_by character varying(256), + upd_dtimes timestamp, + is_deleted boolean DEFAULT FALSE, + del_dtimes timestamp, + CONSTRAINT pk_abisreq PRIMARY KEY (id), + CONSTRAINT uk_abisreq_ref UNIQUE (req_batch_id,abis_app_code) + +); +-- ddl-end -- +-- index creation starts-- +CREATE INDEX IF NOT EXISTS idx_abisreq_bio_ref_id ON archive.mosip_regprc_abis_request USING btree (bio_ref_id); +CREATE INDEX IF NOT EXISTS idx_abis_req_regtrn_id ON archive.mosip_regprc_abis_request USING btree (ref_regtrn_id); +-- index creation ends-- +COMMENT ON TABLE archive.mosip_regprc_abis_request IS 'ABIS Request: Stores all the requests that were sent to ABIS systems'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_request.id IS 'Request ID: System generated id, used to track all the ABIS requests sent to ABIS applications.'; +-- ddl-end -- 
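+-- Illustrative (commented) usage, assuming the companion archive.mosip_regprc_abis_response
+-- table created later in this changeset: requests in a batch can be matched to their
+-- responses via mosip_regprc_abis_response.abis_req_id. The batch id value is a placeholder.
+-- SELECT req.req_batch_id, req.abis_app_code, req.request_type, resp.status_code
+--   FROM archive.mosip_regprc_abis_request req
+--   LEFT JOIN archive.mosip_regprc_abis_response resp ON resp.abis_req_id = req.id
+--  WHERE req.req_batch_id = '<batch-id>';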
+COMMENT ON COLUMN archive.mosip_regprc_abis_request.req_batch_id IS 'Request Batch ID: ABIS Request Batch ID to track all the requests that were sent to different ABIS systems. This will also be used to track the responses received from the ABIS systems to manage further flows.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_request.abis_app_code IS 'ABIS Application Code: Code of the ABIS application to which the transaction request is being done.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_request.request_type IS 'Request Type: Type of request that was sent to ABIS application. Eg. INSERT, IDENTIFY, etc.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_request.request_dtimes IS 'Request Date Time: Date and time when the ABIS request was created.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_request.bio_ref_id IS 'Biometric Reference ID: Biometric Reference ID of the host registration id for which requests are being sent to ABIS application.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_request.ref_regtrn_id IS 'Reference Transaction ID: ID of the reference registration transaction for which the ABIS transaction request is being initiated.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_request.req_text IS 'Request Text: Information that was passed to the ABIS system as part of this request.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_request.status_code IS 'Status Code: Current Status code of the ABIS request transaction. Refers to master.status_list.code'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_request.status_comment IS 'Status Comment: Comments captured as part of packet processing (if any). This can be used in case someone wants to abort the transaction; comments can be provided as additional information.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_request.lang_code IS 'Language Code: Code of the language used while creating this ABIS transaction.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_request.cr_by IS 'Created By : ID or name of the user who create / insert record.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_request.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_request.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_request.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_request.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_request.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE'; +-- ddl-end -- diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-regprc-abis_response.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-regprc-abis_response.sql new file mode 100644 index 00000000..fd56f974 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-regprc-abis_response.sql @@ -0,0 +1,51 @@ + + +-- object: archive.mosip_regprc_abis_response | type: TABLE -- +-- DROP TABLE IF EXISTS archive.mosip_regprc_abis_response CASCADE; +CREATE TABLE archive.mosip_regprc_abis_response( + id 
character varying(36) NOT NULL, + abis_req_id character varying(36), + resp_dtimes timestamp NOT NULL, + resp_text bytea, + status_code character varying(32) NOT NULL, + status_comment character varying(256), + lang_code character varying(3) NOT NULL, + cr_by character varying(256) NOT NULL, + cr_dtimes timestamp NOT NULL, + upd_by character varying(256), + upd_dtimes timestamp, + is_deleted boolean DEFAULT FALSE, + del_dtimes timestamp, + CONSTRAINT pk_abisresp PRIMARY KEY (id), + CONSTRAINT uk_abisresp UNIQUE (abis_req_id,resp_dtimes) + +); +-- ddl-end -- +COMMENT ON TABLE archive.mosip_regprc_abis_response IS 'ABIS Response: Stores all the responses that were received from ABIS systems for the request sent.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_response.id IS 'Response Id: Id of the response received from ABIS application. This is a system generated unique number, which can be a UUID. This will be used in reference tables'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_response.abis_req_id IS 'ABIS Request ID: Request id of the ABIS transaction for which ABIS response is received. This request id refers to regprc.abis_request.id'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_response.resp_dtimes IS 'Response Date Time: Date and Time when the response was received.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_response.resp_text IS 'Response Text: Text of the response that was received from the ABIS application.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_response.status_code IS 'Status Code: Current Status code of the ABIS response transaction. Refers to master.status_list.code'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_response.status_comment IS 'Status Comment: Comments captured as part of packet processing (if any). This can be used in case someone wants to abort the transaction; comments can be provided as additional information.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_response.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. 
The value of some of the attributes in current record is stored in this respective language.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_response.cr_by IS 'Created By : ID or name of the user who create / insert record.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_response.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_response.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_response.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_response.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_response.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE'; +-- ddl-end -- diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-regprc-abis_response_det.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-regprc-abis_response_det.sql new file mode 100644 index 00000000..95a9bd13 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-regprc-abis_response_det.sql @@ -0,0 +1,35 @@ + + +-- object: archive.mosip_regprc_abis_response_det | type: TABLE -- +-- DROP TABLE IF EXISTS archive.mosip_regprc_abis_response_det CASCADE; +CREATE TABLE archive.mosip_regprc_abis_response_det( + abis_resp_id character varying(36) NOT NULL, + matched_bio_ref_id character varying(36) NOT NULL, + cr_by character varying(256) NOT NULL, + cr_dtimes timestamp NOT NULL, + upd_by character varying(256), + upd_dtimes timestamp, + is_deleted boolean DEFAULT FALSE, + del_dtimes timestamp, + CONSTRAINT pk_abisrdt PRIMARY KEY (matched_bio_ref_id,abis_resp_id) + +); +-- ddl-end -- +COMMENT ON TABLE archive.mosip_regprc_abis_response_det IS 'ABIS Response Detail: Stores details of all the ABIS responses received from ABIS system. Response details will mainly have scores, which is applicable only for identify request type.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_response_det.abis_resp_id IS 'ABIS Response ID: Response id of the ABIS transaction for which ABIS response details are received. 
This response id refers to regprc.abis_response.id'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_response_det.matched_bio_ref_id IS 'Matched BIO Reference ID: Bio Reference IDs that are potential matches with the host reference id as received by an ABIS application.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_response_det.cr_by IS 'Created By : ID or name of the user who create / insert record.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_response_det.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_response_det.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_response_det.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_response_det.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_abis_response_det.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE'; +-- ddl-end -- diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-regprc-reg_demo_dedupe_list.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-regprc-reg_demo_dedupe_list.sql new file mode 100644 index 00000000..ad17aeee --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-regprc-reg_demo_dedupe_list.sql @@ -0,0 +1,38 @@ + + +-- object: archive.mosip_regprc_reg_demo_dedupe_list | type: TABLE -- +-- DROP TABLE IF EXISTS archive.mosip_regprc_reg_demo_dedupe_list CASCADE; +CREATE TABLE archive.mosip_regprc_reg_demo_dedupe_list( + regtrn_id character varying(36) NOT NULL, + matched_reg_id character varying(39) NOT NULL, + reg_id character varying(39) NOT NULL, + cr_by character varying(256) NOT NULL, + cr_dtimes timestamp NOT NULL, + upd_by character varying(256), + upd_dtimes timestamp, + is_deleted boolean DEFAULT FALSE, + del_dtimes timestamp, + CONSTRAINT pk_regded PRIMARY KEY (matched_reg_id,regtrn_id) + +); +-- ddl-end -- +COMMENT ON TABLE archive.mosip_regprc_reg_demo_dedupe_list IS 'Registration Demographic Deduplication List: List of matched UIN / RIDs, as part of demographic data.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_reg_demo_dedupe_list.regtrn_id IS 'Registration Transaction ID: ID of the demo dedupe transaction. Refers to regprc.registration_transaction.id'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_reg_demo_dedupe_list.matched_reg_id IS 'Matched Registration ID: Registration ID of the individual matching with the host registration id. 
It can be RID or any other id related to an individual.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_reg_demo_dedupe_list.reg_id IS 'Registration ID: Registration ID for which the matches are found as part of the demographic dedupe process.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_reg_demo_dedupe_list.cr_by IS 'Created By : ID or name of the user who create / insert record.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_reg_demo_dedupe_list.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_reg_demo_dedupe_list.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_reg_demo_dedupe_list.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_reg_demo_dedupe_list.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_reg_demo_dedupe_list.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE'; +-- ddl-end -- diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-regprc-registration_transaction.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-regprc-registration_transaction.sql new file mode 100644 index 00000000..0fc7e467 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-regprc-registration_transaction.sql @@ -0,0 +1,68 @@ + + +-- object: archive.mosip_regprc_registration_transaction | type: TABLE -- +-- DROP TABLE IF EXISTS archive.mosip_regprc_registration_transaction CASCADE; +CREATE TABLE archive.mosip_regprc_registration_transaction( + id character varying(36) NOT NULL, + reg_id character varying(39) NOT NULL, + trn_type_code character varying(64) NOT NULL, + remarks character varying(256), + parent_regtrn_id character varying(36), + ref_id character varying(64), + ref_id_type character varying(64), + status_code character varying(36) NOT NULL, + sub_status_code character varying(36) NOT NULL, + lang_code character varying(3) NOT NULL, + status_comment character varying(256), + cr_by character varying(256) NOT NULL, + cr_dtimes timestamp NOT NULL, + upd_by character varying(256), + upd_dtimes timestamp, + is_deleted boolean DEFAULT FALSE, + del_dtimes timestamp, + CONSTRAINT pk_regtrn_id PRIMARY KEY (id) + +); +-- ddl-end -- +-- index creation starts-- +CREATE INDEX IF NOT EXISTS idx_reg_trn_reg_id ON archive.mosip_regprc_registration_transaction USING btree (reg_id); +CREATE INDEX IF NOT EXISTS idx_reg_trn_status_code ON archive.mosip_regprc_registration_transaction USING btree (status_code); +CREATE INDEX IF NOT EXISTS idx_reg_trn_trntypecode ON archive.mosip_regprc_registration_transaction USING btree (trn_type_code); +CREATE INDEX IF NOT EXISTS idx_reg_trn_upd_dtimes ON archive.mosip_regprc_registration_transaction USING btree (upd_dtimes); +--index creation ends-- +COMMENT ON TABLE archive.mosip_regprc_registration_transaction IS 'Registration Transaction: Registration Processor Transaction table is to store ALL Registration Processor packet processing/process transaction details for ID issuance'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_registration_transaction.id IS 'ID: Transaction id of the transactions that were recorded in registration 
module/application'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_registration_transaction.reg_id IS 'Registration ID: Registration id for which these transactions are carried out at the registration client application.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_registration_transaction.trn_type_code IS 'Transaction Type Code: Type of transaction being processed. Refers to reg.transaction_type.code'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_registration_transaction.remarks IS 'Transaction Remarks: Current remarks/comments of the transaction'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_registration_transaction.parent_regtrn_id IS 'Parent Registration ID: Parent transaction id that has triggered this transaction (if any). Refers to reg.registration_transaction.id'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_registration_transaction.ref_id IS 'Reference ID: Reference id for the transaction if any'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_registration_transaction.ref_id_type IS 'reference ID Type: reference ID type of the transaction if any'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_registration_transaction.status_code IS 'Status Code: Current status of the transaction. Refers to code field of master.status_list table.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_registration_transaction.sub_status_code IS 'Sub Status Code: Current sub status of the registration transaction. Refers to code field of master.status_list table.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_registration_transaction.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_registration_transaction.status_comment IS 'Status Comment: Comments provided by the actor during the transaction processing.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_registration_transaction.cr_by IS 'Created By : ID or name of the user who create / insert record.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_registration_transaction.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_registration_transaction.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_registration_transaction.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_registration_transaction.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.'; +-- ddl-end -- +COMMENT ON COLUMN archive.mosip_regprc_registration_transaction.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE'; +-- ddl-end -- diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-resident-otp_transaction.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-resident-otp_transaction.sql new file mode 100644 index 00000000..7f3d8572 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-resident-otp_transaction.sql @@ -0,0 +1,47 @@ +-- ------------------------------------------------------------------------------------------------- +-- Database Name: mosip_resident 
+-- Release Version : 1.2.1 +-- Purpose : Database scripts for Resident Service DB. +-- Create By : Manoj SP +-- Created Date : April-2022 +-- +-- Modified Date Modified By Comments / Remarks +-- -------------------------------------------------------------------------------------------------- +-- April-2022 Manoj SP Added otp_transaction table creation scripts with comments. +----------------------------------------------------------------------------------------------------- + +-- This Table is used to save the OTP for the user whenever the user requests one using the email id / phone number to log into the application. +CREATE TABLE archive.mosip_resident_otp_transaction( + id character varying(36) NOT NULL, + ref_id character varying(1024) NOT NULL, + otp_hash character varying(512) NOT NULL, + generated_dtimes timestamp, + expiry_dtimes timestamp, + validation_retry_count smallint, + status_code character varying(36), + lang_code character varying(3), + cr_by character varying(256) NOT NULL, + cr_dtimes timestamp NOT NULL, + upd_by character varying(256), + upd_dtimes timestamp, + is_deleted boolean, + del_dtimes timestamp, + CONSTRAINT pk_otpt_id_resident PRIMARY KEY (id) +); + +COMMENT ON TABLE archive.mosip_resident_otp_transaction IS 'All OTP related data and validation details are maintained here for the Resident Services module.'; +COMMENT ON COLUMN archive.mosip_resident_otp_transaction.id IS 'OTP id is a unique identifier (UUID) used as a unique key to identify the OTP transaction'; +COMMENT ON COLUMN archive.mosip_resident_otp_transaction.ref_id IS 'Reference ID is a reference information received from the OTP requester which can be used while validating the OTP.'; +COMMENT ON COLUMN archive.mosip_resident_otp_transaction.otp_hash IS 'Hash of id, ref_id and otp which is generated based on the configuration setup and sent to the requester application / module.'; +COMMENT ON COLUMN archive.mosip_resident_otp_transaction.generated_dtimes IS 'Date and Time when the OTP was generated'; +COMMENT ON COLUMN archive.mosip_resident_otp_transaction.expiry_dtimes IS 'Date and Time when the OTP will expire'; +COMMENT ON COLUMN archive.mosip_resident_otp_transaction.validation_retry_count IS 'Validation retry counts of this OTP request. If the validation retry crosses the threshold limit, then the OTP will be de-activated.'; +COMMENT ON COLUMN archive.mosip_resident_otp_transaction.status_code IS 'Current status of the transaction. Refers to code field of master.status_list table.'; +COMMENT ON COLUMN archive.mosip_resident_otp_transaction.lang_code IS 'For multilanguage implementation this attribute Refers master.language.code. 
The value of some of the attributes in current record is stored in this respective language.'; +COMMENT ON COLUMN archive.mosip_resident_otp_transaction.cr_by IS 'ID or name of the user who create / insert record.'; +COMMENT ON COLUMN archive.mosip_resident_otp_transaction.cr_dtimes IS 'Date and Timestamp when the record is created/inserted'; +COMMENT ON COLUMN archive.mosip_resident_otp_transaction.upd_by IS 'ID or name of the user who update the record with new values'; +COMMENT ON COLUMN archive.mosip_resident_otp_transaction.upd_dtimes IS 'Date and Timestamp when any of the fields in the record is updated with new values.'; +COMMENT ON COLUMN archive.mosip_resident_otp_transaction.is_deleted IS 'Flag to mark whether the record is Soft deleted.'; +COMMENT ON COLUMN archive.mosip_resident_otp_transaction.del_dtimes IS 'Date and Timestamp when the record is soft deleted with is_deleted=TRUE'; +----------------------------------------------------------------------------------------------------- diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-resident_grievance_ticket.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-resident_grievance_ticket.sql new file mode 100644 index 00000000..ad2a35f2 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-resident_grievance_ticket.sql @@ -0,0 +1,50 @@ +-- ------------------------------------------------------------------------------------------------- +-- Database Name: mosip_resident +-- Release Version : 1.2.1 +-- Purpose : Database scripts for Resident Service DB. +-- Create By : Kamesh Shekhar Prasad +-- Created Date : December-2022 +-- +-- Modified Date Modified By Comments / Remarks +-- -------------------------------------------------------------------------------------------------- +-- +----------------------------------------------------------------------------------------------------- + +-- This Table is used to save resident grievance tickets. 
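+-- Illustrative (commented) archival flow for the table created below. The source
+-- schema/table name resident.grievance_ticket and the 30-day cutoff are assumptions for
+-- illustration; in this repo the actual row movement is done by the per-table archive jobs.
+-- INSERT INTO archive.mosip_resident_grievance_ticket
+-- SELECT * FROM resident.grievance_ticket
+--  WHERE cr_dtimes < now() - interval '30 days';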
+CREATE TABLE archive.mosip_resident_grievance_ticket( + id VARCHAR(64) NOT NULL, + eventId VARCHAR(64) NOT NULL, + name VARCHAR(256) NOT NULL, + emailId VARCHAR(128), + alternateEmailId VARCHAR(128), + phoneNo VARCHAR(64), + alternatePhoneNo VARCHAR(64), + message character varying(1024) NOT NULL, + hasAttachment boolean NOT NULL DEFAULT false, + status character varying(64) NOT NULL, + cr_by character varying(256) NOT NULL, + cr_dtimes timestamp NOT NULL, + upd_by character varying(256), + upd_dtimes timestamp, + is_deleted boolean NOT NULL DEFAULT false, + del_dtimes timestamp, + CONSTRAINT pk_resgrev_id PRIMARY KEY (id) +); + +COMMENT ON TABLE archive.mosip_resident_grievance_ticket IS 'This Table is used to save resident grievance tickets.'; +COMMENT ON COLUMN archive.mosip_resident_grievance_ticket.id IS 'Unique id of the grievance ticket.'; +COMMENT ON COLUMN archive.mosip_resident_grievance_ticket.eventId IS 'Unique event id.'; +COMMENT ON COLUMN archive.mosip_resident_grievance_ticket.emailId IS 'Email id of the resident.'; +COMMENT ON COLUMN archive.mosip_resident_grievance_ticket.alternateEmailId IS 'Alternate email id.'; +COMMENT ON COLUMN archive.mosip_resident_grievance_ticket.phoneNo IS 'Phone number.'; +COMMENT ON COLUMN archive.mosip_resident_grievance_ticket.alternatePhoneNo IS 'Alternate phone number.'; +COMMENT ON COLUMN archive.mosip_resident_grievance_ticket.message IS 'Grievance message.'; +COMMENT ON COLUMN archive.mosip_resident_grievance_ticket.status IS 'Status of the grievance ticket.'; +COMMENT ON COLUMN archive.mosip_resident_grievance_ticket.cr_by IS 'ID or name of the user who created the record.'; +COMMENT ON COLUMN archive.mosip_resident_grievance_ticket.cr_dtimes IS 'Date and time when the record was created.'; +COMMENT ON COLUMN archive.mosip_resident_grievance_ticket.upd_by IS 'ID or name of the user who updated the record.'; +COMMENT ON COLUMN archive.mosip_resident_grievance_ticket.upd_dtimes IS 'Date and time when the record was updated.'; +COMMENT ON COLUMN archive.mosip_resident_grievance_ticket.is_deleted IS 'Flag to mark whether the record is soft deleted.'; +COMMENT ON COLUMN archive.mosip_resident_grievance_ticket.del_dtimes IS 'Date and time when the record was soft deleted.'; + +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-resident_session.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-resident_session.sql new file mode 100644 index 00000000..9d17f711 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-resident_session.sql @@ -0,0 +1,34 @@ +-- ------------------------------------------------------------------------------------------------- +-- Database Name: mosip_resident +-- Release Version : 1.2.1 +-- Purpose : Database scripts for Resident Service DB. +-- Create By : Loganathan Sekar +-- Created Date : Jan-2023 +-- +-- Modified Date Modified By Comments / Remarks +-- -------------------------------------------------------------------------------------------------- +-- +----------------------------------------------------------------------------------------------------- + +-- This Table is used to save the user sessions. 
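+-- Illustrative (commented) lookup against the table created below, using the
+-- idx_resident_session_ida_token index that follows; the token value is a placeholder.
+-- SELECT session_id, login_dtimes, ip_address
+--   FROM archive.mosip_resident_session
+--  WHERE ida_token = '<ida-token>';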
+ +CREATE TABLE archive.mosip_resident_session( + session_id character varying(128) NOT NULL, + ida_token character varying(128) NOT NULL, + login_dtimes timestamp, + ip_address character varying(128), + host character varying(128), + machine_type character varying(30), + CONSTRAINT pk_session_id PRIMARY KEY (session_id) +); + +COMMENT ON TABLE archive.mosip_resident_session IS 'This Table is used to save the user sessions.'; +COMMENT ON COLUMN archive.mosip_resident_session.session_id IS 'The unique session identifier for each login'; +COMMENT ON COLUMN archive.mosip_resident_session.ida_token IS 'The unique identifier for each user'; +COMMENT ON COLUMN archive.mosip_resident_session.login_dtimes IS 'The time when the user last logged in'; +COMMENT ON COLUMN archive.mosip_resident_session.ip_address IS 'The IP address of the device from which the user logged in'; +COMMENT ON COLUMN archive.mosip_resident_session.host IS 'The host of the site'; +COMMENT ON COLUMN archive.mosip_resident_session.machine_type IS 'The OS of the device used for accessing the portal/app'; + +-- Adding index to ida_token column +CREATE INDEX idx_resident_session_ida_token ON archive.mosip_resident_session (ida_token); diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-resident_transaction.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-resident_transaction.sql new file mode 100644 index 00000000..432e5b65 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-resident_transaction.sql @@ -0,0 +1,109 @@ +-- ------------------------------------------------------------------------------------------------- +-- Database Name: mosip_resident +-- Release Version : 1.2.1 +-- Purpose : Database scripts for Resident Service DB. +-- Create By : Kamesh Shekhar Prasad +-- Created Date : April-2022 +-- +-- Modified Date Modified By Comments / Remarks +-- -------------------------------------------------------------------------------------------------- +-- +----------------------------------------------------------------------------------------------------- + +-- This Table is used to save the transactions related to residents. 
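+-- Illustrative (commented) query against the table created below, using the read_status
+-- and token_id indexes that follow; the token value is a placeholder.
+-- SELECT event_id, request_summary, status_code, cr_dtimes
+--   FROM archive.mosip_resident_transaction
+--  WHERE token_id = '<token-id>' AND read_status = false
+--  ORDER BY cr_dtimes DESC;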
+CREATE TABLE archive.mosip_resident_transaction( + event_id VARCHAR(64) NOT NULL, + request_trn_id character varying(64) , + request_dtimes timestamp NOT NULL, + response_dtime timestamp NOT NULL, + request_type_code character varying(128) NOT NULL, + request_summary character varying(1024) NOT NULL, + status_code character varying(36) NOT NULL, + status_comment character varying(1024), + lang_code character varying(3), + ref_id_type character varying(36), + ref_id character varying(64), + token_id character varying(128) NOT NULL, + requested_entity_type character varying(64), + requested_entity_id character varying(36), + requested_entity_name character varying(128), + cr_by character varying(256) NOT NULL, + cr_dtimes timestamp NOT NULL, + upd_by character varying(256), + upd_dtimes timestamp, + is_deleted boolean NOT NULL DEFAULT false, + del_dtimes timestamp, + auth_type_code character varying(128), + static_tkn_id character varying(64), + request_signature character varying, + response_signature character varying, + olv_partner_id character varying(36), + aid character varying(64), + reference_link character varying(1024), + read_status boolean NOT NULL DEFAULT false, + pinned_status boolean NOT NULL DEFAULT false, + purpose character varying(1024), + credential_request_id character varying(256), + attribute_list character varying(255), + individual_id character varying(1024), + consent character varying(50), + tracking_id character varying(50), + CONSTRAINT pk_restrn_event_id PRIMARY KEY (event_id) +); + +COMMENT ON TABLE archive.mosip_resident_transaction IS 'This Table is used to save the transaction related to residents.'; +COMMENT ON COLUMN archive.mosip_resident_transaction.event_id IS 'Unique Id of the transaction.'; +COMMENT ON COLUMN archive.mosip_resident_transaction.aid IS 'The Application ID'; +COMMENT ON COLUMN archive.mosip_resident_transaction.request_dtimes IS 'The time when the request is received by the service'; +COMMENT ON COLUMN archive.mosip_resident_transaction.response_dtime IS 'The time when the response is received by the service'; +COMMENT ON COLUMN archive.mosip_resident_transaction.request_trn_id IS 'The unique identifier for each transaction'; +COMMENT ON COLUMN archive.mosip_resident_transaction.request_type_code IS 'The type of request'; +COMMENT ON COLUMN archive.mosip_resident_transaction.request_summary IS 'The summary of the request'; +COMMENT ON COLUMN archive.mosip_resident_transaction.status_code IS 'The current status of the request'; +COMMENT ON COLUMN archive.mosip_resident_transaction.status_comment IS 'The comment for the status of the request'; +COMMENT ON COLUMN archive.mosip_resident_transaction.lang_code IS 'The language code for the request for multi-language support'; +COMMENT ON COLUMN archive.mosip_resident_transaction.ref_id_type IS 'The type of reference id'; +COMMENT ON COLUMN archive.mosip_resident_transaction.ref_id IS 'The reference id'; +COMMENT ON COLUMN archive.mosip_resident_transaction.token_id IS 'The token id'; +COMMENT ON COLUMN archive.mosip_resident_transaction.requested_entity_type IS 'The type of the requested entity'; +COMMENT ON COLUMN archive.mosip_resident_transaction.requested_entity_id IS 'The id of the requested entity'; +COMMENT ON COLUMN archive.mosip_resident_transaction.requested_entity_name IS 'The name of the requested entity'; +COMMENT ON COLUMN archive.mosip_resident_transaction.cr_by IS 'The user who created the record'; +COMMENT ON COLUMN archive.mosip_resident_transaction.cr_dtimes IS 'The time 
when the record is created'; +COMMENT ON COLUMN archive.mosip_resident_transaction.upd_by IS 'The user who updated the record'; +COMMENT ON COLUMN archive.mosip_resident_transaction.upd_dtimes IS 'The time when the record is updated'; +COMMENT ON COLUMN archive.mosip_resident_transaction.is_deleted IS 'The flag to identify if the record is deleted or not'; +COMMENT ON COLUMN archive.mosip_resident_transaction.del_dtimes IS 'The time when the record is deleted'; +COMMENT ON COLUMN archive.mosip_resident_transaction.auth_type_code IS 'The type of the authentication'; +COMMENT ON COLUMN archive.mosip_resident_transaction.static_tkn_id IS 'The static token id'; +COMMENT ON COLUMN archive.mosip_resident_transaction.request_signature IS 'The signature of the request'; +COMMENT ON COLUMN archive.mosip_resident_transaction.response_signature IS 'The signature of the response'; +COMMENT ON COLUMN archive.mosip_resident_transaction.olv_partner_id IS 'The partner id'; +COMMENT ON COLUMN archive.mosip_resident_transaction.reference_link IS 'The reference link'; +COMMENT ON COLUMN archive.mosip_resident_transaction.read_status IS 'The flag to identify if the request is read or not'; +COMMENT ON COLUMN archive.mosip_resident_transaction.pinned_status IS 'The flag to identify if the request is pinned or not'; +COMMENT ON COLUMN archive.mosip_resident_transaction.purpose IS 'The purpose of the request'; +COMMENT ON COLUMN archive.mosip_resident_transaction.credential_request_id IS 'The credential request id'; + +-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- +-- Adding index to event_id column +CREATE INDEX idx_resident_transaction_event_id ON archive.mosip_resident_transaction (event_id); + +-- Adding index to token_id column +CREATE INDEX idx_resident_transaction_token_id ON archive.mosip_resident_transaction (token_id); + +-- Adding index to credential_request_id column +CREATE INDEX idx_resident_transaction_credential_request_id ON archive.mosip_resident_transaction (credential_request_id); + +-- Adding index to request_dtimes column +CREATE INDEX idx_resident_transaction_request_dtimes ON archive.mosip_resident_transaction (request_dtimes); + +-- Adding index to request_trn_id column +CREATE INDEX idx_resident_transaction_request_trn_id ON archive.mosip_resident_transaction (request_trn_id); + +-- Adding index to ref_id column +CREATE INDEX idx_resident_transaction_ref_id ON archive.mosip_resident_transaction (ref_id); + +--Adding index to read_status column +CREATE INDEX idx_resident_transaction_read_status ON archive.mosip_resident_transaction (read_status); + diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-resident_user_actions.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-resident_user_actions.sql new file mode 100644 index 00000000..771f1434 --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-resident_user_actions.sql @@ -0,0 +1,27 @@ +-- ------------------------------------------------------------------------------------------------- +-- Database Name: mosip_resident +-- Release Version : 1.2.1 +-- Purpose : Database scripts for Resident Service DB. 
+-- Create By : Kamesh Shekhar Prasad +-- Created Date : Aug-2022 +-- +-- Modified Date Modified By Comments / Remarks +-- -------------------------------------------------------------------------------------------------- +-- +----------------------------------------------------------------------------------------------------- + +-- This Table is used to save the user actions. + +CREATE TABLE archive.mosip_resident_user_actions( + ida_token character varying(128) NOT NULL, + last_bell_notif_click_dtimes timestamp, + CONSTRAINT pk_ida_token PRIMARY KEY (ida_token) +); + +COMMENT ON TABLE archive.mosip_resident_user_actions IS 'This Table is used to save the user actions'; +COMMENT ON COLUMN archive.mosip_resident_user_actions.ida_token IS 'The unique identifier for each user'; +COMMENT ON COLUMN archive.mosip_resident_user_actions.last_bell_notif_click_dtimes IS 'The time when the user last clicked on the bell notification'; + +-- Adding index to ida_token column +CREATE INDEX idx_resident_user_actions_ida_token ON archive.mosip_resident_user_actions (ida_token); + diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-uin_biometric_h.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-uin_biometric_h.sql deleted file mode 100644 index a51f868e..00000000 --- a/data-archive/db_scripts/mosip_archive/ddl/archive-uin_biometric_h.sql +++ /dev/null @@ -1,61 +0,0 @@ --- ------------------------------------------------------------------------------------------------- --- Database Name: mosip_archive --- Table Name : archive. --- Purpose : --- Create By : Sadanandegowda --- Created Date : Dec-2020 --- --- Modified Date Modified By Comments / Remarks --- ------------------------------------------------------------------------------------------ --- --- ------------------------------------------------------------------------------------------ - --- object: archive.uin_biometric_h | type: TABLE -- --- DROP TABLE IF EXISTS archive.uin_biometric_h CASCADE; -CREATE TABLE archive.uin_biometric_h( - uin_ref_id character varying(36) NOT NULL, - biometric_file_type character varying(36) NOT NULL, - eff_dtimes timestamp NOT NULL, - bio_file_id character varying(128) NOT NULL, - biometric_file_name character varying(128) NOT NULL, - biometric_file_hash character varying(64) NOT NULL, - lang_code character varying(3) NOT NULL, - cr_by character varying(256) NOT NULL, - cr_dtimes timestamp NOT NULL, - upd_by character varying(256), - upd_dtimes timestamp, - is_deleted boolean, - del_dtimes timestamp, - CONSTRAINT pk_uinbh PRIMARY KEY (uin_ref_id,biometric_file_type,eff_dtimes), - CONSTRAINT uk_uinbh UNIQUE (uin_ref_id,bio_file_id,eff_dtimes) - -); --- ddl-end -- -COMMENT ON TABLE archive.uin_biometric_h IS 'UIN Biometric History : This to track changes to base table record whenever there is an INSERT/UPDATE/DELETE ( soft delete ), Effective DateTimestamp is used for identifying latest or point in time information. Refer base table description for details. '; --- ddl-end -- -COMMENT ON COLUMN archive.uin_biometric_h.uin_ref_id IS 'UIN Reference ID: System generated id mapped to a UIN used for references in the system. UIN reference ID is also used as folder/bucket in DFS (HDFS/CEPH) to store documents and biometric CBEFF file. refers to idrepo.uin.uin_ref_id'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_biometric_h.biometric_file_type IS 'Biometric File Type: Type of the biometric file stored in DFS (HDFS/CEPPH). 
File type can be individual biometric file or parent /guardian biometric file.'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_biometric_h.eff_dtimes IS 'Effective Datetimestamp : This to track base table record changes whenever there is an INSERT/UPDATE/DELETE ( soft delete ). The current record is effective from this date-time till next change occurs.'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_biometric_h.bio_file_id IS 'Biometric File ID: ID of the biometric CBEFF file that is stored in filesystem storage like HDFS/CEPH. If File ID Is not available then name of the file itself can be used as file ID.'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_biometric_h.biometric_file_name IS 'Biometric File Name: Name of the biometric CBEFF file that is stored in filesystem storage like HDFS/CEPH.'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_biometric_h.biometric_file_hash IS 'Biometric File Hash: Hash value of the Biometric CBEFF file which is stored in DFS (HDFS/CEPH) storage. While reading the file, hash value of the file is verified with this hash value to ensure file validity.'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_biometric_h.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language. '; --- ddl-end -- -COMMENT ON COLUMN archive.uin_biometric_h.cr_by IS 'Created By : ID or name of the user who create / insert record'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_biometric_h.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_biometric_h.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_biometric_h.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_biometric_h.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_biometric_h.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE'; --- ddl-end -- diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-uin_document_h.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-uin_document_h.sql deleted file mode 100644 index 234855dc..00000000 --- a/data-archive/db_scripts/mosip_archive/ddl/archive-uin_document_h.sql +++ /dev/null @@ -1,67 +0,0 @@ --- ------------------------------------------------------------------------------------------------- --- Database Name: mosip_archive --- Table Name : archive.uin_document_h --- Purpose : UIN Document History : This to track changes to base table record whenever there is an INSERT/UPDATE/DELETE ( soft delete ), Effective DateTimestamp is used for identifying latest or point in time information. 
Refer base table description for details --- Create By : Sadanandegowda --- Created Date : Dec-2020 --- --- Modified Date Modified By Comments / Remarks --- ------------------------------------------------------------------------------------------ --- --- ------------------------------------------------------------------------------------------ - --- object: archive.uin_document_h | type: TABLE -- --- DROP TABLE IF EXISTS archive.uin_document_h CASCADE; -CREATE TABLE archive.uin_document_h( - uin_ref_id character varying(36) NOT NULL, - doccat_code character varying(36) NOT NULL, - doctyp_code character varying(64) NOT NULL, - eff_dtimes timestamp NOT NULL, - doc_id character varying(128) NOT NULL, - doc_name character varying(128) NOT NULL, - docfmt_code character varying(36) NOT NULL, - doc_hash character varying(64) NOT NULL, - lang_code character varying(3) NOT NULL, - cr_by character varying(256) NOT NULL, - cr_dtimes timestamp NOT NULL, - upd_by character varying(256), - upd_dtimes timestamp, - is_deleted boolean, - del_dtimes timestamp, - CONSTRAINT pk_uindh PRIMARY KEY (uin_ref_id,doccat_code,eff_dtimes), - CONSTRAINT uk_uindh UNIQUE (uin_ref_id,doc_id,eff_dtimes) - -); --- ddl-end -- -COMMENT ON TABLE archive.uin_document_h IS 'UIN Document History : This to track changes to base table record whenever there is an INSERT/UPDATE/DELETE ( soft delete ), Effective DateTimestamp is used for identifying latest or point in time information. Refer base table description for details. '; --- ddl-end -- -COMMENT ON COLUMN archive.uin_document_h.uin_ref_id IS 'UIN Reference ID: System generated id mapped to a UIN used for references in the system. UIN reference ID is also used as folder/bucket in DFS (HDFS/CEPH) to store documents and biometric CBEFF file. refers to idrepo.uin.uin_ref_id'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_document_h.doccat_code IS 'Document Category Code: Category code under which document is uploaded during the registration process for ex., POA, POI, etc. Refers to master.doc_category.code'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_document_h.doctyp_code IS 'Document Type Code: Document type under which document is uploaded during the registration process for ex., passport, driving license, etc. Refers to master.doc_type.code.'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_document_h.eff_dtimes IS 'Effective Datetimestamp : This to track base table record changes whenever there is an INSERT/UPDATE/DELETE ( soft delete ). The current record is effective from this date-time till next change occurs.'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_document_h.doc_id IS 'Document ID: ID of the document that is stored in filesystem storage like HDFS/CEPH. If document ID Is not available then name of the file itself can be used as document ID.'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_document_h.doc_name IS 'Document Name: Name of the document that is stored in filesystem storage like HDFS/CEPH.'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_document_h.docfmt_code IS 'Document Format Code: Document format code of the document that is uploaded during the registration process for ex., PDF, JPG etc. Refers to master.doc_file_format.code'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_document_h.doc_hash IS 'Document Hash: Hash value of the document which is stored in DFS (HDFS/CEPH) storage. 
While reading the document, hash value of the document is verified with this hash value to ensure document validity.'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_document_h.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language. '; --- ddl-end -- -COMMENT ON COLUMN archive.uin_document_h.cr_by IS 'Created By : ID or name of the user who create / insert record'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_document_h.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_document_h.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_document_h.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_document_h.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_document_h.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE'; --- ddl-end -- diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-uin_h.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-uin_h.sql deleted file mode 100644 index cdc4a435..00000000 --- a/data-archive/db_scripts/mosip_archive/ddl/archive-uin_h.sql +++ /dev/null @@ -1,70 +0,0 @@ --- ------------------------------------------------------------------------------------------------- --- Database Name: mosip_archive --- Table Name : archive.uin_h --- Purpose : UIN History : This to track changes to base table record whenever there is an INSERT/UPDATE/DELETE ( soft delete ), Effective DateTimestamp is used for identifying latest or point in time information. Refer base table description for details. --- Create By : Sadanandegowda --- Created Date : Dec-2020 --- --- Modified Date Modified By Comments / Remarks --- ------------------------------------------------------------------------------------------ --- --- ------------------------------------------------------------------------------------------ - --- object: archive.uin_h | type: TABLE -- --- DROP TABLE IF EXISTS archive.uin_h CASCADE; -CREATE TABLE archive.uin_h( - uin_ref_id character varying(36) NOT NULL, - eff_dtimes timestamp NOT NULL, - uin character varying(500) NOT NULL, - uin_hash character varying(128) NOT NULL, - uin_data bytea NOT NULL, - uin_data_hash character varying(64) NOT NULL, - reg_id character varying(39) NOT NULL, - bio_ref_id character varying(128), - status_code character varying(32) NOT NULL, - lang_code character varying(3) NOT NULL, - cr_by character varying(256) NOT NULL, - cr_dtimes timestamp NOT NULL, - upd_by character varying(256), - upd_dtimes timestamp, - is_deleted boolean, - del_dtimes timestamp, - CONSTRAINT pk_uinh PRIMARY KEY (uin_ref_id,eff_dtimes), - CONSTRAINT uk_uinh UNIQUE (uin,eff_dtimes) - -); --- ddl-end -- -COMMENT ON TABLE archive.uin_h IS 'UIN History : This to track changes to base table record whenever there is an INSERT/UPDATE/DELETE ( soft delete ), Effective DateTimestamp is used for identifying latest or point in time information. Refer base table description for details. 
'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_h.uin_ref_id IS 'UIN Reference ID: System generated id mapped to a UIN used for references in the system. UIN reference ID is also used as folder/bucket in DFS (HDFS/CEPH) to store documents and biometric CBEFF file.'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_h.eff_dtimes IS 'Effective Datetimestamp : This to track base table record changes whenever there is an INSERT/UPDATE/DELETE ( soft delete ). The current record is effective from this date-time till next change occurs.'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_h.uin IS 'Unique Identification Number : Unique identification number assigned to individual.'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_h.uin_hash IS 'Unique Identification Number Hash: Hash value of Unique identification number assigned to individual.'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_h.uin_data IS 'UIN Data: Information of an individual stored in JSON file as per ID definition defined by the country in the system'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_h.uin_data_hash IS 'UIN Data Hash: Hash value of the UIN data which is stored in uin_data field. While reading the JSON file, hash value of the file is verified with this hash value to ensure file validity.'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_h.reg_id IS 'Registration ID: Latest registration ID through which individual information got processed and registered'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_h.bio_ref_id IS 'Biometric Reference Id: Biometric reference id generated which will be used as a reference id in ABIS systems'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_h.status_code IS 'Status Code: Current Status code of the UIN. Refers to master.status_list.code'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_h.lang_code IS 'Language Code : For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language. '; --- ddl-end -- -COMMENT ON COLUMN archive.uin_h.cr_by IS 'Created By : ID or name of the user who create / insert record'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_h.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_h.upd_by IS 'Updated By : ID or name of the user who update the record with new values'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_h.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_h.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.'; --- ddl-end -- -COMMENT ON COLUMN archive.uin_h.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE'; --- ddl-end -- diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-vid.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-vid.sql deleted file mode 100644 index a71d48bb..00000000 --- a/data-archive/db_scripts/mosip_archive/ddl/archive-vid.sql +++ /dev/null @@ -1,65 +0,0 @@ --- ------------------------------------------------------------------------------------------------- --- Database Name: mosip_archive --- Table Name : archive. 
--- Purpose :
--- Create By : Sadanandegowda
--- Created Date : Dec-2020
---
--- Modified Date Modified By Comments / Remarks
--- ------------------------------------------------------------------------------------------
---
--- ------------------------------------------------------------------------------------------
-
--- object: archive.vid | type: TABLE --
--- DROP TABLE IF EXISTS archive.vid CASCADE;
-CREATE TABLE archive.vid(
-	id character varying(36) NOT NULL,
-	vid character varying(36) NOT NULL,
-	uin_hash character varying(128) NOT NULL,
-	uin character varying(500) NOT NULL,
-	vidtyp_code character varying(36) NOT NULL,
-	generated_dtimes timestamp NOT NULL,
-	expiry_dtimes timestamp,
-	status_code character varying(32) NOT NULL,
-	cr_by character varying(256) NOT NULL,
-	cr_dtimes timestamp NOT NULL,
-	upd_by character varying(256),
-	upd_dtimes timestamp,
-	is_deleted boolean,
-	del_dtimes timestamp,
-	CONSTRAINT pk_vid PRIMARY KEY (id),
-	CONSTRAINT uk_vid UNIQUE (vid),
-	CONSTRAINT uk_vid_uinhash UNIQUE (uin_hash,vidtyp_code,generated_dtimes)
-
-);
--- ddl-end --
-COMMENT ON TABLE archive.vid IS 'VID: To store generated list of Virtual IDs mapped to a UIN that can be used for Authentication. UIN of an individual should be secure, not to be disclosed publicly, so as part of security, VIDs are introduced. VIDs are timebound, can be changed, etc.';
--- ddl-end --
-COMMENT ON COLUMN archive.vid.id IS 'ID: Unique id generated by the system for each of the virtual id generated';
--- ddl-end --
-COMMENT ON COLUMN archive.vid.vid IS 'Virtual ID: Virtual Identification Number assigned to an individual. This virtual id can be used for individual authentication instead of using UIN';
--- ddl-end --
-COMMENT ON COLUMN archive.vid.uin_hash IS 'UIN Hash: Unique Identification Number Hash: Hash value of Unique identification number assigned to individual.';
--- ddl-end --
-COMMENT ON COLUMN archive.vid.uin IS 'UIN: Unique Identification Number : Unique identification number assigned to individual, which is mapped to VID in idmap.';
--- ddl-end --
-COMMENT ON COLUMN archive.vid.vidtyp_code IS 'Virtual ID Type: Type of a VID. An individual can have multiple VIDs which will be used for multiple purposes. VID type can be perpetual ID, timebound ID, etc.';
--- ddl-end --
-COMMENT ON COLUMN archive.vid.generated_dtimes IS 'Generated Date and Time: Date and timestamp when the Virtual ID is generated.';
--- ddl-end --
-COMMENT ON COLUMN archive.vid.expiry_dtimes IS 'Expiry Date and Time: Expiry Date and Time of the Virtual ID';
--- ddl-end --
-COMMENT ON COLUMN archive.vid.status_code IS 'Status Code: Current Status code of the Virtual ID.
Refers to master.status_list.code';
--- ddl-end --
-COMMENT ON COLUMN archive.vid.cr_by IS 'Created By : ID or name of the user who create / insert record';
--- ddl-end --
-COMMENT ON COLUMN archive.vid.cr_dtimes IS 'Created DateTimestamp : Date and Timestamp when the record is created/inserted';
--- ddl-end --
-COMMENT ON COLUMN archive.vid.upd_by IS 'Updated By : ID or name of the user who update the record with new values';
--- ddl-end --
-COMMENT ON COLUMN archive.vid.upd_dtimes IS 'Updated DateTimestamp : Date and Timestamp when any of the fields in the record is updated with new values.';
--- ddl-end --
-COMMENT ON COLUMN archive.vid.is_deleted IS 'IS_Deleted : Flag to mark whether the record is Soft deleted.';
--- ddl-end --
-COMMENT ON COLUMN archive.vid.del_dtimes IS 'Deleted DateTimestamp : Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
--- ddl-end --
diff --git a/data-archive/db_scripts/mosip_archive/deploy.properties b/data-archive/db_scripts/mosip_archive/deploy.properties
new file mode 100644
index 00000000..dc3bbcb9
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/deploy.properties
@@ -0,0 +1,6 @@
+DB_SERVERIP=172.16.0.162
+DB_PORT=30091
+SU_USER=postgres
+DEFAULT_DB_NAME=postgres
+MOSIP_DB_NAME=mosip_archive
+DML_FLAG=0
diff --git a/data-archive/db_scripts/mosip_archive/deploy.sh b/data-archive/db_scripts/mosip_archive/deploy.sh
new file mode 100755
index 00000000..19d2a9a0
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/deploy.sh
@@ -0,0 +1,49 @@
+
+## Properties file
+set -e
+properties_file="$1"
+echo `date "+%m/%d/%Y %H:%M:%S"` ": $properties_file"
+if [ -f "$properties_file" ]
+then
+  echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file \"$properties_file\" found."
+  while IFS='=' read -r key value
+  do
+    key=$(echo $key | tr '.' '_')
+    eval ${key}=\${value}
+  done < "$properties_file"
+else
+  echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file not found. Pass the property file name as an argument."
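+  # Hedged aside: if the file is missing, DB_SERVERIP and the other connection
+  # variables stay unset and every psql call below fails at connect time; a
+  # stricter variant of this script would stop here, e.g.:
+  #   exit 1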
+fi
+
+## Terminate existing connections
+echo "Terminating active connections"
+CONN=$(PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "SELECT count(pg_terminate_backend(pg_stat_activity.pid)) FROM pg_stat_activity WHERE datname = '$MOSIP_DB_NAME' AND pid <> pg_backend_pid()";exit;)
+echo "Terminated connections"
+
+## Create users
+echo `date "+%m/%d/%Y %H:%M:%S"` ": Creating database users"
+
+
+MASTERCONN=$(PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "select count(1) from pg_roles where rolname IN('archiveuser')";exit;)
+
+if [ ${MASTERCONN} == 0 ]
+then
+  echo `date "+%m/%d/%Y %H:%M:%S"` ": Creating Archive database user"
+  PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f role_dbuser.sql -v dbuserpwd=\'$DBUSER_PWD\'
+  ## Create DB
+  echo "Creating DB"
+  PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f db.sql
+  PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f ddl.sql
+  ## Grants
+  PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f grants.sql
+else
+  echo "Archive database already exists"
+fi
+
+## Populate tables
+if [ ${DML_FLAG} == 1 ]
+then
+  echo `date "+%m/%d/%Y %H:%M:%S"` ": Deploying DML for ${MOSIP_DB_NAME} database"
+  PGPASSWORD=$SU_USER_PWD psql -v ON_ERROR_STOP=1 --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -a -b -f dml.sql
+fi
+
diff --git a/data-archive/db_scripts/mosip_archive/grants.sql b/data-archive/db_scripts/mosip_archive/grants.sql
new file mode 100644
index 00000000..ab4c6280
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/grants.sql
@@ -0,0 +1,17 @@
+\c mosip_archive
+
+GRANT CONNECT
+   ON DATABASE mosip_archive
+   TO archiveuser;
+
+GRANT USAGE
+   ON SCHEMA archive
+   TO archiveuser;
+
+GRANT SELECT,INSERT,UPDATE,DELETE,TRUNCATE,REFERENCES
+   ON ALL TABLES IN SCHEMA archive
+   TO archiveuser;
+
+ALTER DEFAULT PRIVILEGES IN SCHEMA archive
+	GRANT SELECT,INSERT,UPDATE,DELETE,REFERENCES ON TABLES TO archiveuser;
+
diff --git a/data-archive/db_scripts/mosip_archive/mosip_archive_db.sql b/data-archive/db_scripts/mosip_archive/mosip_archive_db.sql
deleted file mode 100644
index 23bc63e7..00000000
--- a/data-archive/db_scripts/mosip_archive/mosip_archive_db.sql
+++ /dev/null
@@ -1,27 +0,0 @@
-DROP DATABASE IF EXISTS mosip_archive;
-CREATE DATABASE mosip_archive
-	ENCODING = 'UTF8'
-	LC_COLLATE = 'en_US.UTF-8'
-	LC_CTYPE = 'en_US.UTF-8'
-	TABLESPACE = pg_default
-	OWNER = sysadmin
-	TEMPLATE  = template0;
--- ddl-end --
-COMMENT ON DATABASE mosip_archive IS 'Database to store all archive data, Data is archived from multiple tables from each module.';
--- ddl-end --
-
-\c mosip_archive sysadmin
-
--- object: archive | type: SCHEMA --
-DROP SCHEMA IF EXISTS archive CASCADE;
-CREATE SCHEMA archive;
--- ddl-end --
-ALTER SCHEMA archive OWNER TO sysadmin;
--- ddl-end --
-
-ALTER DATABASE mosip_archive SET search_path TO archive,pg_catalog,public;
--- ddl-end --
-
--- REVOKE CONNECT ON DATABASE mosip_archive FROM PUBLIC;
--- REVOKE ALL ON SCHEMA archive FROM PUBLIC;
--- REVOKE ALL ON ALL TABLES IN SCHEMA archive FROM PUBLIC ;
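With grants.sql in place, a short psycopg2 probe can confirm that deploy.sh left archiveuser with the intended privileges. A minimal sketch, assuming a reachable database and placeholder credentials; archive.otp_transaction is one of the tables the DDL step creates:

    import psycopg2

    # Placeholder connection settings; substitute your deployment's values.
    with psycopg2.connect(host="localhost", port=5432, dbname="mosip_archive",
                          user="archiveuser", password="<dbuser-pwd>") as conn:
        with conn.cursor() as cur:
            # CONNECT on the database, USAGE on the schema and SELECT on its
            # tables (all granted above) make this query succeed.
            cur.execute("SELECT count(*) FROM archive.otp_transaction")
            print("rows visible to archiveuser:", cur.fetchone()[0])

diff --git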
a/data-archive/db_scripts/mosip_archive/mosip_archive_db_deploy.sh b/data-archive/db_scripts/mosip_archive/mosip_archive_db_deploy.sh deleted file mode 100644 index 03cb90b6..00000000 --- a/data-archive/db_scripts/mosip_archive/mosip_archive_db_deploy.sh +++ /dev/null @@ -1,112 +0,0 @@ -### -- --------------------------------------------------------------------------------------------------------- -### -- Script Name : ARCHIVE DB Artifacts deploy -### -- Deploy Module : MOSIP ARCHIVE DAtabase -### -- Purpose : To deploy MOSIP ARCHIVE Database DB Artifacts. -### -- Create By : Sadanandegowda DM -### -- Created Date : Dec-2020 -### -- -### -- Modified Date Modified By Comments / Remarks -### -- ----------------------------------------------------------------------------------------------------------- - -######### Properties file ############# -set -e -properties_file="$1" -echo `date "+%m/%d/%Y %H:%M:%S"` ": $properties_file" -#properties_file="./app.properties" -if [ -f "$properties_file" ] -then - echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file \"$properties_file\" found." - while IFS='=' read -r key value - do - key=$(echo $key | tr '.' '_') - eval ${key}=\${value} - done < "$properties_file" -else - echo `date "+%m/%d/%Y %H:%M:%S"` ": Property file not found, Pass property file name as argument." -fi -echo `date "+%m/%d/%Y %H:%M:%S"` ": ------------------ Database server and service status check for ${MOSIP_DB_NAME}------------------------" -##############################################LOG FILE CREATION############################################################# - -today=`date '+%d%m%Y_%H%M%S'`; -LOG="${LOG_PATH}${MOSIP_DB_NAME}-${today}.log" -touch $LOG - - -SERVICE=$(PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "select count(1) from pg_roles where rolname IN('sysadmin')";exit; > /dev/null) - -if [ "$SERVICE" -eq 0 ] || [ "$SERVICE" -eq 1 ] -then -echo `date "+%m/%d/%Y %H:%M:%S"` ": Postgres database server and service is up and running" | tee -a $LOG 2>&1 -else -echo `date "+%m/%d/%Y %H:%M:%S"` ": Postgres database server or service is not running" | tee -a $LOG 2>&1 -fi - -echo `date "+%m/%d/%Y %H:%M:%S"` ": ----------------------------------------------------------------------------------------" - -echo `date "+%m/%d/%Y %H:%M:%S"` ": Started sourcing the $MOSIP_DB_NAME Database scripts" | tee -a $LOG 2>&1 -echo `date "+%m/%d/%Y %H:%M:%S"` ": Database scripts are sourcing from :$BASEPATH" | tee -a $LOG 2>&1 - -#========================================DB Deployment process begins on ARCHIVE DB SERVER====================================== - -echo `date "+%m/%d/%Y %H:%M:%S"` ": Database deployment on $MOSIP_DB_NAME database is started...." | tee -a $LOG 2>&1 -cd /$BASEPATH/$MOSIP_DB_NAME/ -VALUE=$(PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "select count(1) from pg_roles where rolname IN('sysadmin','appadmin','dbadmin')";exit; >> $LOG 2>&1) - echo `date "+%m/%d/%Y %H:%M:%S"` ": Checking for existing users.... 
Count of existing users:"$VALUE | tee -a $LOG 2>&1 -if [ ${VALUE} == 0 ] -then - echo `date "+%m/%d/%Y %H:%M:%S"` ": Creating database users" | tee -a $LOG 2>&1 - PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f $COMMON_ROLE_FILENAME -v sysadminpwd=\'$SYSADMIN_PWD\' -v dbadminpwd=\'$DBADMIN_PWD\' -v appadminpwd=\'$APPADMIN_PWD\' >> $LOG 2>&1 -elif [ ${VALUE} == 1 ] -then - echo `date "+%m/%d/%Y %H:%M:%S"` ": Creating database users" | tee -a $LOG 2>&1 - PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f $COMMON_ROLE_FILENAME -v sysadminpwd=\'$SYSADMIN_PWD\' -v dbadminpwd=\'$DBADMIN_PWD\' -v appadminpwd=\'$APPADMIN_PWD\' >> $LOG 2>&1 -elif [ ${VALUE} == 2 ] -then - echo `date "+%m/%d/%Y %H:%M:%S"` ": Creating database users" | tee -a $LOG 2>&1 - PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f $COMMON_ROLE_FILENAME -v sysadminpwd=\'$SYSADMIN_PWD\' -v dbadminpwd=\'$DBADMIN_PWD\' -v appadminpwd=\'$APPADMIN_PWD\' >> $LOG 2>&1 -else - echo `date "+%m/%d/%Y %H:%M:%S"` ": Database users already exist" | tee -a $LOG 2>&1 -fi - -CONN=$(PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "SELECT count(pg_terminate_backend(pg_stat_activity.pid)) FROM pg_stat_activity WHERE datname = '$MOSIP_DB_NAME' AND pid <> pg_backend_pid()";exit; >> $LOG 2>&1) - -if [ ${CONN} == 0 ] -then - echo `date "+%m/%d/%Y %H:%M:%S"` ": No active database connections exist on ${MOSIP_DB_NAME}" | tee -a $LOG 2>&1 -else - echo `date "+%m/%d/%Y %H:%M:%S"` ": Active connections exist on the database server and active connection will be terminated for DB deployment." 
| tee -a $LOG 2>&1 -fi -MASTERCONN=$(PGPASSWORD=$SU_USER_PWD psql --username=$SU_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -t -c "select count(1) from pg_roles where rolname IN('archiveuser')";exit; >> $LOG 2>&1) - -if [ ${MASTERCONN} == 0 ] -then - echo `date "+%m/%d/%Y %H:%M:%S"` ": Creating Archive database user" | tee -a $LOG 2>&1 - PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f $APP_ROLE_FILENAME -v dbuserpwd=\'$DBUSER_PWD\' >> $LOG 2>&1 -else - echo `date "+%m/%d/%Y %H:%M:%S"` ": Registration Device database user already exist" | tee -a $LOG 2>&1 -fi -PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f $DB_CREATION_FILENAME >> $LOG 2>&1 -PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f $ACCESS_GRANT_FILENAME >> $LOG 2>&1 -PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -f $DDL_FILENAME >> $LOG 2>&1 - - -if [ ${DML_FLAG} == 1 ] -then - echo `date "+%m/%d/%Y %H:%M:%S"` ": Deploying DML for ${MOSIP_DB_NAME} database" | tee -a $LOG 2>&1 - PGPASSWORD=$SYSADMIN_PWD psql --username=$SYSADMIN_USER --host=$DB_SERVERIP --port=$DB_PORT --dbname=$DEFAULT_DB_NAME -a -b -f $DML_FILENAME >> $LOG 2>&1 -else - echo `date "+%m/%d/%Y %H:%M:%S"` ": There are no DML deployment required for ${MOSIP_DB_NAME}" | tee -a $LOG 2>&1 -fi - -if [ $(grep -c ERROR $LOG) -ne 0 ] -then - echo `date "+%m/%d/%Y %H:%M:%S"` ": Database deployment is completed with ERRORS, Please check the logs for more information" | tee -a $LOG 2>&1 - echo `date "+%m/%d/%Y %H:%M:%S"` ": END of MOSIP database deployment" | tee -a $LOG 2>&1 -else - echo `date "+%m/%d/%Y %H:%M:%S"` ": Database deployment completed successfully, Please check the logs for more information" | tee -a $LOG 2>&1 - echo `date "+%m/%d/%Y %H:%M:%S"` ": END of MOSIP \"${MOSIP_DB_NAME}\" database deployment" | tee -a $LOG 2>&1 -fi - -echo "******************************************"`date "+%m/%d/%Y %H:%M:%S"` "*****************************************************" >> $LOG 2>&1 - -#========================================DB Deployment process completes on ARCHIVE DB SERVER====================================== diff --git a/data-archive/db_scripts/mosip_archive/mosip_archive_ddl_deploy.sql b/data-archive/db_scripts/mosip_archive/mosip_archive_ddl_deploy.sql deleted file mode 100644 index fc6f5334..00000000 --- a/data-archive/db_scripts/mosip_archive/mosip_archive_ddl_deploy.sql +++ /dev/null @@ -1,24 +0,0 @@ -\c mosip_archive sysadmin - -\ir ddl/archive-app_audit_log.sql -\ir ddl/archive-applicant_demographic_consumed.sql -\ir ddl/archive-applicant_document_consumed.sql -\ir ddl/archive-auth_transaction.sql -\ir ddl/archive-otp_transaction.sql -\ir ddl/archive-processed_prereg_list.sql -\ir ddl/archive-reg_appointment_consumed.sql -\ir ddl/archive-reg_demo_dedupe_list.sql -\ir ddl/archive-reg_manual_verification.sql -\ir ddl/archive-registered_authdevice_master_h.sql -\ir ddl/archive-registered_regdevice_master_h.sql -\ir ddl/archive-registration_transaction.sql -\ir ddl/archive-uin_biometric_h.sql -\ir ddl/archive-uin_document_h.sql -\ir ddl/archive-uin_h.sql -\ir ddl/archive-vid.sql - - -\ir ddl/archive-credential_event_store.sql -\ir ddl/archive-applications.sql -\ir ddl/archive-otp_transaction.sql - diff --git 
a/data-archive/db_scripts/mosip_archive/mosip_archive_deploy.properties b/data-archive/db_scripts/mosip_archive/mosip_archive_deploy.properties deleted file mode 100644 index cb0b3559..00000000 --- a/data-archive/db_scripts/mosip_archive/mosip_archive_deploy.properties +++ /dev/null @@ -1,15 +0,0 @@ -DB_SERVERIP= -DB_PORT= -SU_USER=postgres -DEFAULT_DB_NAME=postgres -MOSIP_DB_NAME=mosip_archive -SYSADMIN_USER=sysadmin -BASEPATH=/home/madmin/database/ -LOG_PATH=/home/madmin/logs/ -COMMON_ROLE_FILENAME=mosip_role_common.sql -APP_ROLE_FILENAME=mosip_role_archiveuser.sql -DB_CREATION_FILENAME=mosip_archive_db.sql -ACCESS_GRANT_FILENAME=mosip_archive_grants.sql -DDL_FILENAME=mosip_archive_ddl_deploy.sql -DML_FLAG=0 -DML_FILENAME=mosip_archive_dml_deploy.sql diff --git a/data-archive/db_scripts/mosip_archive/mosip_archive_grants.sql b/data-archive/db_scripts/mosip_archive/mosip_archive_grants.sql deleted file mode 100644 index 52ec69a4..00000000 --- a/data-archive/db_scripts/mosip_archive/mosip_archive_grants.sql +++ /dev/null @@ -1,48 +0,0 @@ -\c mosip_archive sysadmin - - --- object: grant_b0ae4f0dce | type: PERMISSION -- -GRANT CREATE,CONNECT,TEMPORARY - ON DATABASE mosip_archive - TO sysadmin; --- ddl-end -- - --- object: grant_99dd1cb062 | type: PERMISSION -- -GRANT CREATE,CONNECT,TEMPORARY - ON DATABASE mosip_archive - TO appadmin; --- ddl-end -- - --- object: grant_18180691b7 | type: PERMISSION -- -GRANT CONNECT - ON DATABASE mosip_archive - TO archiveuser; --- ddl-end -- - --- object: grant_3543fb6cf7 | type: PERMISSION -- -GRANT CREATE,USAGE - ON SCHEMA archive - TO sysadmin; --- ddl-end -- - --- object: grant_8e1a2559ed | type: PERMISSION -- -GRANT USAGE - ON SCHEMA archive - TO archiveuser; --- ddl-end -- - --- object: grant_8e1a2559ed | type: PERMISSION -- -GRANT SELECT,INSERT,UPDATE,DELETE,TRUNCATE,REFERENCES - ON ALL TABLES IN SCHEMA archive - TO archiveuser; --- ddl-end -- - -ALTER DEFAULT PRIVILEGES IN SCHEMA archive - GRANT SELECT,INSERT,UPDATE,DELETE,REFERENCES ON TABLES TO archiveuser; - - --- object: grant_78ed2da4ee | type: PERMISSION -- -GRANT SELECT,INSERT,UPDATE,DELETE,TRUNCATE,REFERENCES - ON ALL TABLES IN SCHEMA archive - TO appadmin; --- ddl-end -- diff --git a/data-archive/db_scripts/mosip_archive/mosip_role_archiveuser.sql b/data-archive/db_scripts/mosip_archive/mosip_role_archiveuser.sql deleted file mode 100644 index 35c502ca..00000000 --- a/data-archive/db_scripts/mosip_archive/mosip_role_archiveuser.sql +++ /dev/null @@ -1,7 +0,0 @@ --- object: archiveuser | type: ROLE -- --- DROP ROLE IF EXISTS archiveuser; -CREATE ROLE archiveuser WITH - INHERIT - LOGIN - PASSWORD :dbuserpwd; --- ddl-end -- diff --git a/data-archive/db_scripts/mosip_archive/mosip_role_common.sql b/data-archive/db_scripts/mosip_archive/mosip_role_common.sql deleted file mode 100644 index 4e4c083c..00000000 --- a/data-archive/db_scripts/mosip_archive/mosip_role_common.sql +++ /dev/null @@ -1,31 +0,0 @@ --- object: sysadmin | type: ROLE -- ---DROP ROLE IF EXISTS sysadmin; -CREATE ROLE sysadmin WITH - SUPERUSER - CREATEDB - CREATEROLE - INHERIT - LOGIN - REPLICATION - PASSWORD :sysadminpwd; --- ddl-end -- - --- object: dbadmin | type: ROLE -- ---DROP ROLE IF EXISTS dbadmin; -CREATE ROLE dbadmin WITH - CREATEDB - CREATEROLE - INHERIT - LOGIN - REPLICATION - PASSWORD :dbadminpwd; --- ddl-end -- - --- object: appadmin | type: ROLE -- ---DROP ROLE IF EXISTS appadmin; -CREATE ROLE appadmin WITH - INHERIT - LOGIN - PASSWORD :appadminpwd; --- ddl-end -- - diff --git 
a/data-archive/db_scripts/mosip_archive/role_dbuser.sql b/data-archive/db_scripts/mosip_archive/role_dbuser.sql
new file mode 100644
index 00000000..0125c192
--- /dev/null
+++ b/data-archive/db_scripts/mosip_archive/role_dbuser.sql
@@ -0,0 +1,4 @@
+CREATE ROLE archiveuser WITH
+  INHERIT
+  LOGIN
+  PASSWORD :dbuserpwd;
diff --git a/data-archive/entrypoint.sh b/data-archive/entrypoint.sh
new file mode 100755
index 00000000..225ff02f
--- /dev/null
+++ b/data-archive/entrypoint.sh
@@ -0,0 +1,17 @@
+#!/bin/sh
+# entrypoint.sh
+set -e
+
+echo "Executing db.sh"
+
+bash db.sh
+
+echo "executed db.sh successfully"
+
+sleep 1m
+
+echo "Executing archive.sh"
+
+bash archive.sh
+
+echo "executed archive.sh successfully"
diff --git a/data-archive/requirements.txt b/data-archive/requirements.txt
new file mode 100644
index 00000000..100e24e4
--- /dev/null
+++ b/data-archive/requirements.txt
@@ -0,0 +1,3 @@
+psycopg2-binary==2.9.6
+configparser==5.2.0
+datetime==5.2.0

From 88c0335daa452163f504aa20a97b8335bc2e2626 Mon Sep 17 00:00:00 2001
From: bhumi46
Date: Tue, 14 Nov 2023 17:00:10 +0530
Subject: [PATCH 006/130] [MOSIP-30095]

Signed-off-by: bhumi46
---
 data-archive/archive-jobs/db.properties | 50 ++++++++++++++++++++++++-
 1 file changed, 49 insertions(+), 1 deletion(-)

diff --git a/data-archive/archive-jobs/db.properties b/data-archive/archive-jobs/db.properties
index 07020717..d7503567 100644
--- a/data-archive/archive-jobs/db.properties
+++ b/data-archive/archive-jobs/db.properties
@@ -1,5 +1,5 @@
 [Databases]
-DB_NAMES = AUDIT, CREDENTIAL, IDA
+DB_NAMES = AUDIT,CREDENTIAL,ESIGNET,IDA,IDREPO,KERNEL,MASTER,PMS,REGPRC,RESIDENT
 
 [ARCHIVE]
 ARCHIVE_DB_HOST=192.168.0.142
@@ -32,3 +32,51 @@ IDA_SOURCE_DB_NAME=mosip_ida
 IDA_SOURCE_SCHEMA_NAME=ida
 IDA_SOURCE_DB_UNAME=idauser
 IDA_SOURCE_DB_PASS=mosip123
+
+[ESIGNET]
+ESIGNET_SOURCE_DB_HOST=192.168.0.142
+ESIGNET_SOURCE_DB_PORT=30091
+ESIGNET_SOURCE_DB_NAME=mosip_esignet
+ESIGNET_SOURCE_SCHEMA_NAME=esignet
+ESIGNER_SOURCE_DB_UNAME=esignetuser
+ESIGNET_SOURCE_DB_PASS=mosip123
+
+[KERNEL]
+KERNEL_SOURCE_DB_HOST=192.168.0.142
+KERNEL_SOURCE_DB_PORT=30091
+KERNEL_SOURCE_DB_NAME=mosip_kernel
+KERNEL_SOURCE_SCHEMA_NAME=kernel
+KERNEL_SOURCE_DB_UNAME=kerneluser
+KERNEL_SOURCE_DB_PASS=mosip123
+
+[MASTER]
+MASTER_SOURCE_DB_HOST=192.168.0.142
+MASTER_SOURCE_DB_PORT=30091
+MASTER_SOURCE_DB_NAME=mosip_master
+MASTER_SOURCE_SCHEMA_NAME=master
+MASTER_SOURCE_DB_UNAME=masteruser
+MASTER_SOURCE_DB_PASS=mosip123
+
+[PMS]
+PMS_SOURCE_DB_HOST=192.168.0.142
+PMS_SOURCE_DB_PORT=30091
+PMS_SOURCE_DB_NAME=mosip_pms
+PMS_SOURCE_SCHEMA_NAME=pms
+PMS_SOURCE_DB_UNAME=pmsuser
+PMS_SOURCE_DB_PASS=mosip123
+
+[REGPRC]
+REGPRC_SOURCE_DB_HOST=192.168.0.142
+REGPRC_SOURCE_DB_PORT=30091
+REGPRC_SOURCE_DB_NAME=mosip_regprc
+REGPRC_SOURCE_SCHEMA_NAME=regprc
+REGPRC_SOURCE_DB_UNAME=regprcuser
+REGPRC_SOURCE_DB_PASS=mosip123
+
+[RESIDENT]
+RESIDENT_SOURCE_DB_HOST=192.168.0.142
+RESIDENT_SOURCE_DB_PORT=30091
+RESIDENT_SOURCE_DB_NAME=mosip_resident
+RESIDENT_SOURCE_SCHEMA_NAME=resident
+RESIDENT_SOURCE_DB_UNAME=residentuser
+RESIDNET_SOURCE_DB_PASS=mosip123

From 0c426264f899f49bb99dff83e566288d1a27d8e1 Mon Sep 17 00:00:00 2001
From: bhumi46 <111699703+bhumi46@users.noreply.github.com>
Date: Thu, 16 Nov 2023 17:27:38 +0530
Subject: [PATCH 007/130] Update mosip_archive_main.py

Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com>
---
 .../archive-jobs/mosip_archive_main.py | 88 +++++++++++--------
 1 file changed, 52 insertions(+), 36 deletions(-)

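Every section added in this patch follows one convention: DB_NAMES lists the logical databases, and each of them gets <NAME>_SOURCE_* keys. A hedged sketch of how a consumer walks that convention; the values below are illustrative, not taken from any environment:

    import os

    # Illustrative values; a real run takes these from db.properties or the pod env.
    os.environ.setdefault("DB_NAMES", "AUDIT,CREDENTIAL,IDA")
    os.environ.setdefault("IDA_SOURCE_DB_NAME", "mosip_ida")

    db_names = [name.strip() for name in os.environ["DB_NAMES"].split(",")]
    for db_name in db_names:
        key = f"{db_name}_SOURCE_DB_NAME"   # section name plus a fixed suffix
        print(key, "->", os.environ.get(key))  # unset keys resolve to None

diff --git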
a/data-archive/archive-jobs/mosip_archive_main.py b/data-archive/archive-jobs/mosip_archive_main.py index f218b90e..17c2e049 100644 --- a/data-archive/archive-jobs/mosip_archive_main.py +++ b/data-archive/archive-jobs/mosip_archive_main.py @@ -1,26 +1,21 @@ #!/usr/bin/python # -*- coding: utf-8 -*- import sys +import os import psycopg2 import configparser -import datetime -import os import json from datetime import datetime def config(): config = configparser.ConfigParser() - if os.path.exists('db.properties'): - config.read('db.properties') - archive_param = {key.upper(): config['ARCHIVE'][key] for key in config['ARCHIVE']} - source_param = {db_name: {key.upper(): config[db_name][key] for key in config[db_name]} for db_name in config.sections() if db_name != 'ARCHIVE'} - - # Read database names from properties file - db_names = config.get('Databases', 'DB_NAMES').split(',') - db_names = [name.strip() for name in db_names] # Strip leading and trailing spaces - print("db.properties file found and loaded.") - else: - print("db.properties file not found. Using environment variables.") + archive_param = {} + source_param = {} + db_names = [] + + # Check environment variables first + if os.environ.get('ARCHIVE_DB_HOST') is not None: + print("Using database connection parameters from environment variables.") archive_param = { 'ARCHIVE_DB_HOST': os.environ.get('ARCHIVE_DB_HOST'), 'ARCHIVE_DB_PORT': os.environ.get('ARCHIVE_DB_PORT'), @@ -29,15 +24,14 @@ def config(): 'ARCHIVE_DB_UNAME': os.environ.get('ARCHIVE_DB_UNAME'), 'ARCHIVE_DB_PASS': os.environ.get('ARCHIVE_DB_PASS') } + db_names_env = os.environ.get('DB_NAMES') if db_names_env is not None: - db_names = db_names_env.split(',') - db_names = [name.strip() for name in db_names] # Strip leading and trailing spaces + db_names = [name.strip() for name in db_names_env.split(',')] else: - print("Error: DB_NAMES not found in properties file or environment variables.") + print("Error: DB_NAMES not found in environment variables.") sys.exit(1) - source_param = {} for db_name in db_names: source_param[db_name] = { f'{db_name}_SOURCE_DB_HOST': os.environ.get(f'{db_name}_SOURCE_DB_HOST'), @@ -47,6 +41,28 @@ def config(): f'{db_name}_SOURCE_DB_UNAME': os.environ.get(f'{db_name}_SOURCE_DB_UNAME'), f'{db_name}_SOURCE_DB_PASS': os.environ.get(f'{db_name}_SOURCE_DB_PASS') } + else: + # If environment variables are not set, try reading from db.properties + if os.path.exists('db.properties'): + print("Using database connection parameters from db.properties.") + config.read('db.properties') + archive_param = {key.upper(): config['ARCHIVE'][key] for key in config['ARCHIVE']} + db_names = config.get('Databases', 'DB_NAMES').split(',') + db_names = [name.strip() for name in db_names] + + for db_name in db_names: + source_param[db_name] = { + f'{db_name}_SOURCE_DB_HOST': config.get(db_name, f'{db_name}_SOURCE_DB_HOST'), + f'{db_name}_SOURCE_DB_PORT': config.get(db_name, f'{db_name}_SOURCE_DB_PORT'), + f'{db_name}__SOURCE_DB_NAME': config.get(db_name, f'{db_name}_SOURCE_DB_NAME'), + f'{db_name}_SOURCE_SCHEMA_NAME': config.get(db_name, f'{db_name}_SOURCE_SCHEMA_NAME'), + f'{db_name}_SOURCE_DB_UNAME': config.get(db_name, f'{db_name}_SOURCE_DB_UNAME'), + f'{db_name}_SOURCE_DB_PASS': config.get(db_name, f'{db_name}_SOURCE_DB_PASS') + } + else: + print("Error: db.properties file not found.") + sys.exit(1) + return db_names, archive_param, source_param def getValues(row): @@ -61,15 +77,15 @@ def getValues(row): def read_tables_info(db_name): try: - with 
open('{}_archive_table_info.json'.format(db_name.lower())) as f: + with open(f'{db_name.lower()}_archive_table_info.json') as f: tables_info = json.load(f) - print("{}_archive_table_info.json file found and loaded.".format(db_name.lower())) + print(f"{db_name.lower()}_archive_table_info.json file found and loaded.") return tables_info['tables_info'] except FileNotFoundError: - print("{}_archive_table_info.json file not found. Using environment variables.".format(db_name.lower())) - tables_info = os.environ.get("{}_archive_table_info".format(db_name.lower())) + print(f"{db_name.lower()}_archive_table_info.json file not found. Using environment variables.") + tables_info = os.environ.get(f"{db_name.lower()}_archive_table_info") if tables_info is None: - print("Environment variable {}_archive_table_info not found.".format(db_name.lower())) + print(f"Environment variable {db_name.lower()}_archive_table_info not found.") sys.exit(1) return json.loads(tables_info)['tables_info'] @@ -81,11 +97,11 @@ def dataArchive(db_name, dbparam, tables_info): try: print('Connecting to the PostgreSQL database...') sourceConn = psycopg2.connect( - user=dbparam["{}_SOURCE_DB_UNAME".format(db_name)], - password=dbparam["{}_SOURCE_DB_PASS".format(db_name)], - host=dbparam["{}_SOURCE_DB_HOST".format(db_name)], - port=dbparam["{}_SOURCE_DB_PORT".format(db_name)], - database=dbparam["{}_SOURCE_DB_NAME".format(db_name)] + user=dbparam[f"{db_name}_SOURCE_DB_UNAME"], + password=dbparam[f"{db_name}_SOURCE_DB_PASS"], + host=dbparam[f"{db_name}_SOURCE_DB_HOST"], + port=dbparam[f"{db_name}_SOURCE_DB_PORT"], + database=dbparam[f"{db_name}_SOURCE_DB_NAME"] ) archiveConn = psycopg2.connect( user=dbparam["ARCHIVE_DB_UNAME"], @@ -96,7 +112,7 @@ def dataArchive(db_name, dbparam, tables_info): ) sourceCur = sourceConn.cursor() archiveCur = archiveConn.cursor() - sschemaName = dbparam["{}_SOURCE_SCHEMA_NAME".format(db_name)] + sschemaName = dbparam[f"{db_name}_SOURCE_SCHEMA_NAME"] aschemaName = dbparam["ARCHIVE_SCHEMA_NAME"] # Loop through the list of table_info dictionaries @@ -107,29 +123,29 @@ def dataArchive(db_name, dbparam, tables_info): if 'date_column' in table_info and 'older_than_days' in table_info: date_column = table_info['date_column'] older_than_days = table_info['older_than_days'] - select_query = "SELECT * FROM {0}.{1} WHERE {2} < NOW() - INTERVAL '{3} days'".format(sschemaName, source_table_name, date_column, older_than_days) + select_query = f"SELECT * FROM {sschemaName}.{source_table_name} WHERE {date_column} < NOW() - INTERVAL '{older_than_days} days'" else: - select_query = "SELECT * FROM {0}.{1}".format(sschemaName, source_table_name) + select_query = f"SELECT * FROM {sschemaName}.{source_table_name}" sourceCur.execute(select_query) rows = sourceCur.fetchall() select_count = sourceCur.rowcount - print(select_count, ": Record(s) selected for archive from", source_table_name) + print(f"{select_count} Record(s) selected for archive from {source_table_name}") if select_count > 0: for row in rows: rowValues = getValues(row) - insert_query = "INSERT INTO {0}.{1} VALUES ({2}) ON CONFLICT DO NOTHING".format(aschemaName, archive_table_name, rowValues) + insert_query = f"INSERT INTO {aschemaName}.{archive_table_name} VALUES ({rowValues}) ON CONFLICT DO NOTHING" archiveCur.execute(insert_query) archiveConn.commit() insert_count = archiveCur.rowcount if insert_count == 0: - print("Skipping duplicate record with ID:", row[0]) + print(f"Skipping duplicate record with ID: {row[0]}") else: - print(insert_count, ": Record 
inserted successfully") - delete_query = 'DELETE FROM "{0}"."{1}" WHERE "{2}" = %s'.format(sschemaName, source_table_name, id_column) + print(f"{insert_count} Record inserted successfully") + delete_query = f'DELETE FROM "{sschemaName}"."{source_table_name}" WHERE "{id_column}" = %s' sourceCur.execute(delete_query, (row[0],)) sourceConn.commit() delete_count = sourceCur.rowcount - print(delete_count, ": Record(s) deleted successfully") + print(f"{delete_count} Record(s) deleted successfully") except (Exception, psycopg2.DatabaseError) as error: print("Error during data archiving:", error) finally: From d47bc458cfb6c22ae03d2c5dd5aab60c562803ef Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Thu, 16 Nov 2023 17:37:02 +0530 Subject: [PATCH 008/130] Update db.properties Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- data-archive/archive-jobs/db.properties | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/data-archive/archive-jobs/db.properties b/data-archive/archive-jobs/db.properties index d7503567..4e5a158c 100644 --- a/data-archive/archive-jobs/db.properties +++ b/data-archive/archive-jobs/db.properties @@ -25,14 +25,6 @@ CREDENTIAL_SOURCE_SCHEMA_NAME=credential CREDENTIAL_SOURCE_DB_UNAME=credentialuser CREDENTIAL_SOURCE_DB_PASS=mosip123 -[IDA] -IDA_SOURCE_DB_HOST=192.168.0.142 -IDA_SOURCE_DB_PORT=30091 -IDA_SOURCE_DB_NAME=mosip_ida -IDA_SOURCE_SCHEMA_NAME=ida -IDA_SOURCE_DB_UNAME=idauser -IDA_SOURCE_DB_PASS=mosip123 - [ESIGNET] ESIGNET_SOURCE_DB_HOST=192.168.0.142 ESIGNET_SOURCE_DB_PORT=30091 @@ -41,6 +33,14 @@ ESIGNET_SOURCE_SCHEMA_NAME=esignet ESIGNER_SOURCE_DB_UNAME=esignetuser ESIGNET_SOURCE_DB_PASS=mosip123 +[IDA] +IDA_SOURCE_DB_HOST=192.168.0.142 +IDA_SOURCE_DB_PORT=30091 +IDA_SOURCE_DB_NAME=mosip_ida +IDA_SOURCE_SCHEMA_NAME=ida +IDA_SOURCE_DB_UNAME=idauser +IDA_SOURCE_DB_PASS=mosip123 + [KERNEL] KERNEL_SOURCE_DB_HOST=192.168.0.142 KERNEL_SOURCE_DB_PORT=30091 From 095d811b3d0bd293c741773a3f77cbc50aa7dbe0 Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Thu, 16 Nov 2023 17:44:33 +0530 Subject: [PATCH 009/130] Update .dockerignore Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- data-archive/.dockerignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/data-archive/.dockerignore b/data-archive/.dockerignore index efd2b6a9..652fab52 100755 --- a/data-archive/.dockerignore +++ b/data-archive/.dockerignore @@ -1,4 +1,4 @@ -archive-jobs/db.properties + **/.git **/.gitignore **/.vscode From 49db63deb516c77a2b3230675750c5607ccb1b5e Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Thu, 16 Nov 2023 18:30:32 +0530 Subject: [PATCH 010/130] Update mosip_archive_main.py Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- data-archive/archive-jobs/mosip_archive_main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/data-archive/archive-jobs/mosip_archive_main.py b/data-archive/archive-jobs/mosip_archive_main.py index 17c2e049..5a78277e 100644 --- a/data-archive/archive-jobs/mosip_archive_main.py +++ b/data-archive/archive-jobs/mosip_archive_main.py @@ -54,7 +54,7 @@ def config(): source_param[db_name] = { f'{db_name}_SOURCE_DB_HOST': config.get(db_name, f'{db_name}_SOURCE_DB_HOST'), f'{db_name}_SOURCE_DB_PORT': config.get(db_name, f'{db_name}_SOURCE_DB_PORT'), - f'{db_name}__SOURCE_DB_NAME': config.get(db_name, f'{db_name}_SOURCE_DB_NAME'), 
+ f'{db_name}_SOURCE_DB_NAME': config.get(db_name, f'{db_name}_SOURCE_DB_NAME'), f'{db_name}_SOURCE_SCHEMA_NAME': config.get(db_name, f'{db_name}_SOURCE_SCHEMA_NAME'), f'{db_name}_SOURCE_DB_UNAME': config.get(db_name, f'{db_name}_SOURCE_DB_UNAME'), f'{db_name}_SOURCE_DB_PASS': config.get(db_name, f'{db_name}_SOURCE_DB_PASS') From dcba2fa0b7e402f63a8cd53b9e8cd64b6110adf3 Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Fri, 17 Nov 2023 13:11:13 +0530 Subject: [PATCH 011/130] optimised repetetion of code mosip_archive_main.py Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- .../archive-jobs/mosip_archive_main.py | 28 ++++++++----------- 1 file changed, 12 insertions(+), 16 deletions(-) diff --git a/data-archive/archive-jobs/mosip_archive_main.py b/data-archive/archive-jobs/mosip_archive_main.py index 5a78277e..65f82f08 100644 --- a/data-archive/archive-jobs/mosip_archive_main.py +++ b/data-archive/archive-jobs/mosip_archive_main.py @@ -33,14 +33,7 @@ def config(): sys.exit(1) for db_name in db_names: - source_param[db_name] = { - f'{db_name}_SOURCE_DB_HOST': os.environ.get(f'{db_name}_SOURCE_DB_HOST'), - f'{db_name}_SOURCE_DB_PORT': os.environ.get(f'{db_name}_SOURCE_DB_PORT'), - f'{db_name}_SOURCE_DB_NAME': os.environ.get(f'{db_name}_SOURCE_DB_NAME'), - f'{db_name}_SOURCE_SCHEMA_NAME': os.environ.get(f'{db_name}_SOURCE_SCHEMA_NAME'), - f'{db_name}_SOURCE_DB_UNAME': os.environ.get(f'{db_name}_SOURCE_DB_UNAME'), - f'{db_name}_SOURCE_DB_PASS': os.environ.get(f'{db_name}_SOURCE_DB_PASS') - } + source_param[db_name] = create_source_param(os.environ, db_name) else: # If environment variables are not set, try reading from db.properties if os.path.exists('db.properties'): @@ -51,20 +44,23 @@ def config(): db_names = [name.strip() for name in db_names] for db_name in db_names: - source_param[db_name] = { - f'{db_name}_SOURCE_DB_HOST': config.get(db_name, f'{db_name}_SOURCE_DB_HOST'), - f'{db_name}_SOURCE_DB_PORT': config.get(db_name, f'{db_name}_SOURCE_DB_PORT'), - f'{db_name}_SOURCE_DB_NAME': config.get(db_name, f'{db_name}_SOURCE_DB_NAME'), - f'{db_name}_SOURCE_SCHEMA_NAME': config.get(db_name, f'{db_name}_SOURCE_SCHEMA_NAME'), - f'{db_name}_SOURCE_DB_UNAME': config.get(db_name, f'{db_name}_SOURCE_DB_UNAME'), - f'{db_name}_SOURCE_DB_PASS': config.get(db_name, f'{db_name}_SOURCE_DB_PASS') - } + source_param[db_name] = create_source_param(config, db_name) else: print("Error: db.properties file not found.") sys.exit(1) return db_names, archive_param, source_param +def create_source_param(config, db_name): + return { + f'{db_name}_SOURCE_DB_HOST': config.get(db_name, f'{db_name}_SOURCE_DB_HOST'), + f'{db_name}_SOURCE_DB_PORT': config.get(db_name, f'{db_name}_SOURCE_DB_PORT'), + f'{db_name}_SOURCE_DB_NAME': config.get(db_name, f'{db_name}_SOURCE_DB_NAME'), + f'{db_name}_SOURCE_SCHEMA_NAME': config.get(db_name, f'{db_name}_SOURCE_SCHEMA_NAME'), + f'{db_name}_SOURCE_DB_UNAME': config.get(db_name, f'{db_name}_SOURCE_DB_UNAME'), + f'{db_name}_SOURCE_DB_PASS': config.get(db_name, f'{db_name}_SOURCE_DB_PASS') + } + def getValues(row): finalValues = "" for value in row: From 3f02fcf021184cbd13f5701e2388a2a0f8f0b39c Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Fri, 17 Nov 2023 15:05:25 +0530 Subject: [PATCH 012/130] optimised repetation of code Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- .../archive-jobs/mosip_archive_main.py | 26 +++++++++---------- 1 file changed, 13 
insertions(+), 13 deletions(-) diff --git a/data-archive/archive-jobs/mosip_archive_main.py b/data-archive/archive-jobs/mosip_archive_main.py index 65f82f08..4a48b1c6 100644 --- a/data-archive/archive-jobs/mosip_archive_main.py +++ b/data-archive/archive-jobs/mosip_archive_main.py @@ -8,7 +8,6 @@ from datetime import datetime def config(): - config = configparser.ConfigParser() archive_param = {} source_param = {} db_names = [] @@ -33,32 +32,33 @@ def config(): sys.exit(1) for db_name in db_names: - source_param[db_name] = create_source_param(os.environ, db_name) + source_param[db_name] = create_source_param(config_parser=None, env_vars=os.environ, db_name=db_name) else: # If environment variables are not set, try reading from db.properties if os.path.exists('db.properties'): print("Using database connection parameters from db.properties.") - config.read('db.properties') - archive_param = {key.upper(): config['ARCHIVE'][key] for key in config['ARCHIVE']} - db_names = config.get('Databases', 'DB_NAMES').split(',') + config_parser = configparser.ConfigParser() + config_parser.read('db.properties') + archive_param = {key.upper(): config_parser['ARCHIVE'][key] for key in config_parser['ARCHIVE']} + db_names = config_parser.get('Databases', 'DB_NAMES').split(',') db_names = [name.strip() for name in db_names] for db_name in db_names: - source_param[db_name] = create_source_param(config, db_name) + source_param[db_name] = create_source_param(config_parser=config_parser, env_vars=os.environ, db_name=db_name) else: print("Error: db.properties file not found.") sys.exit(1) return db_names, archive_param, source_param -def create_source_param(config, db_name): +def create_source_param(config_parser, env_vars, db_name): return { - f'{db_name}_SOURCE_DB_HOST': config.get(db_name, f'{db_name}_SOURCE_DB_HOST'), - f'{db_name}_SOURCE_DB_PORT': config.get(db_name, f'{db_name}_SOURCE_DB_PORT'), - f'{db_name}_SOURCE_DB_NAME': config.get(db_name, f'{db_name}_SOURCE_DB_NAME'), - f'{db_name}_SOURCE_SCHEMA_NAME': config.get(db_name, f'{db_name}_SOURCE_SCHEMA_NAME'), - f'{db_name}_SOURCE_DB_UNAME': config.get(db_name, f'{db_name}_SOURCE_DB_UNAME'), - f'{db_name}_SOURCE_DB_PASS': config.get(db_name, f'{db_name}_SOURCE_DB_PASS') + f'{db_name}_SOURCE_DB_HOST': env_vars.get(f'{db_name}_SOURCE_DB_HOST') or config_parser.get(db_name, f'{db_name}_SOURCE_DB_HOST'), + f'{db_name}_SOURCE_DB_PORT': env_vars.get(f'{db_name}_SOURCE_DB_PORT') or config_parser.get(db_name, f'{db_name}_SOURCE_DB_PORT'), + f'{db_name}_SOURCE_DB_NAME': env_vars.get(f'{db_name}_SOURCE_DB_NAME') or config_parser.get(db_name, f'{db_name}_SOURCE_DB_NAME'), + f'{db_name}_SOURCE_SCHEMA_NAME': env_vars.get(f'{db_name}_SOURCE_SCHEMA_NAME') or config_parser.get(db_name, f'{db_name}_SOURCE_SCHEMA_NAME'), + f'{db_name}_SOURCE_DB_UNAME': env_vars.get(f'{db_name}_SOURCE_DB_UNAME') or config_parser.get(db_name, f'{db_name}_SOURCE_DB_UNAME'), + f'{db_name}_SOURCE_DB_PASS': env_vars.get(f'{db_name}_SOURCE_DB_PASS') or config_parser.get(db_name, f'{db_name}_SOURCE_DB_PASS') } def getValues(row): From ee8c5bc4b601b817097efc43d38d59945de4ed5c Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Mon, 20 Nov 2023 17:52:17 +0530 Subject: [PATCH 013/130] Update db.properties Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- data-archive/archive-jobs/db.properties | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/data-archive/archive-jobs/db.properties b/data-archive/archive-jobs/db.properties 
index 4e5a158c..abdbd616 100644
--- a/data-archive/archive-jobs/db.properties
+++ b/data-archive/archive-jobs/db.properties
@@ -30,7 +30,7 @@ ESIGNET_SOURCE_DB_HOST=192.168.0.142
 ESIGNET_SOURCE_DB_PORT=30091
 ESIGNET_SOURCE_DB_NAME=mosip_esignet
 ESIGNET_SOURCE_SCHEMA_NAME=esignet
-ESIGNER_SOURCE_DB_UNAME=esignetuser
+ESIGNET_SOURCE_DB_UNAME=esignetuser
 ESIGNET_SOURCE_DB_PASS=mosip123
 
@@ -79,4 +79,4 @@ RESIDENT_SOURCE_DB_PORT=30091
 RESIDENT_SOURCE_DB_NAME=mosip_resident
 RESIDENT_SOURCE_SCHEMA_NAME=resident
 RESIDENT_SOURCE_DB_UNAME=residentuser
-RESIDNET_SOURCE_DB_PASS=mosip123
+RESIDENT_SOURCE_DB_PASS=mosip123

From fd71c9b2cc1c6987fa7ad22ad7b32150a56fe2a4 Mon Sep 17 00:00:00 2001
From: bhumi46 <111699703+bhumi46@users.noreply.github.com>
Date: Mon, 20 Nov 2023 18:56:02 +0530
Subject: [PATCH 014/130] added for loop for creating source params

Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com>
---
 .../archive-jobs/mosip_archive_main.py | 21 ++++++++++---------
 1 file changed, 11 insertions(+), 10 deletions(-)

diff --git a/data-archive/archive-jobs/mosip_archive_main.py b/data-archive/archive-jobs/mosip_archive_main.py
index 4a48b1c6..1c253e5d 100644
--- a/data-archive/archive-jobs/mosip_archive_main.py
+++ b/data-archive/archive-jobs/mosip_archive_main.py
@@ -52,16 +52,17 @@ def config():
     return db_names, archive_param, source_param
 
 def create_source_param(config_parser, env_vars, db_name):
-    return {
-        f'{db_name}_SOURCE_DB_HOST': env_vars.get(f'{db_name}_SOURCE_DB_HOST') or config_parser.get(db_name, f'{db_name}_SOURCE_DB_HOST'),
-        f'{db_name}_SOURCE_DB_PORT': env_vars.get(f'{db_name}_SOURCE_DB_PORT') or config_parser.get(db_name, f'{db_name}_SOURCE_DB_PORT'),
-        f'{db_name}_SOURCE_DB_NAME': env_vars.get(f'{db_name}_SOURCE_DB_NAME') or config_parser.get(db_name, f'{db_name}_SOURCE_DB_NAME'),
-        f'{db_name}_SOURCE_SCHEMA_NAME': env_vars.get(f'{db_name}_SOURCE_SCHEMA_NAME') or config_parser.get(db_name, f'{db_name}_SOURCE_SCHEMA_NAME'),
-        f'{db_name}_SOURCE_DB_UNAME': env_vars.get(f'{db_name}_SOURCE_DB_UNAME') or config_parser.get(db_name, f'{db_name}_SOURCE_DB_UNAME'),
-        f'{db_name}_SOURCE_DB_PASS': env_vars.get(f'{db_name}_SOURCE_DB_PASS') or config_parser.get(db_name, f'{db_name}_SOURCE_DB_PASS')
-    }
+    param_keys = ['SOURCE_DB_HOST', 'SOURCE_DB_PORT', 'SOURCE_DB_NAME', 'SOURCE_SCHEMA_NAME', 'SOURCE_DB_UNAME', 'SOURCE_DB_PASS']
+    source_param = {}
+
+    for key in param_keys:
+        env_key = f'{db_name}_{key}'
+        source_param[env_key] = env_vars.get(env_key) or config_parser.get(db_name, env_key)
+
+    return source_param
+
-def getValues(row):
+def get_tablevalues(row):
     finalValues = ""
     for value in row:
         if value is None:

From 2afeb87c7a9b5e04d9cb22fdc9e3e3a94e0298cd Mon Sep 17 00:00:00 2001
From: bhumi46 <111699703+bhumi46@users.noreply.github.com>
Date: Tue, 21 Nov 2023 16:04:05 +0530
Subject: [PATCH 015/130] updated print statement

Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com>
---
 data-archive/archive-jobs/mosip_archive_main.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

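The loop above resolves each key from env_vars first and only then falls back to the properties file through the 'or' operator. A small illustration of that precedence with made-up values; note, as an observation rather than part of the patch, that the fallback dereferences config_parser, so the environment-only path (config_parser=None) relies on every variable being set:

    import os
    from configparser import ConfigParser

    config_parser = ConfigParser()
    config_parser.read_string("[IDA]\nIDA_SOURCE_DB_HOST = 192.168.0.142\n")

    env_key = "IDA_SOURCE_DB_HOST"
    os.environ[env_key] = "10.0.0.5"              # environment wins
    value = os.environ.get(env_key) or config_parser.get("IDA", env_key)
    assert value == "10.0.0.5"

    del os.environ[env_key]                       # file value is the fallback
    value = os.environ.get(env_key) or config_parser.get("IDA", env_key)
    assert value == "192.168.0.142"

diff --git a/data-archive/archive-jobs/mosip_archive_main.py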
b/data-archive/archive-jobs/mosip_archive_main.py index 1c253e5d..d990263f 100644 --- a/data-archive/archive-jobs/mosip_archive_main.py +++ b/data-archive/archive-jobs/mosip_archive_main.py @@ -126,7 +126,7 @@ def dataArchive(db_name, dbparam, tables_info): sourceCur.execute(select_query) rows = sourceCur.fetchall() select_count = sourceCur.rowcount - print(f"{select_count} Record(s) selected for archive from {source_table_name}") + print(f"{select_count} Record(s) selected for archive from {source_table_name} from source database {db_name}") if select_count > 0: for row in rows: rowValues = get_tablevalues(row) @@ -135,14 +135,14 @@ def dataArchive(db_name, dbparam, tables_info): archiveConn.commit() insert_count = archiveCur.rowcount if insert_count == 0: - print(f"Skipping duplicate record with ID: {row[0]}") + print(f"Skipping duplicate record with ID: {row[0]} in table {archive_table_name} from source database {db_name}") else: - print(f"{insert_count} Record inserted successfully") + print(f"{insert_count} Record(s) inserted successfully for table {archive_table_name} from source database {db_name}") delete_query = f'DELETE FROM "{sschemaName}"."{source_table_name}" WHERE "{id_column}" = %s' sourceCur.execute(delete_query, (row[0],)) sourceConn.commit() delete_count = sourceCur.rowcount - print(f"{delete_count} Record(s) deleted successfully") + print(f"{delete_count} Record(s) deleted successfully for table {source_table_name} from source database {db_name}") except (Exception, psycopg2.DatabaseError) as error: print("Error during data archiving:", error) finally: From 801df42c0ca223eb2a930ee392999c08254b8894 Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Thu, 23 Nov 2023 00:46:36 +0530 Subject: [PATCH 016/130] Update mosip_archive_main.py Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- .../archive-jobs/mosip_archive_main.py | 282 +++++++++++------- 1 file changed, 173 insertions(+), 109 deletions(-) diff --git a/data-archive/archive-jobs/mosip_archive_main.py b/data-archive/archive-jobs/mosip_archive_main.py index d990263f..3fc75714 100644 --- a/data-archive/archive-jobs/mosip_archive_main.py +++ b/data-archive/archive-jobs/mosip_archive_main.py @@ -1,57 +1,99 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- import sys import os import psycopg2 import configparser import json +import logging from datetime import datetime +# Constants for keys and formats +DB_PROPERTIES_FILE = 'db.properties' +DATE_FORMAT = '%Y-%m-%d %H:%M:%S' + +class Config: + @staticmethod + def get_db_properties_file(): + return DB_PROPERTIES_FILE + +def check_keys(keys, section, prefix=""): + """ + Check if the required keys are present in the given configuration section. + + Args: + keys (list): List of required keys. + section (configparser.SectionProxy): Configuration section to check. + prefix (str, optional): Prefix to use for environment variables. Defaults to "". 
+ """ + for key in keys: + env_key = f"{prefix}_{key}" if prefix else key + if key not in section and env_key not in section: + logging.error(f"Error: {env_key} not found in {section} section.") + sys.exit(1) + def config(): + # Define required keys for archive and database connection + required_archive_keys = ['ARCHIVE_DB_HOST', 'ARCHIVE_DB_PORT', 'ARCHIVE_DB_NAME', 'ARCHIVE_SCHEMA_NAME', 'ARCHIVE_DB_UNAME', 'ARCHIVE_DB_PASS'] + required_db_names_keys = ['DB_NAMES'] + + # Initialize dictionaries to store parameters archive_param = {} source_param = {} db_names = [] - # Check environment variables first - if os.environ.get('ARCHIVE_DB_HOST') is not None: - print("Using database connection parameters from environment variables.") - archive_param = { - 'ARCHIVE_DB_HOST': os.environ.get('ARCHIVE_DB_HOST'), - 'ARCHIVE_DB_PORT': os.environ.get('ARCHIVE_DB_PORT'), - 'ARCHIVE_DB_NAME': os.environ.get('ARCHIVE_DB_NAME'), - 'ARCHIVE_SCHEMA_NAME': os.environ.get('ARCHIVE_SCHEMA_NAME'), - 'ARCHIVE_DB_UNAME': os.environ.get('ARCHIVE_DB_UNAME'), - 'ARCHIVE_DB_PASS': os.environ.get('ARCHIVE_DB_PASS') - } + # Check if db.properties file exists + if os.path.exists(Config.get_db_properties_file()): + logging.info("Using database connection parameters from db.properties.") + config_parser = configparser.ConfigParser() + config_parser.read(Config.get_db_properties_file()) + + # Check if all required keys are present in ARCHIVE section + check_keys(required_archive_keys, config_parser['ARCHIVE']) + # Check if required keys are present in Databases section + check_keys(required_db_names_keys, config_parser['Databases']) + + # Extract archive parameters and database names from the config file + archive_param = {key.upper(): config_parser['ARCHIVE'][key] for key in config_parser['ARCHIVE']} + db_names = [name.strip() for name in config_parser.get('Databases', 'DB_NAMES').split(',')] + + # Extract source parameters for each database + for db_name in db_names: + required_source_keys = ['SOURCE_DB_HOST', 'SOURCE_DB_PORT', 'SOURCE_DB_NAME', 'SOURCE_SCHEMA_NAME', 'SOURCE_DB_UNAME', 'SOURCE_DB_PASS'] + check_keys(required_source_keys, config_parser[db_name], prefix=db_name) + source_param[db_name] = create_source_param(config_parser=config_parser, env_vars=os.environ, db_name=db_name) + else: + logging.error("Error: db.properties file not found. 
Using environment variables.") + check_keys(required_archive_keys, os.environ) + + # Extract database names from environment variables db_names_env = os.environ.get('DB_NAMES') if db_names_env is not None: db_names = [name.strip() for name in db_names_env.split(',')] else: - print("Error: DB_NAMES not found in environment variables.") + logging.error("Error: DB_NAMES not found in environment variables.") sys.exit(1) + # Extract source parameters for each database from environment variables for db_name in db_names: + required_source_keys = ['SOURCE_DB_HOST', 'SOURCE_DB_PORT', 'SOURCE_DB_NAME', 'SOURCE_SCHEMA_NAME', 'SOURCE_DB_UNAME', 'SOURCE_DB_PASS'] + check_keys(required_source_keys, os.environ, prefix=db_name) source_param[db_name] = create_source_param(config_parser=None, env_vars=os.environ, db_name=db_name) - else: - # If environment variables are not set, try reading from db.properties - if os.path.exists('db.properties'): - print("Using database connection parameters from db.properties.") - config_parser = configparser.ConfigParser() - config_parser.read('db.properties') - archive_param = {key.upper(): config_parser['ARCHIVE'][key] for key in config_parser['ARCHIVE']} - db_names = config_parser.get('Databases', 'DB_NAMES').split(',') - db_names = [name.strip() for name in db_names] - - for db_name in db_names: - source_param[db_name] = create_source_param(config_parser=config_parser, env_vars=os.environ, db_name=db_name) - else: - print("Error: db.properties file not found.") - sys.exit(1) + # Return extracted parameters return db_names, archive_param, source_param def create_source_param(config_parser, env_vars, db_name): + """ + Create source parameters for a specific database. + + Args: + config_parser (configparser.ConfigParser, optional): Configuration parser. Defaults to None. + env_vars (dict): Environment variables. + db_name (str): Database name. + + Returns: + dict: Source parameters. + """ param_keys = ['SOURCE_DB_HOST', 'SOURCE_DB_PORT', 'SOURCE_DB_NAME', 'SOURCE_SCHEMA_NAME', 'SOURCE_DB_UNAME', 'SOURCE_DB_PASS'] source_param = {} @@ -61,106 +103,128 @@ def create_source_param(config_parser, env_vars, db_name): return source_param - def get_tablevalues(row): - finalValues = "" + """ + Get formatted values for a row in a table. + + Args: + row (list): Row data. + + Returns: + str: Formatted values. + """ + final_values = "" for value in row: if value is None: - finalValues += "NULL," + final_values += "NULL," else: - finalValues += "'" + str(value) + "'," - finalValues = finalValues[:-1] - return finalValues + final_values += f"'{value}'," + final_values = final_values[:-1] + return final_values def read_tables_info(db_name): + """ + Read table information from a JSON file or environment variable. + + Args: + db_name (str): Database name. + + Returns: + list: List of table information dictionaries. + """ try: with open(f'{db_name.lower()}_archive_table_info.json') as f: tables_info = json.load(f) - print(f"{db_name.lower()}_archive_table_info.json file found and loaded.") + logging.info(f"{db_name.lower()}_archive_table_info.json file found and loaded.") return tables_info['tables_info'] except FileNotFoundError: - print(f"{db_name.lower()}_archive_table_info.json file not found. Using environment variables.") + logging.error(f"{db_name.lower()}_archive_table_info.json file not found. 
Using environment variables.") tables_info = os.environ.get(f"{db_name.lower()}_archive_table_info") if tables_info is None: - print(f"Environment variable {db_name.lower()}_archive_table_info not found.") + logging.error(f"Environment variable {db_name.lower()}_archive_table_info not found.") sys.exit(1) return json.loads(tables_info)['tables_info'] -def dataArchive(db_name, dbparam, tables_info): - sourceConn = None - archiveConn = None - sourceCur = None - archiveCur = None +def data_archive(db_name, db_param, tables_info): + """ + Archive data from a source database to an archive database. + + Args: + db_name (str): Database name. + db_param (dict): Database connection parameters. + tables_info (list): List of table information dictionaries. + """ try: - print('Connecting to the PostgreSQL database...') - sourceConn = psycopg2.connect( - user=dbparam[f"{db_name}_SOURCE_DB_UNAME"], - password=dbparam[f"{db_name}_SOURCE_DB_PASS"], - host=dbparam[f"{db_name}_SOURCE_DB_HOST"], - port=dbparam[f"{db_name}_SOURCE_DB_PORT"], - database=dbparam[f"{db_name}_SOURCE_DB_NAME"] - ) - archiveConn = psycopg2.connect( - user=dbparam["ARCHIVE_DB_UNAME"], - password=dbparam["ARCHIVE_DB_PASS"], - host=dbparam["ARCHIVE_DB_HOST"], - port=dbparam["ARCHIVE_DB_PORT"], - database=dbparam["ARCHIVE_DB_NAME"] - ) - sourceCur = sourceConn.cursor() - archiveCur = archiveConn.cursor() - sschemaName = dbparam[f"{db_name}_SOURCE_SCHEMA_NAME"] - aschemaName = dbparam["ARCHIVE_SCHEMA_NAME"] - - # Loop through the list of table_info dictionaries - for table_info in tables_info: - source_table_name = table_info['source_table'] - archive_table_name = table_info['archive_table'] - id_column = table_info['id_column'] - if 'date_column' in table_info and 'older_than_days' in table_info: - date_column = table_info['date_column'] - older_than_days = table_info['older_than_days'] - select_query = f"SELECT * FROM {sschemaName}.{source_table_name} WHERE {date_column} < NOW() - INTERVAL '{older_than_days} days'" - else: - select_query = f"SELECT * FROM {sschemaName}.{source_table_name}" - sourceCur.execute(select_query) - rows = sourceCur.fetchall() - select_count = sourceCur.rowcount - print(f"{select_count} Record(s) selected for archive from {source_table_name} from source database {db_name}") - if select_count > 0: - for row in rows: - rowValues = get_tablevalues(row) - insert_query = f"INSERT INTO {aschemaName}.{archive_table_name} VALUES ({rowValues}) ON CONFLICT DO NOTHING" - archiveCur.execute(insert_query) - archiveConn.commit() - insert_count = archiveCur.rowcount - if insert_count == 0: - print(f"Skipping duplicate record with ID: {row[0]} in table {archive_table_name} from source database {db_name}") - else: - print(f"{insert_count} Record(s) inserted successfully for table {archive_table_name} from source database {db_name}") - delete_query = f'DELETE FROM "{sschemaName}"."{source_table_name}" WHERE "{id_column}" = %s' - sourceCur.execute(delete_query, (row[0],)) - sourceConn.commit() - delete_count = sourceCur.rowcount - print(f"{delete_count} Record(s) deleted successfully for table {source_table_name} from source database {db_name}") + # Connect to source and archive databases using context managers + with psycopg2.connect( + user=db_param[f"{db_name}_SOURCE_DB_UNAME"], + password=db_param[f"{db_name}_SOURCE_DB_PASS"], + host=db_param[f"{db_name}_SOURCE_DB_HOST"], + port=db_param[f"{db_name}_SOURCE_DB_PORT"], + database=db_param[f"{db_name}_SOURCE_DB_NAME"] + ) as source_conn, source_conn.cursor() as source_cur,\ + 
psycopg2.connect( + user=db_param["ARCHIVE_DB_UNAME"], + password=db_param["ARCHIVE_DB_PASS"], + host=db_param["ARCHIVE_DB_HOST"], + port=db_param["ARCHIVE_DB_PORT"], + database=db_param["ARCHIVE_DB_NAME"] + ) as archive_conn, archive_conn.cursor() as archive_cur: + + sschema_name = db_param[f"{db_name}_SOURCE_SCHEMA_NAME"] + aschema_name = db_param["ARCHIVE_SCHEMA_NAME"] + + for table_info in tables_info: + source_table_name = table_info['source_table'] + archive_table_name = table_info['archive_table'] + id_column = table_info['id_column'] + if 'date_column' in table_info and 'older_than_days' in table_info: + date_column = table_info['date_column'] + older_than_days = table_info['older_than_days'] + select_query = f"SELECT * FROM {sschema_name}.{source_table_name} WHERE {date_column} < NOW() - INTERVAL '{older_than_days} days'" + else: + select_query = f"SELECT * FROM {sschema_name}.{source_table_name}" + source_cur.execute(select_query) + rows = source_cur.fetchall() + select_count = source_cur.rowcount + logging.info(f"{select_count} Record(s) selected for archive from {source_table_name} from source database {db_name}") + + if select_count > 0: + for row in rows: + row_values = get_tablevalues(row) + insert_query = f"INSERT INTO {aschema_name}.{archive_table_name} VALUES ({row_values}) ON CONFLICT DO NOTHING" + archive_cur.execute(insert_query) + archive_conn.commit() + insert_count = archive_cur.rowcount + if insert_count == 0: + logging.warning(f"Skipping duplicate record with ID: {row[0]} in table {archive_table_name} from source database {db_name}") + else: + logging.info(f"{insert_count} Record(s) inserted successfully for table {archive_table_name} from source database {db_name}") + delete_query = f'DELETE FROM "{sschema_name}"."{source_table_name}" WHERE "{id_column}" = %s' + source_cur.execute(delete_query, (row[0],)) + source_conn.commit() + delete_count = source_cur.rowcount + logging.info(f"{delete_count} Record(s) deleted successfully for table {source_table_name} from source database {db_name}") + except (Exception, psycopg2.DatabaseError) as error: - print("Error during data archiving:", error) - finally: - if sourceCur is not None: - sourceCur.close() - if sourceConn is not None: - sourceConn.close() - print('Source database connection closed.') - if archiveCur is not None: - archiveCur.close() - if archiveConn is not None: - archiveConn.close() - print('Archive database connection closed.') + # Handle exceptions during the data archiving process + logging.error("Error during data archiving:", error) if __name__ == '__main__': + # Configure logging settings + logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') + + # Get database names, archive parameters, and source parameters db_names, archive_param, source_param = config() + + # Process each source database for db_name in db_names: - dbparam = source_param[db_name] - dbparam.update(archive_param) + # Combine source and archive parameters + db_param = source_param[db_name] + db_param.update(archive_param) + + # Read table information tables_info = read_tables_info(db_name) - dataArchive(db_name, dbparam, tables_info) + + # Archive data for the current source database + data_archive(db_name, db_param, tables_info) From ac486e7403d549b9bc4ba000b5c121a720c6b778 Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Thu, 23 Nov 2023 00:51:30 +0530 Subject: [PATCH 017/130] Update .dockerignore Signed-off-by: bhumi46 
<111699703+bhumi46@users.noreply.github.com> --- data-archive/.dockerignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/data-archive/.dockerignore b/data-archive/.dockerignore index 652fab52..efd2b6a9 100755 --- a/data-archive/.dockerignore +++ b/data-archive/.dockerignore @@ -1,4 +1,4 @@ - +archive-jobs/db.properties **/.git **/.gitignore **/.vscode From ebb92d16de424e76f2e037db3e941241cb3b7238 Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Thu, 23 Nov 2023 01:00:44 +0530 Subject: [PATCH 018/130] Update mosip_archive_main.py Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- data-archive/archive-jobs/mosip_archive_main.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/data-archive/archive-jobs/mosip_archive_main.py b/data-archive/archive-jobs/mosip_archive_main.py index 3fc75714..a9323f9d 100644 --- a/data-archive/archive-jobs/mosip_archive_main.py +++ b/data-archive/archive-jobs/mosip_archive_main.py @@ -156,6 +156,8 @@ def data_archive(db_name, db_param, tables_info): """ try: # Connect to source and archive databases using context managers + print("db_param:", db_param) + logging.info("db_param: %s", db_param) with psycopg2.connect( user=db_param[f"{db_name}_SOURCE_DB_UNAME"], password=db_param[f"{db_name}_SOURCE_DB_PASS"], From add65f4280e27301af67c3c0fb574dc6665c0bc2 Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Thu, 23 Nov 2023 01:09:35 +0530 Subject: [PATCH 019/130] Update mosip_archive_main.py Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- .../archive-jobs/mosip_archive_main.py | 241 ++++++++---------- 1 file changed, 108 insertions(+), 133 deletions(-) diff --git a/data-archive/archive-jobs/mosip_archive_main.py b/data-archive/archive-jobs/mosip_archive_main.py index a9323f9d..dd51a0bd 100644 --- a/data-archive/archive-jobs/mosip_archive_main.py +++ b/data-archive/archive-jobs/mosip_archive_main.py @@ -1,50 +1,34 @@ +# Import necessary libraries and modules import sys import os import psycopg2 import configparser import json -import logging from datetime import datetime -# Constants for keys and formats -DB_PROPERTIES_FILE = 'db.properties' -DATE_FORMAT = '%Y-%m-%d %H:%M:%S' - -class Config: - @staticmethod - def get_db_properties_file(): - return DB_PROPERTIES_FILE - +# Function to check if required keys are present in a section def check_keys(keys, section, prefix=""): - """ - Check if the required keys are present in the given configuration section. - - Args: - keys (list): List of required keys. - section (configparser.SectionProxy): Configuration section to check. - prefix (str, optional): Prefix to use for environment variables. Defaults to "". 
- """ for key in keys: env_key = f"{prefix}_{key}" if prefix else key if key not in section and env_key not in section: - logging.error(f"Error: {env_key} not found in {section} section.") + print(f"Error: {env_key} not found in {section} section.") sys.exit(1) +# Function to read configuration from file or environment variables def config(): # Define required keys for archive and database connection required_archive_keys = ['ARCHIVE_DB_HOST', 'ARCHIVE_DB_PORT', 'ARCHIVE_DB_NAME', 'ARCHIVE_SCHEMA_NAME', 'ARCHIVE_DB_UNAME', 'ARCHIVE_DB_PASS'] required_db_names_keys = ['DB_NAMES'] - # Initialize dictionaries to store parameters archive_param = {} source_param = {} db_names = [] # Check if db.properties file exists - if os.path.exists(Config.get_db_properties_file()): - logging.info("Using database connection parameters from db.properties.") + if os.path.exists('db.properties'): + print("Using database connection parameters from db.properties.") config_parser = configparser.ConfigParser() - config_parser.read(Config.get_db_properties_file()) + config_parser.read('db.properties') # Check if all required keys are present in ARCHIVE section check_keys(required_archive_keys, config_parser['ARCHIVE']) @@ -54,7 +38,8 @@ def config(): # Extract archive parameters and database names from the config file archive_param = {key.upper(): config_parser['ARCHIVE'][key] for key in config_parser['ARCHIVE']} - db_names = [name.strip() for name in config_parser.get('Databases', 'DB_NAMES').split(',')] + db_names = config_parser.get('Databases', 'DB_NAMES').split(',') + db_names = [name.strip() for name in db_names] # Extract source parameters for each database for db_name in db_names: @@ -62,7 +47,9 @@ def config(): check_keys(required_source_keys, config_parser[db_name], prefix=db_name) source_param[db_name] = create_source_param(config_parser=config_parser, env_vars=os.environ, db_name=db_name) else: - logging.error("Error: db.properties file not found. Using environment variables.") + # Handle case when db.properties file is not found + print("Error: db.properties file not found. Using environment variables.") + # Use environment variables check_keys(required_archive_keys, os.environ) # Extract database names from environment variables @@ -70,7 +57,7 @@ def config(): if db_names_env is not None: db_names = [name.strip() for name in db_names_env.split(',')] else: - logging.error("Error: DB_NAMES not found in environment variables.") + print("Error: DB_NAMES not found in environment variables.") sys.exit(1) # Extract source parameters for each database from environment variables @@ -82,151 +69,139 @@ def config(): # Return extracted parameters return db_names, archive_param, source_param +# Function to create source parameters for a specific database def create_source_param(config_parser, env_vars, db_name): - """ - Create source parameters for a specific database. - - Args: - config_parser (configparser.ConfigParser, optional): Configuration parser. Defaults to None. - env_vars (dict): Environment variables. - db_name (str): Database name. - - Returns: - dict: Source parameters. 
- """ param_keys = ['SOURCE_DB_HOST', 'SOURCE_DB_PORT', 'SOURCE_DB_NAME', 'SOURCE_SCHEMA_NAME', 'SOURCE_DB_UNAME', 'SOURCE_DB_PASS'] source_param = {} + # Extract source parameters from environment variables or config file for key in param_keys: env_key = f'{db_name}_{key}' source_param[env_key] = env_vars.get(env_key) or config_parser.get(db_name, env_key) return source_param +# Function to get formatted values for a row in a table def get_tablevalues(row): - """ - Get formatted values for a row in a table. - - Args: - row (list): Row data. - - Returns: - str: Formatted values. - """ - final_values = "" + finalValues = "" for value in row: if value is None: - final_values += "NULL," + finalValues += "NULL," else: - final_values += f"'{value}'," - final_values = final_values[:-1] - return final_values + finalValues += "'" + str(value) + "'," + finalValues = finalValues[:-1] + return finalValues +# Function to read table information from a JSON file or environment variable def read_tables_info(db_name): - """ - Read table information from a JSON file or environment variable. - - Args: - db_name (str): Database name. - - Returns: - list: List of table information dictionaries. - """ try: + # Attempt to read table information from a JSON file with open(f'{db_name.lower()}_archive_table_info.json') as f: tables_info = json.load(f) - logging.info(f"{db_name.lower()}_archive_table_info.json file found and loaded.") + print(f"{db_name.lower()}_archive_table_info.json file found and loaded.") return tables_info['tables_info'] except FileNotFoundError: - logging.error(f"{db_name.lower()}_archive_table_info.json file not found. Using environment variables.") + # Handle case when JSON file is not found + print(f"{db_name.lower()}_archive_table_info.json file not found. Using environment variables.") tables_info = os.environ.get(f"{db_name.lower()}_archive_table_info") if tables_info is None: - logging.error(f"Environment variable {db_name.lower()}_archive_table_info not found.") + print(f"Environment variable {db_name.lower()}_archive_table_info not found.") sys.exit(1) return json.loads(tables_info)['tables_info'] -def data_archive(db_name, db_param, tables_info): - """ - Archive data from a source database to an archive database. - - Args: - db_name (str): Database name. - db_param (dict): Database connection parameters. - tables_info (list): List of table information dictionaries. 
- """ +# Function to archive data from source to archive database +def dataArchive(db_name, dbparam, tables_info): + sourceConn = None + archiveConn = None + sourceCur = None + archiveCur = None try: - # Connect to source and archive databases using context managers - print("db_param:", db_param) - logging.info("db_param: %s", db_param) - with psycopg2.connect( - user=db_param[f"{db_name}_SOURCE_DB_UNAME"], - password=db_param[f"{db_name}_SOURCE_DB_PASS"], - host=db_param[f"{db_name}_SOURCE_DB_HOST"], - port=db_param[f"{db_name}_SOURCE_DB_PORT"], - database=db_param[f"{db_name}_SOURCE_DB_NAME"] - ) as source_conn, source_conn.cursor() as source_cur,\ - psycopg2.connect( - user=db_param["ARCHIVE_DB_UNAME"], - password=db_param["ARCHIVE_DB_PASS"], - host=db_param["ARCHIVE_DB_HOST"], - port=db_param["ARCHIVE_DB_PORT"], - database=db_param["ARCHIVE_DB_NAME"] - ) as archive_conn, archive_conn.cursor() as archive_cur: - - sschema_name = db_param[f"{db_name}_SOURCE_SCHEMA_NAME"] - aschema_name = db_param["ARCHIVE_SCHEMA_NAME"] - - for table_info in tables_info: - source_table_name = table_info['source_table'] - archive_table_name = table_info['archive_table'] - id_column = table_info['id_column'] - if 'date_column' in table_info and 'older_than_days' in table_info: - date_column = table_info['date_column'] - older_than_days = table_info['older_than_days'] - select_query = f"SELECT * FROM {sschema_name}.{source_table_name} WHERE {date_column} < NOW() - INTERVAL '{older_than_days} days'" - else: - select_query = f"SELECT * FROM {sschema_name}.{source_table_name}" - source_cur.execute(select_query) - rows = source_cur.fetchall() - select_count = source_cur.rowcount - logging.info(f"{select_count} Record(s) selected for archive from {source_table_name} from source database {db_name}") - - if select_count > 0: - for row in rows: - row_values = get_tablevalues(row) - insert_query = f"INSERT INTO {aschema_name}.{archive_table_name} VALUES ({row_values}) ON CONFLICT DO NOTHING" - archive_cur.execute(insert_query) - archive_conn.commit() - insert_count = archive_cur.rowcount - if insert_count == 0: - logging.warning(f"Skipping duplicate record with ID: {row[0]} in table {archive_table_name} from source database {db_name}") - else: - logging.info(f"{insert_count} Record(s) inserted successfully for table {archive_table_name} from source database {db_name}") - delete_query = f'DELETE FROM "{sschema_name}"."{source_table_name}" WHERE "{id_column}" = %s' - source_cur.execute(delete_query, (row[0],)) - source_conn.commit() - delete_count = source_cur.rowcount - logging.info(f"{delete_count} Record(s) deleted successfully for table {source_table_name} from source database {db_name}") - + print(f'Connecting to the PostgreSQL database for {db_name}...') + # Establish connections to source and archive databases + sourceConn = psycopg2.connect( + user=dbparam[f"{db_name}_SOURCE_DB_UNAME"], + password=dbparam[f"{db_name}_SOURCE_DB_PASS"], + host=dbparam[f"{db_name}_SOURCE_DB_HOST"], + port=dbparam[f"{db_name}_SOURCE_DB_PORT"], + database=dbparam[f"{db_name}_SOURCE_DB_NAME"] + ) + archiveConn = psycopg2.connect( + user=dbparam["ARCHIVE_DB_UNAME"], + password=dbparam["ARCHIVE_DB_PASS"], + host=dbparam["ARCHIVE_DB_HOST"], + port=dbparam["ARCHIVE_DB_PORT"], + database=dbparam["ARCHIVE_DB_NAME"] + ) + sourceCur = sourceConn.cursor() + archiveCur = archiveConn.cursor() + sschemaName = dbparam[f"{db_name}_SOURCE_SCHEMA_NAME"] + aschemaName = dbparam["ARCHIVE_SCHEMA_NAME"] + + # Loop through the list of table_info 
dictionaries + for table_info in tables_info: + source_table_name = table_info['source_table'] + archive_table_name = table_info['archive_table'] + id_column = table_info['id_column'] + if 'date_column' in table_info and 'older_than_days' in table_info: + date_column = table_info['date_column'] + older_than_days = table_info['older_than_days'] + # Construct a SELECT query with date-based filtering + select_query = f"SELECT * FROM {sschemaName}.{source_table_name} WHERE {date_column} < NOW() - INTERVAL '{older_than_days} days'" + else: + # Construct a basic SELECT query + select_query = f"SELECT * FROM {sschemaName}.{source_table_name}" + sourceCur.execute(select_query) + rows = sourceCur.fetchall() + select_count = sourceCur.rowcount + print(f"{select_count} Record(s) selected for archive from {source_table_name} from source database {db_name}") + + if select_count > 0: + for row in rows: + rowValues = get_tablevalues(row) + # Construct an INSERT query to archive the selected row + insert_query = f"INSERT INTO {aschemaName}.{archive_table_name} VALUES ({rowValues}) ON CONFLICT DO NOTHING" + archiveCur.execute(insert_query) + archiveConn.commit() + insert_count = archiveCur.rowcount + if insert_count == 0: + print(f"Skipping duplicate record with ID: {row[0]} in table {archive_table_name} from source database {db_name}") + else: + print(f"{insert_count} Record(s) inserted successfully for table {archive_table_name} from source database {db_name}") + # Construct a DELETE query to remove the archived row from the source table + delete_query = f'DELETE FROM "{sschemaName}"."{source_table_name}" WHERE "{id_column}" = %s' + sourceCur.execute(delete_query, (row[0],)) + sourceConn.commit() + delete_count = sourceCur.rowcount + print(f"{delete_count} Record(s) deleted successfully for table {source_table_name} from source database {db_name}") except (Exception, psycopg2.DatabaseError) as error: # Handle exceptions during the data archiving process - logging.error("Error during data archiving:", error) - + print("Error during data archiving:", error) + finally: + # Close database connections + if sourceCur is not None: + sourceCur.close() + if sourceConn is not None: + sourceConn.close() + print(f'Source database connection for {db_name} closed.') + if archiveCur is not None: + archiveCur.close() + if archiveConn is not None: + archiveConn.close() + print('Archive database connection closed.') + +# Main execution when the script is run if __name__ == '__main__': - # Configure logging settings - logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') - # Get database names, archive parameters, and source parameters db_names, archive_param, source_param = config() # Process each source database for db_name in db_names: # Combine source and archive parameters - db_param = source_param[db_name] - db_param.update(archive_param) + dbparam = source_param[db_name] + dbparam.update(archive_param) # Read table information tables_info = read_tables_info(db_name) # Archive data for the current source database - data_archive(db_name, db_param, tables_info) + dataArchive(db_name, dbparam, tables_info) From 5fab9552d10842f283918e9ed0d65646b1d0bd16 Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Thu, 23 Nov 2023 01:18:11 +0530 Subject: [PATCH 020/130] Update mosip_archive_main.py Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- data-archive/archive-jobs/mosip_archive_main.py | 1 + 1 file changed, 1 insertion(+) diff 
--git a/data-archive/archive-jobs/mosip_archive_main.py b/data-archive/archive-jobs/mosip_archive_main.py index dd51a0bd..74c39861 100644 --- a/data-archive/archive-jobs/mosip_archive_main.py +++ b/data-archive/archive-jobs/mosip_archive_main.py @@ -65,6 +65,7 @@ def config(): required_source_keys = ['SOURCE_DB_HOST', 'SOURCE_DB_PORT', 'SOURCE_DB_NAME', 'SOURCE_SCHEMA_NAME', 'SOURCE_DB_UNAME', 'SOURCE_DB_PASS'] check_keys(required_source_keys, os.environ, prefix=db_name) source_param[db_name] = create_source_param(config_parser=None, env_vars=os.environ, db_name=db_name) + print(f"archive_param: {archive_param}") # Return extracted parameters return db_names, archive_param, source_param From 061c6e94338ac48be0d7b7e1e9557691539be373 Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Thu, 23 Nov 2023 01:27:32 +0530 Subject: [PATCH 021/130] Update mosip_archive_main.py Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- data-archive/archive-jobs/mosip_archive_main.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/data-archive/archive-jobs/mosip_archive_main.py b/data-archive/archive-jobs/mosip_archive_main.py index 74c39861..85bc7630 100644 --- a/data-archive/archive-jobs/mosip_archive_main.py +++ b/data-archive/archive-jobs/mosip_archive_main.py @@ -1,3 +1,5 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- # Import necessary libraries and modules import sys import os From 6b58a83e8ff8ced7724f7d7eb9d98177ed102afb Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Thu, 23 Nov 2023 01:47:40 +0530 Subject: [PATCH 022/130] Update mosip_archive_main.py Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- data-archive/archive-jobs/mosip_archive_main.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/data-archive/archive-jobs/mosip_archive_main.py b/data-archive/archive-jobs/mosip_archive_main.py index 85bc7630..0a6bc3c2 100644 --- a/data-archive/archive-jobs/mosip_archive_main.py +++ b/data-archive/archive-jobs/mosip_archive_main.py @@ -52,6 +52,14 @@ def config(): # Handle case when db.properties file is not found print("Error: db.properties file not found. 
Using environment variables.") # Use environment variables + archive_param = { + 'ARCHIVE_DB_HOST': os.environ.get('ARCHIVE_DB_HOST'), + 'ARCHIVE_DB_PORT': os.environ.get('ARCHIVE_DB_PORT'), + 'ARCHIVE_DB_NAME': os.environ.get('ARCHIVE_DB_NAME'), + 'ARCHIVE_SCHEMA_NAME': os.environ.get('ARCHIVE_SCHEMA_NAME'), + 'ARCHIVE_DB_UNAME': os.environ.get('ARCHIVE_DB_UNAME'), + 'ARCHIVE_DB_PASS': os.environ.get('ARCHIVE_DB_PASS') + } check_keys(required_archive_keys, os.environ) # Extract database names from environment variables From 8e9e91c673ec7e03c0d2a51acaa0d2db4501454e Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Thu, 23 Nov 2023 01:59:15 +0530 Subject: [PATCH 023/130] Update mosip_archive_main.py Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- data-archive/archive-jobs/mosip_archive_main.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/data-archive/archive-jobs/mosip_archive_main.py b/data-archive/archive-jobs/mosip_archive_main.py index 0a6bc3c2..337c119b 100644 --- a/data-archive/archive-jobs/mosip_archive_main.py +++ b/data-archive/archive-jobs/mosip_archive_main.py @@ -60,7 +60,7 @@ def config(): 'ARCHIVE_DB_UNAME': os.environ.get('ARCHIVE_DB_UNAME'), 'ARCHIVE_DB_PASS': os.environ.get('ARCHIVE_DB_PASS') } - check_keys(required_archive_keys, os.environ) + check_keys(required_archive_keys, archive_param) # Extract database names from environment variables db_names_env = os.environ.get('DB_NAMES') @@ -75,7 +75,6 @@ def config(): required_source_keys = ['SOURCE_DB_HOST', 'SOURCE_DB_PORT', 'SOURCE_DB_NAME', 'SOURCE_SCHEMA_NAME', 'SOURCE_DB_UNAME', 'SOURCE_DB_PASS'] check_keys(required_source_keys, os.environ, prefix=db_name) source_param[db_name] = create_source_param(config_parser=None, env_vars=os.environ, db_name=db_name) - print(f"archive_param: {archive_param}") # Return extracted parameters return db_names, archive_param, source_param From 3cec54833ca114e7062cb4561fda3df33735fac6 Mon Sep 17 00:00:00 2001 From: bhumi46 Date: Thu, 7 Dec 2023 18:39:28 +0530 Subject: [PATCH 024/130] added parameterised query and dynamic query handling Signed-off-by: bhumi46 --- .../audit_archive_table_info.json | 3 +- .../credential_archive_table_info.json | 3 +- .../esignet_archive_table_info.json | 3 +- .../archive-jobs/ida_archive_table_info.json | 6 +- .../idrepo_archive_table_info.json | 9 +- .../kernel_archive_table_info.json | 3 +- .../master_archive_table_info.json | 18 ++-- .../archive-jobs/mosip_archive_main.py | 90 ++++++++++++------- .../archive-jobs/pms_archive_table_info.json | 9 +- .../regprc_archive_table_info.json | 15 ++-- .../resident_archive_table_info.json | 15 ++-- 11 files changed, 116 insertions(+), 58 deletions(-) diff --git a/data-archive/archive-jobs/audit_archive_table_info.json b/data-archive/archive-jobs/audit_archive_table_info.json index feebdca4..5e91fcb2 100644 --- a/data-archive/archive-jobs/audit_archive_table_info.json +++ b/data-archive/archive-jobs/audit_archive_table_info.json @@ -5,7 +5,8 @@ "archive_table": "mosip_audit_app_audit_log", "id_column": "log_id", "date_column": "log_dtimes", - "older_than_days": 30 + "retension_days": 30, + "need_archival": "true" } ] } diff --git a/data-archive/archive-jobs/credential_archive_table_info.json b/data-archive/archive-jobs/credential_archive_table_info.json index 63efcd92..aa2ee4eb 100644 --- a/data-archive/archive-jobs/credential_archive_table_info.json +++ b/data-archive/archive-jobs/credential_archive_table_info.json @@ -5,7 +5,8 @@ 
"archive_table": "mosip_credential_credential_transaction", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 30 + "retension_days": 30, + "need_archival": "true" } ] } diff --git a/data-archive/archive-jobs/esignet_archive_table_info.json b/data-archive/archive-jobs/esignet_archive_table_info.json index 27718ee8..921fbcad 100644 --- a/data-archive/archive-jobs/esignet_archive_table_info.json +++ b/data-archive/archive-jobs/esignet_archive_table_info.json @@ -5,7 +5,8 @@ "archive_table": "mosip_esignet_consent_history", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 30 + "retension_days": 30, + "need_archival": "true" } ] } diff --git a/data-archive/archive-jobs/ida_archive_table_info.json b/data-archive/archive-jobs/ida_archive_table_info.json index 93a7e987..048caf2f 100644 --- a/data-archive/archive-jobs/ida_archive_table_info.json +++ b/data-archive/archive-jobs/ida_archive_table_info.json @@ -5,14 +5,16 @@ "archive_table": "mosip_ida_credential_event_store", "id_column": "event_id", "date_column": "cr_dtimes", - "older_than_days": 30 + "retension_days": 30, + "need_archival": "true" }, { "source_table": "otp_transaction", "archive_table": "mosip_ida_otp_transaction", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 30 + "retension_days": 30, + "need_archival": "false" } ] } diff --git a/data-archive/archive-jobs/idrepo_archive_table_info.json b/data-archive/archive-jobs/idrepo_archive_table_info.json index 6df08992..eb2108a4 100644 --- a/data-archive/archive-jobs/idrepo_archive_table_info.json +++ b/data-archive/archive-jobs/idrepo_archive_table_info.json @@ -5,21 +5,24 @@ "archive_table": "mosip_idrepo_anonymous_profile", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 30 + "retension_days": 30, + "need_archival": "true" }, { "source_table": "credential_request_status", "archive_table": "mosip_idrepo_credential_request_status", "id_column": "individual_id", "date_column": "cr_dtimes", - "older_than_days": 30 + "retension_days": 30, + "need_archival": "true" }, { "source_table": "uin_draft", "archive_table": "mosip_idrepo_uin_draft", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 30 + "retension_days": 30, + "need_archival": "true" } ] } diff --git a/data-archive/archive-jobs/kernel_archive_table_info.json b/data-archive/archive-jobs/kernel_archive_table_info.json index 00fa570a..b02d5315 100644 --- a/data-archive/archive-jobs/kernel_archive_table_info.json +++ b/data-archive/archive-jobs/kernel_archive_table_info.json @@ -5,7 +5,8 @@ "archive_table": "mosip_kernel_otp_transaction", "id_column": "id", "date_column": "generated_dtimes", - "older_than_days": 7 + "retension_days": 7, + "need_archival": "false" } ] } diff --git a/data-archive/archive-jobs/master_archive_table_info.json b/data-archive/archive-jobs/master_archive_table_info.json index f19588dd..023ef89f 100644 --- a/data-archive/archive-jobs/master_archive_table_info.json +++ b/data-archive/archive-jobs/master_archive_table_info.json @@ -5,42 +5,48 @@ "archive_table": "mosip_master_bulkupload_transaction", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 91 + "retension_days": 91, + "need_archival": "true" }, { "source_table": "device_master_h", "archive_table": "mosip_master_device_master_h", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 365 + "retension_days": 365, + "need_archival": "true" }, { "source_table": "machine_master_h", "archive_table": "mosip_master_machine_master_h", 
"id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 183 + "retension_days": 183, + "need_archival": "true" }, { "source_table": "registration_center_h", "archive_table": "mosip_master_registration_center_h", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 365 + "retension_days": 365 , + "need_archival": "true" }, { "source_table": "user_detail_h", "archive_table": "mosip_master_user_detail_h", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 183 + "retension_days": 183, + "need_archival": "true" }, { "source_table": "zone_user_h", "archive_table": "mosip_master_zone_user_h", "id_column": "usr_id", "date_column": "cr_dtimes", - "older_than_days": 183 + "retension_days": 183, + "need_archival": "true" } ] } diff --git a/data-archive/archive-jobs/mosip_archive_main.py b/data-archive/archive-jobs/mosip_archive_main.py index 337c119b..ce9fe9c1 100644 --- a/data-archive/archive-jobs/mosip_archive_main.py +++ b/data-archive/archive-jobs/mosip_archive_main.py @@ -152,37 +152,66 @@ def dataArchive(db_name, dbparam, tables_info): source_table_name = table_info['source_table'] archive_table_name = table_info['archive_table'] id_column = table_info['id_column'] - if 'date_column' in table_info and 'older_than_days' in table_info: - date_column = table_info['date_column'] - older_than_days = table_info['older_than_days'] - # Construct a SELECT query with date-based filtering - select_query = f"SELECT * FROM {sschemaName}.{source_table_name} WHERE {date_column} < NOW() - INTERVAL '{older_than_days} days'" - else: - # Construct a basic SELECT query - select_query = f"SELECT * FROM {sschemaName}.{source_table_name}" - sourceCur.execute(select_query) - rows = sourceCur.fetchall() - select_count = sourceCur.rowcount - print(f"{select_count} Record(s) selected for archive from {source_table_name} from source database {db_name}") - - if select_count > 0: - for row in rows: - rowValues = get_tablevalues(row) - # Construct an INSERT query to archive the selected row - insert_query = f"INSERT INTO {aschemaName}.{archive_table_name} VALUES ({rowValues}) ON CONFLICT DO NOTHING" - archiveCur.execute(insert_query) - archiveConn.commit() - insert_count = archiveCur.rowcount - if insert_count == 0: - print(f"Skipping duplicate record with ID: {row[0]} in table {archive_table_name} from source database {db_name}") - else: - print(f"{insert_count} Record(s) inserted successfully for table {archive_table_name} from source database {db_name}") - # Construct a DELETE query to remove the archived row from the source table + need_archival = table_info.get('need_archival', 'true').lower() # Default to 'true' if not provided + + if need_archival == 'true': + # Archiving is enabled + if 'date_column' in table_info and 'retention_days' in table_info: + date_column = table_info['date_column'] + retention_days = table_info['retention_days'] + # Construct a SELECT query with date-based filtering + select_query = f"SELECT * FROM {sschemaName}.{source_table_name} WHERE {date_column} < NOW() - INTERVAL '{retention_days} days'" + else: + # Construct a basic SELECT query + select_query = f"SELECT * FROM {sschemaName}.{source_table_name}" + sourceCur.execute(select_query) + rows = sourceCur.fetchall() + select_count = sourceCur.rowcount + print(f"{select_count} Record(s) selected for archive from {source_table_name} from source database {db_name}") + + if select_count > 0: + for row in rows: + rowValues = get_tablevalues(row) + # Construct an INSERT query to archive the selected row 
+ insert_query = f"INSERT INTO {aschemaName}.{archive_table_name} VALUES ({', '.join(['%s']*len(row))}) ON CONFLICT DO NOTHING" + archiveCur.execute(insert_query, row) + archiveConn.commit() + insert_count = archiveCur.rowcount + if insert_count == 0: + print(f"Skipping duplicate record with ID: {row[0]} in table {archive_table_name} from source database {db_name}") + else: + print(f"{insert_count} Record(s) inserted successfully for table {archive_table_name} from source database {db_name}") + # Construct a DELETE query with parameterized values + delete_query = f'DELETE FROM "{sschemaName}"."{source_table_name}" WHERE "{id_column}" = %s' + sourceCur.execute(delete_query, (row[0],)) + sourceConn.commit() + delete_count = sourceCur.rowcount + print(f"{delete_count} Record(s) deleted successfully for table {source_table_name} from source database {db_name}") + elif need_archival == 'false': + # Archiving is disabled, execute a SELECT and DELETE from source + if 'date_column' in table_info and 'retention_days' in table_info: + date_column = table_info['date_column'] + retention_days = table_info['retention_days'] + # Construct a SELECT query with date-based filtering + select_query = f"SELECT * FROM {sschemaName}.{source_table_name} WHERE {date_column} < NOW() - INTERVAL '{retention_days} days'" + else: + # Construct a basic SELECT query + select_query = f"SELECT * FROM {sschemaName}.{source_table_name}" + sourceCur.execute(select_query) + rows = sourceCur.fetchall() + select_count = sourceCur.rowcount + print(f"{select_count} Record(s) selected for deletion from {source_table_name} from source database {db_name}") + + if select_count > 0: + # Construct a DELETE query to remove the selected rows from the source table delete_query = f'DELETE FROM "{sschemaName}"."{source_table_name}" WHERE "{id_column}" = %s' - sourceCur.execute(delete_query, (row[0],)) - sourceConn.commit() - delete_count = sourceCur.rowcount - print(f"{delete_count} Record(s) deleted successfully for table {source_table_name} from source database {db_name}") + for row in rows: + sourceCur.execute(delete_query, (row[0],)) + sourceConn.commit() + delete_count = sourceCur.rowcount + print(f"{delete_count} Record(s) deleted successfully for table {source_table_name} from source database {db_name}") + else: + print(f"Error: Invalid value for 'need_archival' in table {source_table_name}. 
Use 'true' or 'false'.") except (Exception, psycopg2.DatabaseError) as error: # Handle exceptions during the data archiving process print("Error during data archiving:", error) @@ -215,3 +244,4 @@ def dataArchive(db_name, dbparam, tables_info): # Archive data for the current source database dataArchive(db_name, dbparam, tables_info) + diff --git a/data-archive/archive-jobs/pms_archive_table_info.json b/data-archive/archive-jobs/pms_archive_table_info.json index ca1fef84..1972fd1f 100644 --- a/data-archive/archive-jobs/pms_archive_table_info.json +++ b/data-archive/archive-jobs/pms_archive_table_info.json @@ -5,21 +5,24 @@ "archive_table": "mosip_pms_auth_policy_h", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 183 + "retension_days": 183, + "need_archival": "true" }, { "source_table": "secure_biometric_interface_h", "archive_table": "mosip_pms_secure_biometric_interface_h", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 183 + "retension_days": 183, + "need_archival": "true" }, { "source_table": "partner_h", "archive_table": "mosip_pms_partner_h", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 183 + "retension_days": 183, + "need_archival": "true" } ] } diff --git a/data-archive/archive-jobs/regprc_archive_table_info.json b/data-archive/archive-jobs/regprc_archive_table_info.json index 870d6669..e0fdd88c 100644 --- a/data-archive/archive-jobs/regprc_archive_table_info.json +++ b/data-archive/archive-jobs/regprc_archive_table_info.json @@ -5,35 +5,40 @@ "archive_table": "mosip_regprc_abis_response_det", "id_column": "abis_resp_id", "date_column": "cr_dtimes", - "older_than_days": 183 + "retension_days": 183, + "need_archival": "true" }, { "source_table": "abis_response", "archive_table": "mosip_regprc_abis_response", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 183 + "retension_days": 183, + "need_archival": "true" }, { "source_table": "abis_request", "archive_table": "mosip_regprc_abis_request", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 183 + "retension_days": 183, + "need_archival": "true" }, { "source_table": "reg_demo_dedupe_list", "archive_table": "mosip_regprc_reg_demo_dedupe_list", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 183 + "retension_days": 183, + "need_archival": "true" }, { "source_table": "registration_transaction", "archive_table": "mosip_regprc_registration_transaction", "id_column": "regtrn_id", "date_column": "cr_dtimes", - "older_than_days": 183 + "retension_days": 183, + "need_archival": "true" } ] } diff --git a/data-archive/archive-jobs/resident_archive_table_info.json b/data-archive/archive-jobs/resident_archive_table_info.json index 1b1df885..0024d61c 100644 --- a/data-archive/archive-jobs/resident_archive_table_info.json +++ b/data-archive/archive-jobs/resident_archive_table_info.json @@ -5,35 +5,40 @@ "archive_table": "mosip_resident_otp_transaction", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 30 + "retension_days": 30, + "need_archival": "false" }, { "source_table": "resident_grievance_ticket", "archive_table": "mosip_resident_grievance_ticket", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 365 + "retension_days": 365, + "need_archival": "true" }, { "source_table": "resident_session", "archive_table": "mosip_resident_session", "id_column": "session_id", "date_column": "login_dtimes", - "older_than_days": 30 + "retension_days": 30, + "need_archival": "true" }, { "source_table": 
"resident_transaction", "archive_table": "mosip_resident_transaction", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 365 + "retension_days": 365, + "need_archival": "true" }, { "source_table": "resident_user_actions", "archive_table": "mosip_resident_user_actions", "id_column": "ida_token", "date_column": "last_bell_notif_click_dtimes", - "older_than_days": 365 + "retension_days": 365, + "need_archival": "true" } ] } From b12d8b796302afdb39d6a24e5a90f04ca8765cef Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Mon, 11 Dec 2023 18:48:05 +0530 Subject: [PATCH 025/130] Created prereg_archive_table_info.json Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- .../archive-jobs/prereg_archive_table_info.json | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 data-archive/archive-jobs/prereg_archive_table_info.json diff --git a/data-archive/archive-jobs/prereg_archive_table_info.json b/data-archive/archive-jobs/prereg_archive_table_info.json new file mode 100644 index 00000000..10b172ce --- /dev/null +++ b/data-archive/archive-jobs/prereg_archive_table_info.json @@ -0,0 +1,12 @@ +{ + "tables_info": [ + { + "source_table": "otp_transaction", + "archive_table": "mosip_prereg_otp_transaction", + "id_column": "id", + "date_column": "cr_dtimes", + "retension_days": 30, + "need_archival": "false" + } + ] +} From 567b9f7706134d9e2b3e6b86bff238bfc0297acc Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Mon, 11 Dec 2023 18:54:44 +0530 Subject: [PATCH 026/130] Created archive-prereg-otp_transactio.sqln Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- .../ddl/archive-prereg-otp_transactio.sqln | 35 +++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-prereg-otp_transactio.sqln diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-prereg-otp_transactio.sqln b/data-archive/db_scripts/mosip_archive/ddl/archive-prereg-otp_transactio.sqln new file mode 100644 index 00000000..a97bb61b --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-prereg-otp_transactio.sqln @@ -0,0 +1,35 @@ +-- This is used to save the OTP for the user whenever user requests for one using the email id / phone number to log into the application. + +CREATE TABLE archive.mosip_prereg_otp_transaction( + id character varying(36) NOT NULL, + ref_id character varying(64) NOT NULL, + otp_hash character varying(512) NOT NULL, + generated_dtimes timestamp, + expiry_dtimes timestamp, + validation_retry_count smallint, + status_code character varying(36), + lang_code character varying(3), + cr_by character varying(256) NOT NULL, + cr_dtimes timestamp NOT NULL, + upd_by character varying(256), + upd_dtimes timestamp, + is_deleted boolean, + del_dtimes timestamp, + CONSTRAINT pk_otpt_id PRIMARY KEY (id) +); + +COMMENT ON TABLE archive.mosip_prereg_otp_transaction IS 'All OTP related data and validation details are maintained here for Pre Registration module.'; +COMMENT ON COLUMN archive.mosip_prereg_otp_transaction.id IS 'OTP id is a unique identifier (UUID) used as an unique key to identify the OTP transaction'; +COMMENT ON COLUMN archive.mosip_prereg_otp_transaction.ref_id IS 'Reference ID is a reference information received from OTP requester which can be used while validating the OTP. 
AM: please give examples of ref_id'; +COMMENT ON COLUMN archive.mosip_prereg_otp_transaction.otp_hash IS 'Hash of id, ref_id and otp which is generated based on the configuration setup and sent to the requester application / module.'; +COMMENT ON COLUMN archive.mosip_prereg_otp_transaction.generated_dtimes IS 'Date and Time when the OTP was generated'; +COMMENT ON COLUMN archive.mosip_prereg_otp_transaction.expiry_dtimes IS 'Date Time when the OTP will be expired'; +COMMENT ON COLUMN archive.mosip_prereg_otp_transaction.validation_retry_count IS 'Validation retry counts of this OTP request. If the validation retry crosses the threshold limit, then the OTP will be de-activated.'; +COMMENT ON COLUMN archive.mosip_prereg_otp_transaction.status_code IS 'Current status of the transaction. Refers to code field of master.status_list table.'; +COMMENT ON COLUMN archive.mosip_prereg_otp_transaction.lang_code IS 'For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language.'; +COMMENT ON COLUMN archive.mosip_prereg_otp_transaction.cr_by IS 'ID or name of the user who create / insert record.'; +COMMENT ON COLUMN archive.mosip_prereg_otp_transaction.cr_dtimes IS 'Date and Timestamp when the record is created/inserted'; +COMMENT ON COLUMN archive.mosip_prereg_otp_transaction.upd_by IS 'ID or name of the user who update the record with new values'; +COMMENT ON COLUMN archive.mosip_prereg_otp_transaction.upd_dtimes IS 'Date and Timestamp when any of the fields in the record is updated with new values.'; +COMMENT ON COLUMN archive.mosip_prereg_otp_transaction.is_deleted IS 'Flag to mark whether the record is Soft deleted.'; +COMMENT ON COLUMN archive.mosip_prereg_otp_transaction.del_dtimes IS 'Date and Timestamp when the record is soft deleted with is_deleted=TRUE'; From 6fae1d4e464a8a1290830e937b84753c0bfef183 Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Mon, 11 Dec 2023 18:56:13 +0530 Subject: [PATCH 027/130] Rename archive-prereg-otp_transactio.sqln to archive-prereg-otp_transaction.sql Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- ...reg-otp_transactio.sqln => archive-prereg-otp_transaction.sql} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename data-archive/db_scripts/mosip_archive/ddl/{archive-prereg-otp_transactio.sqln => archive-prereg-otp_transaction.sql} (100%) diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-prereg-otp_transactio.sqln b/data-archive/db_scripts/mosip_archive/ddl/archive-prereg-otp_transaction.sql similarity index 100% rename from data-archive/db_scripts/mosip_archive/ddl/archive-prereg-otp_transactio.sqln rename to data-archive/db_scripts/mosip_archive/ddl/archive-prereg-otp_transaction.sql From 068a494ec4c7732dfd3e7a5ad541e477a1c608aa Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Mon, 11 Dec 2023 18:58:09 +0530 Subject: [PATCH 028/130] Update ddl.sql Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- data-archive/db_scripts/mosip_archive/ddl.sql | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/data-archive/db_scripts/mosip_archive/ddl.sql b/data-archive/db_scripts/mosip_archive/ddl.sql index 43df85d5..89635ba6 100644 --- a/data-archive/db_scripts/mosip_archive/ddl.sql +++ b/data-archive/db_scripts/mosip_archive/ddl.sql @@ -31,6 +31,7 @@ \ir ddl/archive-pms-auth_policy_h.sql \ir 
ddl/archive-pms-partner_h.sql \ir ddl/archive-pms-secure_biometric_interface_h.sql +\ir ddl/archive-prereg-otp_transaction.sql \ir ddl/archive-resident_grievance_ticket.sql \ir ddl/archive-resident-otp_transaction.sql \ir ddl/archive-resident_session.sql @@ -40,4 +41,4 @@ \ir ddl/archive-regprc-registration_transaction.sql \ir ddl/archive-regprc-abis_response_det.sql \ir ddl/archive-regprc-abis_response.sql -\ir ddl/archive-regprc-abis_request.sql \ No newline at end of file +\ir ddl/archive-regprc-abis_request.sql From 8f9e8c64c5ff207d4b52c1e2f9db0f185b2b9c7a Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Thu, 14 Dec 2023 20:33:35 +0530 Subject: [PATCH 029/130] [MOSIP-29979] updated otp transaction deletion in json (#10) * added parameterised query and dynamic query handling Signed-off-by: bhumi46 * Created prereg_archive_table_info.json Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> * Created archive-prereg-otp_transactio.sqln Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> * Rename archive-prereg-otp_transactio.sqln to archive-prereg-otp_transaction.sql Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> * Update ddl.sql Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --------- Signed-off-by: bhumi46 Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- .../audit_archive_table_info.json | 3 +- .../credential_archive_table_info.json | 3 +- .../esignet_archive_table_info.json | 3 +- .../archive-jobs/ida_archive_table_info.json | 6 +- .../idrepo_archive_table_info.json | 9 +- .../kernel_archive_table_info.json | 3 +- .../master_archive_table_info.json | 18 ++-- .../archive-jobs/mosip_archive_main.py | 90 ++++++++++++------- .../archive-jobs/pms_archive_table_info.json | 9 +- .../prereg_archive_table_info.json | 12 +++ .../regprc_archive_table_info.json | 15 ++-- .../resident_archive_table_info.json | 15 ++-- data-archive/db_scripts/mosip_archive/ddl.sql | 3 +- .../ddl/archive-prereg-otp_transaction.sql | 35 ++++++++ 14 files changed, 165 insertions(+), 59 deletions(-) create mode 100644 data-archive/archive-jobs/prereg_archive_table_info.json create mode 100644 data-archive/db_scripts/mosip_archive/ddl/archive-prereg-otp_transaction.sql diff --git a/data-archive/archive-jobs/audit_archive_table_info.json b/data-archive/archive-jobs/audit_archive_table_info.json index feebdca4..5e91fcb2 100644 --- a/data-archive/archive-jobs/audit_archive_table_info.json +++ b/data-archive/archive-jobs/audit_archive_table_info.json @@ -5,7 +5,8 @@ "archive_table": "mosip_audit_app_audit_log", "id_column": "log_id", "date_column": "log_dtimes", - "older_than_days": 30 + "retension_days": 30, + "need_archival": "true" } ] } diff --git a/data-archive/archive-jobs/credential_archive_table_info.json b/data-archive/archive-jobs/credential_archive_table_info.json index 63efcd92..aa2ee4eb 100644 --- a/data-archive/archive-jobs/credential_archive_table_info.json +++ b/data-archive/archive-jobs/credential_archive_table_info.json @@ -5,7 +5,8 @@ "archive_table": "mosip_credential_credential_transaction", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 30 + "retension_days": 30, + "need_archival": "true" } ] } diff --git a/data-archive/archive-jobs/esignet_archive_table_info.json b/data-archive/archive-jobs/esignet_archive_table_info.json index 27718ee8..921fbcad 100644 --- a/data-archive/archive-jobs/esignet_archive_table_info.json +++ 
b/data-archive/archive-jobs/esignet_archive_table_info.json @@ -5,7 +5,8 @@ "archive_table": "mosip_esignet_consent_history", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 30 + "retension_days": 30, + "need_archival": "true" } ] } diff --git a/data-archive/archive-jobs/ida_archive_table_info.json b/data-archive/archive-jobs/ida_archive_table_info.json index 93a7e987..048caf2f 100644 --- a/data-archive/archive-jobs/ida_archive_table_info.json +++ b/data-archive/archive-jobs/ida_archive_table_info.json @@ -5,14 +5,16 @@ "archive_table": "mosip_ida_credential_event_store", "id_column": "event_id", "date_column": "cr_dtimes", - "older_than_days": 30 + "retension_days": 30, + "need_archival": "true" }, { "source_table": "otp_transaction", "archive_table": "mosip_ida_otp_transaction", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 30 + "retension_days": 30, + "need_archival": "false" } ] } diff --git a/data-archive/archive-jobs/idrepo_archive_table_info.json b/data-archive/archive-jobs/idrepo_archive_table_info.json index 6df08992..eb2108a4 100644 --- a/data-archive/archive-jobs/idrepo_archive_table_info.json +++ b/data-archive/archive-jobs/idrepo_archive_table_info.json @@ -5,21 +5,24 @@ "archive_table": "mosip_idrepo_anonymous_profile", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 30 + "retension_days": 30, + "need_archival": "true" }, { "source_table": "credential_request_status", "archive_table": "mosip_idrepo_credential_request_status", "id_column": "individual_id", "date_column": "cr_dtimes", - "older_than_days": 30 + "retension_days": 30, + "need_archival": "true" }, { "source_table": "uin_draft", "archive_table": "mosip_idrepo_uin_draft", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 30 + "retension_days": 30, + "need_archival": "true" } ] } diff --git a/data-archive/archive-jobs/kernel_archive_table_info.json b/data-archive/archive-jobs/kernel_archive_table_info.json index 00fa570a..b02d5315 100644 --- a/data-archive/archive-jobs/kernel_archive_table_info.json +++ b/data-archive/archive-jobs/kernel_archive_table_info.json @@ -5,7 +5,8 @@ "archive_table": "mosip_kernel_otp_transaction", "id_column": "id", "date_column": "generated_dtimes", - "older_than_days": 7 + "retension_days": 7, + "need_archival": "false" } ] } diff --git a/data-archive/archive-jobs/master_archive_table_info.json b/data-archive/archive-jobs/master_archive_table_info.json index f19588dd..023ef89f 100644 --- a/data-archive/archive-jobs/master_archive_table_info.json +++ b/data-archive/archive-jobs/master_archive_table_info.json @@ -5,42 +5,48 @@ "archive_table": "mosip_master_bulkupload_transaction", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 91 + "retension_days": 91, + "need_archival": "true" }, { "source_table": "device_master_h", "archive_table": "mosip_master_device_master_h", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 365 + "retension_days": 365, + "need_archival": "true" }, { "source_table": "machine_master_h", "archive_table": "mosip_master_machine_master_h", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 183 + "retension_days": 183, + "need_archival": "true" }, { "source_table": "registration_center_h", "archive_table": "mosip_master_registration_center_h", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 365 + "retension_days": 365 , + "need_archival": "true" }, { "source_table": "user_detail_h", "archive_table": 
"mosip_master_user_detail_h", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 183 + "retension_days": 183, + "need_archival": "true" }, { "source_table": "zone_user_h", "archive_table": "mosip_master_zone_user_h", "id_column": "usr_id", "date_column": "cr_dtimes", - "older_than_days": 183 + "retension_days": 183, + "need_archival": "true" } ] } diff --git a/data-archive/archive-jobs/mosip_archive_main.py b/data-archive/archive-jobs/mosip_archive_main.py index 337c119b..ce9fe9c1 100644 --- a/data-archive/archive-jobs/mosip_archive_main.py +++ b/data-archive/archive-jobs/mosip_archive_main.py @@ -152,37 +152,66 @@ def dataArchive(db_name, dbparam, tables_info): source_table_name = table_info['source_table'] archive_table_name = table_info['archive_table'] id_column = table_info['id_column'] - if 'date_column' in table_info and 'older_than_days' in table_info: - date_column = table_info['date_column'] - older_than_days = table_info['older_than_days'] - # Construct a SELECT query with date-based filtering - select_query = f"SELECT * FROM {sschemaName}.{source_table_name} WHERE {date_column} < NOW() - INTERVAL '{older_than_days} days'" - else: - # Construct a basic SELECT query - select_query = f"SELECT * FROM {sschemaName}.{source_table_name}" - sourceCur.execute(select_query) - rows = sourceCur.fetchall() - select_count = sourceCur.rowcount - print(f"{select_count} Record(s) selected for archive from {source_table_name} from source database {db_name}") - - if select_count > 0: - for row in rows: - rowValues = get_tablevalues(row) - # Construct an INSERT query to archive the selected row - insert_query = f"INSERT INTO {aschemaName}.{archive_table_name} VALUES ({rowValues}) ON CONFLICT DO NOTHING" - archiveCur.execute(insert_query) - archiveConn.commit() - insert_count = archiveCur.rowcount - if insert_count == 0: - print(f"Skipping duplicate record with ID: {row[0]} in table {archive_table_name} from source database {db_name}") - else: - print(f"{insert_count} Record(s) inserted successfully for table {archive_table_name} from source database {db_name}") - # Construct a DELETE query to remove the archived row from the source table + need_archival = table_info.get('need_archival', 'true').lower() # Default to 'true' if not provided + + if need_archival == 'true': + # Archiving is enabled + if 'date_column' in table_info and 'retention_days' in table_info: + date_column = table_info['date_column'] + retention_days = table_info['retention_days'] + # Construct a SELECT query with date-based filtering + select_query = f"SELECT * FROM {sschemaName}.{source_table_name} WHERE {date_column} < NOW() - INTERVAL '{retention_days} days'" + else: + # Construct a basic SELECT query + select_query = f"SELECT * FROM {sschemaName}.{source_table_name}" + sourceCur.execute(select_query) + rows = sourceCur.fetchall() + select_count = sourceCur.rowcount + print(f"{select_count} Record(s) selected for archive from {source_table_name} from source database {db_name}") + + if select_count > 0: + for row in rows: + rowValues = get_tablevalues(row) + # Construct an INSERT query to archive the selected row + insert_query = f"INSERT INTO {aschemaName}.{archive_table_name} VALUES ({', '.join(['%s']*len(row))}) ON CONFLICT DO NOTHING" + archiveCur.execute(insert_query, row) + archiveConn.commit() + insert_count = archiveCur.rowcount + if insert_count == 0: + print(f"Skipping duplicate record with ID: {row[0]} in table {archive_table_name} from source database {db_name}") + else: + print(f"{insert_count} 
Record(s) inserted successfully for table {archive_table_name} from source database {db_name}") + # Construct a DELETE query with parameterized values + delete_query = f'DELETE FROM "{sschemaName}"."{source_table_name}" WHERE "{id_column}" = %s' + sourceCur.execute(delete_query, (row[0],)) + sourceConn.commit() + delete_count = sourceCur.rowcount + print(f"{delete_count} Record(s) deleted successfully for table {source_table_name} from source database {db_name}") + elif need_archival == 'false': + # Archiving is disabled, execute a SELECT and DELETE from source + if 'date_column' in table_info and 'retention_days' in table_info: + date_column = table_info['date_column'] + retention_days = table_info['retention_days'] + # Construct a SELECT query with date-based filtering + select_query = f"SELECT * FROM {sschemaName}.{source_table_name} WHERE {date_column} < NOW() - INTERVAL '{retention_days} days'" + else: + # Construct a basic SELECT query + select_query = f"SELECT * FROM {sschemaName}.{source_table_name}" + sourceCur.execute(select_query) + rows = sourceCur.fetchall() + select_count = sourceCur.rowcount + print(f"{select_count} Record(s) selected for deletion from {source_table_name} from source database {db_name}") + + if select_count > 0: + # Construct a DELETE query to remove the selected rows from the source table delete_query = f'DELETE FROM "{sschemaName}"."{source_table_name}" WHERE "{id_column}" = %s' - sourceCur.execute(delete_query, (row[0],)) - sourceConn.commit() - delete_count = sourceCur.rowcount - print(f"{delete_count} Record(s) deleted successfully for table {source_table_name} from source database {db_name}") + for row in rows: + sourceCur.execute(delete_query, (row[0],)) + sourceConn.commit() + delete_count = sourceCur.rowcount + print(f"{delete_count} Record(s) deleted successfully for table {source_table_name} from source database {db_name}") + else: + print(f"Error: Invalid value for 'need_archival' in table {source_table_name}. 
Use 'true' or 'false'.") except (Exception, psycopg2.DatabaseError) as error: # Handle exceptions during the data archiving process print("Error during data archiving:", error) @@ -215,3 +244,4 @@ def dataArchive(db_name, dbparam, tables_info): # Archive data for the current source database dataArchive(db_name, dbparam, tables_info) + diff --git a/data-archive/archive-jobs/pms_archive_table_info.json b/data-archive/archive-jobs/pms_archive_table_info.json index ca1fef84..1972fd1f 100644 --- a/data-archive/archive-jobs/pms_archive_table_info.json +++ b/data-archive/archive-jobs/pms_archive_table_info.json @@ -5,21 +5,24 @@ "archive_table": "mosip_pms_auth_policy_h", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 183 + "retension_days": 183, + "need_archival": "true" }, { "source_table": "secure_biometric_interface_h", "archive_table": "mosip_pms_secure_biometric_interface_h", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 183 + "retension_days": 183, + "need_archival": "true" }, { "source_table": "partner_h", "archive_table": "mosip_pms_partner_h", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 183 + "retension_days": 183, + "need_archival": "true" } ] } diff --git a/data-archive/archive-jobs/prereg_archive_table_info.json b/data-archive/archive-jobs/prereg_archive_table_info.json new file mode 100644 index 00000000..10b172ce --- /dev/null +++ b/data-archive/archive-jobs/prereg_archive_table_info.json @@ -0,0 +1,12 @@ +{ + "tables_info": [ + { + "source_table": "otp_transaction", + "archive_table": "mosip_prereg_otp_transaction", + "id_column": "id", + "date_column": "cr_dtimes", + "retension_days": 30, + "need_archival": "false" + } + ] +} diff --git a/data-archive/archive-jobs/regprc_archive_table_info.json b/data-archive/archive-jobs/regprc_archive_table_info.json index 870d6669..e0fdd88c 100644 --- a/data-archive/archive-jobs/regprc_archive_table_info.json +++ b/data-archive/archive-jobs/regprc_archive_table_info.json @@ -5,35 +5,40 @@ "archive_table": "mosip_regprc_abis_response_det", "id_column": "abis_resp_id", "date_column": "cr_dtimes", - "older_than_days": 183 + "retension_days": 183, + "need_archival": "true" }, { "source_table": "abis_response", "archive_table": "mosip_regprc_abis_response", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 183 + "retension_days": 183, + "need_archival": "true" }, { "source_table": "abis_request", "archive_table": "mosip_regprc_abis_request", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 183 + "retension_days": 183, + "need_archival": "true" }, { "source_table": "reg_demo_dedupe_list", "archive_table": "mosip_regprc_reg_demo_dedupe_list", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 183 + "retension_days": 183, + "need_archival": "true" }, { "source_table": "registration_transaction", "archive_table": "mosip_regprc_registration_transaction", "id_column": "regtrn_id", "date_column": "cr_dtimes", - "older_than_days": 183 + "retension_days": 183, + "need_archival": "true" } ] } diff --git a/data-archive/archive-jobs/resident_archive_table_info.json b/data-archive/archive-jobs/resident_archive_table_info.json index 1b1df885..0024d61c 100644 --- a/data-archive/archive-jobs/resident_archive_table_info.json +++ b/data-archive/archive-jobs/resident_archive_table_info.json @@ -5,35 +5,40 @@ "archive_table": "mosip_resident_otp_transaction", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 30 + "retension_days": 
30, + "need_archival": "false" }, { "source_table": "resident_grievance_ticket", "archive_table": "mosip_resident_grievance_ticket", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 365 + "retension_days": 365, + "need_archival": "true" }, { "source_table": "resident_session", "archive_table": "mosip_resident_session", "id_column": "session_id", "date_column": "login_dtimes", - "older_than_days": 30 + "retension_days": 30, + "need_archival": "true" }, { "source_table": "resident_transaction", "archive_table": "mosip_resident_transaction", "id_column": "id", "date_column": "cr_dtimes", - "older_than_days": 365 + "retension_days": 365, + "need_archival": "true" }, { "source_table": "resident_user_actions", "archive_table": "mosip_resident_user_actions", "id_column": "ida_token", "date_column": "last_bell_notif_click_dtimes", - "older_than_days": 365 + "retension_days": 365, + "need_archival": "true" } ] } diff --git a/data-archive/db_scripts/mosip_archive/ddl.sql b/data-archive/db_scripts/mosip_archive/ddl.sql index 43df85d5..89635ba6 100644 --- a/data-archive/db_scripts/mosip_archive/ddl.sql +++ b/data-archive/db_scripts/mosip_archive/ddl.sql @@ -31,6 +31,7 @@ \ir ddl/archive-pms-auth_policy_h.sql \ir ddl/archive-pms-partner_h.sql \ir ddl/archive-pms-secure_biometric_interface_h.sql +\ir ddl/archive-prereg-otp_transaction.sql \ir ddl/archive-resident_grievance_ticket.sql \ir ddl/archive-resident-otp_transaction.sql \ir ddl/archive-resident_session.sql @@ -40,4 +41,4 @@ \ir ddl/archive-regprc-registration_transaction.sql \ir ddl/archive-regprc-abis_response_det.sql \ir ddl/archive-regprc-abis_response.sql -\ir ddl/archive-regprc-abis_request.sql \ No newline at end of file +\ir ddl/archive-regprc-abis_request.sql diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-prereg-otp_transaction.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-prereg-otp_transaction.sql new file mode 100644 index 00000000..a97bb61b --- /dev/null +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-prereg-otp_transaction.sql @@ -0,0 +1,35 @@ +-- This is used to save the OTP for the user whenever user requests for one using the email id / phone number to log into the application. + +CREATE TABLE archive.mosip_prereg_otp_transaction( + id character varying(36) NOT NULL, + ref_id character varying(64) NOT NULL, + otp_hash character varying(512) NOT NULL, + generated_dtimes timestamp, + expiry_dtimes timestamp, + validation_retry_count smallint, + status_code character varying(36), + lang_code character varying(3), + cr_by character varying(256) NOT NULL, + cr_dtimes timestamp NOT NULL, + upd_by character varying(256), + upd_dtimes timestamp, + is_deleted boolean, + del_dtimes timestamp, + CONSTRAINT pk_otpt_id PRIMARY KEY (id) +); + +COMMENT ON TABLE archive.mosip_prereg_otp_transaction IS 'All OTP related data and validation details are maintained here for Pre Registration module.'; +COMMENT ON COLUMN archive.mosip_prereg_otp_transaction.id IS 'OTP id is a unique identifier (UUID) used as an unique key to identify the OTP transaction'; +COMMENT ON COLUMN archive.mosip_prereg_otp_transaction.ref_id IS 'Reference ID is a reference information received from OTP requester which can be used while validating the OTP. 
';
+COMMENT ON COLUMN archive.mosip_prereg_otp_transaction.otp_hash IS 'Hash of id, ref_id and otp which is generated based on the configuration setup and sent to the requester application / module.'; +COMMENT ON COLUMN archive.mosip_prereg_otp_transaction.generated_dtimes IS 'Date and Time when the OTP was generated'; +COMMENT ON COLUMN archive.mosip_prereg_otp_transaction.expiry_dtimes IS 'Date Time when the OTP will be expired'; +COMMENT ON COLUMN archive.mosip_prereg_otp_transaction.validation_retry_count IS 'Validation retry counts of this OTP request. If the validation retry crosses the threshold limit, then the OTP will be de-activated.'; +COMMENT ON COLUMN archive.mosip_prereg_otp_transaction.status_code IS 'Current status of the transaction. Refers to code field of master.status_list table.'; +COMMENT ON COLUMN archive.mosip_prereg_otp_transaction.lang_code IS 'For multilanguage implementation this attribute Refers master.language.code. The value of some of the attributes in current record is stored in this respective language.'; +COMMENT ON COLUMN archive.mosip_prereg_otp_transaction.cr_by IS 'ID or name of the user who create / insert record.'; +COMMENT ON COLUMN archive.mosip_prereg_otp_transaction.cr_dtimes IS 'Date and Timestamp when the record is created/inserted'; +COMMENT ON COLUMN archive.mosip_prereg_otp_transaction.upd_by IS 'ID or name of the user who update the record with new values'; +COMMENT ON COLUMN archive.mosip_prereg_otp_transaction.upd_dtimes IS 'Date and Timestamp when any of the fields in the record is updated with new values.'; +COMMENT ON COLUMN archive.mosip_prereg_otp_transaction.is_deleted IS 'Flag to mark whether the record is Soft deleted.'; +COMMENT ON COLUMN archive.mosip_prereg_otp_transaction.del_dtimes IS 'Date and Timestamp when the record is soft deleted with is_deleted=TRUE';
From deea60309d33207c3be97e3677fd06c3e87dd6bc Mon Sep 17 00:00:00 2001 From: bhumi46 Date: Sun, 17 Dec 2023 23:57:05 +0530 Subject: [PATCH 030/130] json passed as configmaps Signed-off-by: bhumi46 --- .../idrepo_archive_table_info.json | 6 +- .../kernel_archive_table_info.json | 2 +- .../master_archive_table_info.json | 12 +- .../archive-jobs/mosip_archive_main.py | 177 +++++++++--------- .../archive-jobs/pms_archive_table_info.json | 6 +- .../prereg_archive_table_info.json | 2 +- .../regprc_archive_table_info.json | 10 +- .../resident_archive_table_info.json | 10 +- 8 files changed, 112 insertions(+), 113 deletions(-)
diff --git a/data-archive/archive-jobs/idrepo_archive_table_info.json b/data-archive/archive-jobs/idrepo_archive_table_info.json index eb2108a4..b1080225 100644 --- a/data-archive/archive-jobs/idrepo_archive_table_info.json +++ b/data-archive/archive-jobs/idrepo_archive_table_info.json @@ -6,7 +6,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 30, - "need_archival": "true" + "need_archival": "archive" }, { "source_table": "credential_request_status", @@ -14,7 +14,7 @@ "id_column": "individual_id", "date_column": "cr_dtimes", "retension_days": 30, - "need_archival": "true" + "need_archival": "archive" }, { "source_table": "uin_draft", @@ -22,7 +22,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 30, - "need_archival": "true" + "need_archival": "archive" } ] } diff --git a/data-archive/archive-jobs/kernel_archive_table_info.json b/data-archive/archive-jobs/kernel_archive_table_info.json index b02d5315..82689f6c 100644 --- a/data-archive/archive-jobs/kernel_archive_table_info.json
+++ b/data-archive/archive-jobs/kernel_archive_table_info.json @@ -6,7 +6,7 @@ "id_column": "id", "date_column": "generated_dtimes", "retension_days": 7, - "need_archival": "false" + "need_archival": "delete" } ] } diff --git a/data-archive/archive-jobs/master_archive_table_info.json b/data-archive/archive-jobs/master_archive_table_info.json index 023ef89f..4a116103 100644 --- a/data-archive/archive-jobs/master_archive_table_info.json +++ b/data-archive/archive-jobs/master_archive_table_info.json @@ -6,7 +6,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 91, - "need_archival": "true" + "need_archival": "archive" }, { "source_table": "device_master_h", @@ -14,7 +14,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 365, - "need_archival": "true" + "need_archival": "archive" }, { "source_table": "machine_master_h", @@ -22,7 +22,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 183, - "need_archival": "true" + "need_archival": "archive" }, { "source_table": "registration_center_h", @@ -30,7 +30,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 365 , - "need_archival": "true" + "need_archival": "archive" }, { "source_table": "user_detail_h", @@ -38,7 +38,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 183, - "need_archival": "true" + "need_archival": "archive" }, { "source_table": "zone_user_h", @@ -46,7 +46,7 @@ "id_column": "usr_id", "date_column": "cr_dtimes", "retension_days": 183, - "need_archival": "true" + "need_archival": "archive" } ] } diff --git a/data-archive/archive-jobs/mosip_archive_main.py b/data-archive/archive-jobs/mosip_archive_main.py index ce9fe9c1..c55b3081 100644 --- a/data-archive/archive-jobs/mosip_archive_main.py +++ b/data-archive/archive-jobs/mosip_archive_main.py @@ -1,6 +1,6 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Import necessary libraries and modules + import sys import os import psycopg2 @@ -93,155 +93,154 @@ def create_source_param(config_parser, env_vars, db_name): # Function to get formatted values for a row in a table def get_tablevalues(row): - finalValues = "" + final_values = "" for value in row: if value is None: - finalValues += "NULL," + final_values += "NULL," else: - finalValues += "'" + str(value) + "'," - finalValues = finalValues[:-1] - return finalValues + final_values += "'" + str(value) + "'," + final_values = final_values[:-1] + return final_values # Function to read table information from a JSON file or environment variable def read_tables_info(db_name): + try: + # Attempt to read table information from environment variables + tables_info_str = os.environ.get(f"{db_name.lower()}_archive_table_info") + if tables_info_str is not None: + tables_info = json.loads(tables_info_str)['tables_info'] + print(f"Table information loaded from environment variables for {db_name}.") + return tables_info + except json.JSONDecodeError: + print(f"Error decoding JSON from environment variable {db_name.lower()}_archive_table_info.") + try: # Attempt to read table information from a JSON file with open(f'{db_name.lower()}_archive_table_info.json') as f: - tables_info = json.load(f) + tables_info = json.load(f)['tables_info'] print(f"{db_name.lower()}_archive_table_info.json file found and loaded.") - return tables_info['tables_info'] + return tables_info except FileNotFoundError: # Handle case when JSON file is not found - print(f"{db_name.lower()}_archive_table_info.json file not found. 
Using environment variables.") - tables_info = os.environ.get(f"{db_name.lower()}_archive_table_info") - if tables_info is None: - print(f"Environment variable {db_name.lower()}_archive_table_info not found.") - sys.exit(1) - return json.loads(tables_info)['tables_info'] + print(f"{db_name.lower()}_archive_table_info.json file not found.") + sys.exit(1) # Function to archive data from source to archive database -def dataArchive(db_name, dbparam, tables_info): - sourceConn = None - archiveConn = None - sourceCur = None - archiveCur = None +def data_archive(db_name, db_param, tables_info): + source_conn = None + archive_conn = None + source_cur = None + archive_cur = None try: print(f'Connecting to the PostgreSQL database for {db_name}...') # Establish connections to source and archive databases - sourceConn = psycopg2.connect( - user=dbparam[f"{db_name}_SOURCE_DB_UNAME"], - password=dbparam[f"{db_name}_SOURCE_DB_PASS"], - host=dbparam[f"{db_name}_SOURCE_DB_HOST"], - port=dbparam[f"{db_name}_SOURCE_DB_PORT"], - database=dbparam[f"{db_name}_SOURCE_DB_NAME"] + source_conn = psycopg2.connect( + user=db_param[f"{db_name}_SOURCE_DB_UNAME"], + password=db_param[f"{db_name}_SOURCE_DB_PASS"], + host=db_param[f"{db_name}_SOURCE_DB_HOST"], + port=db_param[f"{db_name}_SOURCE_DB_PORT"], + database=db_param[f"{db_name}_SOURCE_DB_NAME"] ) - archiveConn = psycopg2.connect( - user=dbparam["ARCHIVE_DB_UNAME"], - password=dbparam["ARCHIVE_DB_PASS"], - host=dbparam["ARCHIVE_DB_HOST"], - port=dbparam["ARCHIVE_DB_PORT"], - database=dbparam["ARCHIVE_DB_NAME"] + archive_conn = psycopg2.connect( + user=db_param["ARCHIVE_DB_UNAME"], + password=db_param["ARCHIVE_DB_PASS"], + host=db_param["ARCHIVE_DB_HOST"], + port=db_param["ARCHIVE_DB_PORT"], + database=db_param["ARCHIVE_DB_NAME"] ) - sourceCur = sourceConn.cursor() - archiveCur = archiveConn.cursor() - sschemaName = dbparam[f"{db_name}_SOURCE_SCHEMA_NAME"] - aschemaName = dbparam["ARCHIVE_SCHEMA_NAME"] + source_cur = source_conn.cursor() + archive_cur = archive_conn.cursor() + sschema_name = db_param[f"{db_name}_SOURCE_SCHEMA_NAME"] + aschema_name = db_param["ARCHIVE_SCHEMA_NAME"] - # Loop through the list of table_info dictionaries for table_info in tables_info: source_table_name = table_info['source_table'] archive_table_name = table_info['archive_table'] id_column = table_info['id_column'] - need_archival = table_info.get('need_archival', 'true').lower() # Default to 'true' if not provided + need_archival = table_info.get('need_archival', 'none').lower() - if need_archival == 'true': - # Archiving is enabled + if need_archival == 'archive': if 'date_column' in table_info and 'retention_days' in table_info: date_column = table_info['date_column'] retention_days = table_info['retention_days'] - # Construct a SELECT query with date-based filtering - select_query = f"SELECT * FROM {sschemaName}.{source_table_name} WHERE {date_column} < NOW() - INTERVAL '{retention_days} days'" + select_query = f"SELECT * FROM {sschema_name}.{source_table_name} WHERE {date_column} < NOW() - INTERVAL '{retention_days} days'" else: - # Construct a basic SELECT query - select_query = f"SELECT * FROM {sschemaName}.{source_table_name}" - sourceCur.execute(select_query) - rows = sourceCur.fetchall() - select_count = sourceCur.rowcount + select_query = f"SELECT * FROM {sschema_name}.{source_table_name}" + source_cur.execute(select_query) + rows = source_cur.fetchall() + select_count = source_cur.rowcount print(f"{select_count} Record(s) selected for archive from {source_table_name} from 
source database {db_name}") if select_count > 0: for row in rows: - rowValues = get_tablevalues(row) - # Construct an INSERT query to archive the selected row - insert_query = f"INSERT INTO {aschemaName}.{archive_table_name} VALUES ({', '.join(['%s']*len(row))}) ON CONFLICT DO NOTHING" - archiveCur.execute(insert_query, row) - archiveConn.commit() - insert_count = archiveCur.rowcount + row_values = get_tablevalues(row) + insert_query = f"INSERT INTO {aschema_name}.{archive_table_name} VALUES ({', '.join(['%s']*len(row))}) ON CONFLICT DO NOTHING" + archive_cur.execute(insert_query, row) + archive_conn.commit() + insert_count = archive_cur.rowcount if insert_count == 0: print(f"Skipping duplicate record with ID: {row[0]} in table {archive_table_name} from source database {db_name}") else: print(f"{insert_count} Record(s) inserted successfully for table {archive_table_name} from source database {db_name}") - # Construct a DELETE query with parameterized values - delete_query = f'DELETE FROM "{sschemaName}"."{source_table_name}" WHERE "{id_column}" = %s' - sourceCur.execute(delete_query, (row[0],)) - sourceConn.commit() - delete_count = sourceCur.rowcount + delete_query = f'DELETE FROM "{sschema_name}"."{source_table_name}" WHERE "{id_column}" = %s' + source_cur.execute(delete_query, (row[0],)) + source_conn.commit() + delete_count = source_cur.rowcount print(f"{delete_count} Record(s) deleted successfully for table {source_table_name} from source database {db_name}") - elif need_archival == 'false': - # Archiving is disabled, execute a SELECT and DELETE from source + + elif need_archival == 'delete': if 'date_column' in table_info and 'retention_days' in table_info: date_column = table_info['date_column'] retention_days = table_info['retention_days'] - # Construct a SELECT query with date-based filtering - select_query = f"SELECT * FROM {sschemaName}.{source_table_name} WHERE {date_column} < NOW() - INTERVAL '{retention_days} days'" + select_query = f"SELECT * FROM {sschema_name}.{source_table_name} WHERE {date_column} < NOW() - INTERVAL '{retention_days} days'" else: - # Construct a basic SELECT query - select_query = f"SELECT * FROM {sschemaName}.{source_table_name}" - sourceCur.execute(select_query) - rows = sourceCur.fetchall() - select_count = sourceCur.rowcount + select_query = f"SELECT * FROM {sschema_name}.{source_table_name}" + source_cur.execute(select_query) + rows = source_cur.fetchall() + select_count = source_cur.rowcount print(f"{select_count} Record(s) selected for deletion from {source_table_name} from source database {db_name}") if select_count > 0: - # Construct a DELETE query to remove the selected rows from the source table - delete_query = f'DELETE FROM "{sschemaName}"."{source_table_name}" WHERE "{id_column}" = %s' + delete_query = f'DELETE FROM "{sschema_name}"."{source_table_name}" WHERE "{id_column}" = %s' for row in rows: - sourceCur.execute(delete_query, (row[0],)) - sourceConn.commit() - delete_count = sourceCur.rowcount + source_cur.execute(delete_query, (row[0],)) + source_conn.commit() + delete_count = source_cur.rowcount print(f"{delete_count} Record(s) deleted successfully for table {source_table_name} from source database {db_name}") + + elif need_archival == 'none': + print(f"Skipping archival for table {source_table_name} from source database {db_name}") + else: - print(f"Error: Invalid value for 'need_archival' in table {source_table_name}. Use 'true' or 'false'.") + print(f"Error: Invalid value for 'need_archival' in table {source_table_name}. 
Use 'archive', 'delete', or 'none'.") + except (Exception, psycopg2.DatabaseError) as error: - # Handle exceptions during the data archiving process print("Error during data archiving:", error) finally: - # Close database connections - if sourceCur is not None: - sourceCur.close() - if sourceConn is not None: - sourceConn.close() + if source_cur is not None: + source_cur.close() + if source_conn is not None: + source_conn.close() print(f'Source database connection for {db_name} closed.') - if archiveCur is not None: - archiveCur.close() - if archiveConn is not None: - archiveConn.close() + if archive_cur is not None: + archive_cur.close() + if archive_conn is not None: + archive_conn.close() print('Archive database connection closed.') # Main execution when the script is run if __name__ == '__main__': # Get database names, archive parameters, and source parameters db_names, archive_param, source_param = config() - + # Process each source database for db_name in db_names: # Combine source and archive parameters - dbparam = source_param[db_name] - dbparam.update(archive_param) - + db_param = source_param[db_name] + db_param.update(archive_param) + # Read table information tables_info = read_tables_info(db_name) - - # Archive data for the current source database - dataArchive(db_name, dbparam, tables_info) + # Archive data for the current source database + data_archive(db_name, db_param, tables_info) diff --git a/data-archive/archive-jobs/pms_archive_table_info.json b/data-archive/archive-jobs/pms_archive_table_info.json index 1972fd1f..fd14e3f9 100644 --- a/data-archive/archive-jobs/pms_archive_table_info.json +++ b/data-archive/archive-jobs/pms_archive_table_info.json @@ -6,7 +6,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 183, - "need_archival": "true" + "need_archival": "archive" }, { "source_table": "secure_biometric_interface_h", @@ -14,7 +14,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 183, - "need_archival": "true" + "need_archival": "archive" }, { "source_table": "partner_h", @@ -22,7 +22,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 183, - "need_archival": "true" + "need_archival": "archive" } ] } diff --git a/data-archive/archive-jobs/prereg_archive_table_info.json b/data-archive/archive-jobs/prereg_archive_table_info.json index 10b172ce..203edb2c 100644 --- a/data-archive/archive-jobs/prereg_archive_table_info.json +++ b/data-archive/archive-jobs/prereg_archive_table_info.json @@ -6,7 +6,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 30, - "need_archival": "false" + "need_archival": "delete" } ] } diff --git a/data-archive/archive-jobs/regprc_archive_table_info.json b/data-archive/archive-jobs/regprc_archive_table_info.json index e0fdd88c..16e137ae 100644 --- a/data-archive/archive-jobs/regprc_archive_table_info.json +++ b/data-archive/archive-jobs/regprc_archive_table_info.json @@ -6,7 +6,7 @@ "id_column": "abis_resp_id", "date_column": "cr_dtimes", "retension_days": 183, - "need_archival": "true" + "need_archival": "archive" }, { "source_table": "abis_response", @@ -14,7 +14,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 183, - "need_archival": "true" + "need_archival": "archive" }, { "source_table": "abis_request", @@ -22,7 +22,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 183, - "need_archival": "true" + "need_archival": "archive" }, { "source_table": "reg_demo_dedupe_list", @@ -30,7 +30,7 @@ "id_column": "id", "date_column": 
"cr_dtimes", "retension_days": 183, - "need_archival": "true" + "need_archival": "archive" }, { "source_table": "registration_transaction", @@ -38,7 +38,7 @@ "id_column": "regtrn_id", "date_column": "cr_dtimes", "retension_days": 183, - "need_archival": "true" + "need_archival": "archive" } ] } diff --git a/data-archive/archive-jobs/resident_archive_table_info.json b/data-archive/archive-jobs/resident_archive_table_info.json index 0024d61c..6273352f 100644 --- a/data-archive/archive-jobs/resident_archive_table_info.json +++ b/data-archive/archive-jobs/resident_archive_table_info.json @@ -6,7 +6,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 30, - "need_archival": "false" + "need_archival": "delete" }, { "source_table": "resident_grievance_ticket", @@ -14,7 +14,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 365, - "need_archival": "true" + "need_archival": "archive" }, { "source_table": "resident_session", @@ -22,7 +22,7 @@ "id_column": "session_id", "date_column": "login_dtimes", "retension_days": 30, - "need_archival": "true" + "need_archival": "archive" }, { "source_table": "resident_transaction", @@ -30,7 +30,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 365, - "need_archival": "true" + "need_archival": "archive" }, { "source_table": "resident_user_actions", @@ -38,7 +38,7 @@ "id_column": "ida_token", "date_column": "last_bell_notif_click_dtimes", "retension_days": 365, - "need_archival": "true" + "need_archival": "archive" } ] } From aca3e74ec59ab9aee194071d0682ba6e6c8e3f3d Mon Sep 17 00:00:00 2001 From: bhumi46 Date: Mon, 18 Dec 2023 00:01:50 +0530 Subject: [PATCH 031/130] json passed as configmaps Signed-off-by: bhumi46 --- data-archive/.dockerignore | 1 + 1 file changed, 1 insertion(+) diff --git a/data-archive/.dockerignore b/data-archive/.dockerignore index efd2b6a9..0699f034 100755 --- a/data-archive/.dockerignore +++ b/data-archive/.dockerignore @@ -1,4 +1,5 @@ archive-jobs/db.properties +archive-jobs/audit_archive_table_info.json **/.git **/.gitignore **/.vscode From 2afaece9fe5da4c00454cbf459c3a94bfe116a19 Mon Sep 17 00:00:00 2001 From: bhumi46 Date: Mon, 18 Dec 2023 00:10:57 +0530 Subject: [PATCH 032/130] json passed as configmaps Signed-off-by: bhumi46 --- .../mosip_archive/ddl/archive-prereg-otp_transaction.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/data-archive/db_scripts/mosip_archive/ddl/archive-prereg-otp_transaction.sql b/data-archive/db_scripts/mosip_archive/ddl/archive-prereg-otp_transaction.sql index a97bb61b..18d16edc 100644 --- a/data-archive/db_scripts/mosip_archive/ddl/archive-prereg-otp_transaction.sql +++ b/data-archive/db_scripts/mosip_archive/ddl/archive-prereg-otp_transaction.sql @@ -15,7 +15,7 @@ CREATE TABLE archive.mosip_prereg_otp_transaction( upd_dtimes timestamp, is_deleted boolean, del_dtimes timestamp, - CONSTRAINT pk_otpt_id PRIMARY KEY (id) + CONSTRAINT pk_otptrans_id PRIMARY KEY (id) ); COMMENT ON TABLE archive.mosip_prereg_otp_transaction IS 'All OTP related data and validation details are maintained here for Pre Registration module.'; From e83a19dc77cea6189b18ac2a715d52a5deb0b7ff Mon Sep 17 00:00:00 2001 From: bhumi46 Date: Mon, 18 Dec 2023 10:58:39 +0530 Subject: [PATCH 033/130] json passed as configmaps Signed-off-by: bhumi46 --- data-archive/Dockerfile | 5 +++-- data-archive/archive-jobs/audit_archive_table_info.json | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/data-archive/Dockerfile b/data-archive/Dockerfile index 
8c2d1fdf..6d955eb2 100644 --- a/data-archive/Dockerfile +++ b/data-archive/Dockerfile @@ -1,5 +1,5 @@ # Use a specific version of the Postgres image -FROM postgres:16 +FROM postgres:15.4 # Define build arguments and set labels ARG SOURCE @@ -105,7 +105,8 @@ ENV DB_SERVERIP= \ ARCHIVE_DB_UNAME= \ ARCHIVE_DB_PASS= \ ARCHIVE_DB_NAME= \ - ARCHIVE_SCHEMA_NAME= + ARCHIVE_SCHEMA_NAME= \ + AUDIT_archive_table_info=audit_archive_table_info.json # Set entrypoint ENTRYPOINT ["./entrypoint.sh"] diff --git a/data-archive/archive-jobs/audit_archive_table_info.json b/data-archive/archive-jobs/audit_archive_table_info.json index 5e91fcb2..08cf3e47 100644 --- a/data-archive/archive-jobs/audit_archive_table_info.json +++ b/data-archive/archive-jobs/audit_archive_table_info.json @@ -6,7 +6,7 @@ "id_column": "log_id", "date_column": "log_dtimes", "retension_days": 30, - "need_archival": "true" + "need_archival": "archive" } ] } From 4d0be98339d86e8ed396a43a439841309ec757b9 Mon Sep 17 00:00:00 2001 From: bhumi46 Date: Mon, 18 Dec 2023 17:05:26 +0530 Subject: [PATCH 034/130] edited for volume mounts Signed-off-by: bhumi46 --- data-archive/.dockerignore | 2 +- data-archive/Dockerfile | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/data-archive/.dockerignore b/data-archive/.dockerignore index 0699f034..ccebd001 100755 --- a/data-archive/.dockerignore +++ b/data-archive/.dockerignore @@ -1,5 +1,5 @@ archive-jobs/db.properties -archive-jobs/audit_archive_table_info.json +archive-jobs/*.json **/.git **/.gitignore **/.vscode diff --git a/data-archive/Dockerfile b/data-archive/Dockerfile index 6d955eb2..5c758ea2 100644 --- a/data-archive/Dockerfile +++ b/data-archive/Dockerfile @@ -105,8 +105,7 @@ ENV DB_SERVERIP= \ ARCHIVE_DB_UNAME= \ ARCHIVE_DB_PASS= \ ARCHIVE_DB_NAME= \ - ARCHIVE_SCHEMA_NAME= \ - AUDIT_archive_table_info=audit_archive_table_info.json + ARCHIVE_SCHEMA_NAME= # Set entrypoint ENTRYPOINT ["./entrypoint.sh"] From b9e95af3ad7c9a46238870f3faf027813a060311 Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Wed, 20 Dec 2023 08:59:23 +0530 Subject: [PATCH 035/130] Update mosip_archive_main.py Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- .../archive-jobs/mosip_archive_main.py | 25 ++++++------------- 1 file changed, 8 insertions(+), 17 deletions(-) diff --git a/data-archive/archive-jobs/mosip_archive_main.py b/data-archive/archive-jobs/mosip_archive_main.py index c55b3081..d109f9f5 100644 --- a/data-archive/archive-jobs/mosip_archive_main.py +++ b/data-archive/archive-jobs/mosip_archive_main.py @@ -102,28 +102,19 @@ def get_tablevalues(row): final_values = final_values[:-1] return final_values -# Function to read table information from a JSON file or environment variable def read_tables_info(db_name): try: - # Attempt to read table information from environment variables - tables_info_str = os.environ.get(f"{db_name.lower()}_archive_table_info") - if tables_info_str is not None: - tables_info = json.loads(tables_info_str)['tables_info'] - print(f"Table information loaded from environment variables for {db_name}.") - return tables_info - except json.JSONDecodeError: - print(f"Error decoding JSON from environment variable {db_name.lower()}_archive_table_info.") - - try: - # Attempt to read table information from a JSON file with open(f'{db_name.lower()}_archive_table_info.json') as f: - tables_info = json.load(f)['tables_info'] + tables_info = json.load(f) print(f"{db_name.lower()}_archive_table_info.json file found and 
loaded.") - return tables_info + return tables_info['tables_info'] except FileNotFoundError: - # Handle case when JSON file is not found - print(f"{db_name.lower()}_archive_table_info.json file not found.") - sys.exit(1) + print(f"{db_name.lower()}_archive_table_info.json file not found. Using environment variables.") + tables_info = os.environ.get(f"{db_name.lower()}_archive_table_info") + if tables_info is None: + print(f"Environment variable {db_name.lower()}_archive_table_info not found.") + sys.exit(1) + return json.loads(tables_info)['tables_info'] # Function to archive data from source to archive database def data_archive(db_name, db_param, tables_info): From bde3fd5f6d08193cd9459cb33755ef4ae917f08e Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Wed, 20 Dec 2023 11:55:01 +0530 Subject: [PATCH 036/130] Update mosip_archive_main.py Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- .../archive-jobs/mosip_archive_main.py | 28 ++++++++++++++----- 1 file changed, 21 insertions(+), 7 deletions(-) diff --git a/data-archive/archive-jobs/mosip_archive_main.py b/data-archive/archive-jobs/mosip_archive_main.py index d109f9f5..693732c7 100644 --- a/data-archive/archive-jobs/mosip_archive_main.py +++ b/data-archive/archive-jobs/mosip_archive_main.py @@ -103,18 +103,32 @@ def get_tablevalues(row): return final_values def read_tables_info(db_name): + file_path = f'{db_name.lower()}_archive_table_info.json' + file_in_container_path = f'{db_name.lower()}_archive_table_info' + try: - with open(f'{db_name.lower()}_archive_table_info.json') as f: + with open(file_path) as f: tables_info = json.load(f) - print(f"{db_name.lower()}_archive_table_info.json file found and loaded.") + print(f"{file_path} file found and loaded.") return tables_info['tables_info'] except FileNotFoundError: - print(f"{db_name.lower()}_archive_table_info.json file not found. Using environment variables.") - tables_info = os.environ.get(f"{db_name.lower()}_archive_table_info") - if tables_info is None: - print(f"Environment variable {db_name.lower()}_archive_table_info not found.") + print(f"{file_path} file not found. Trying to retrieve from container volume.") + + # Assuming CONTAINER_VOLUME_PATH is the environment variable containing the path to the container volume + container_volume_path = os.environ.get('CONTAINER_VOLUME_PATH') + + if container_volume_path: + file_path_in_volume = os.path.join(container_volume_path, file_in_container_path) + try: + with open(file_path_in_volume) as f: + tables_info = json.load(f) + print(f"Data retrieved from container volume: {file_path_in_volume}") + return tables_info['tables_info'] + except FileNotFoundError: + print(f"{file_path_in_volume} not found in container volume.") + else: + print("Container volume path not provided. 
Exiting.") sys.exit(1) - return json.loads(tables_info)['tables_info'] # Function to archive data from source to archive database def data_archive(db_name, db_param, tables_info): From 3912e9eb6216a12a7486486a3ac677067f07d1c5 Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Wed, 20 Dec 2023 11:56:30 +0530 Subject: [PATCH 037/130] Update Dockerfile Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- data-archive/Dockerfile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/data-archive/Dockerfile b/data-archive/Dockerfile index 5c758ea2..82ab194f 100644 --- a/data-archive/Dockerfile +++ b/data-archive/Dockerfile @@ -105,7 +105,8 @@ ENV DB_SERVERIP= \ ARCHIVE_DB_UNAME= \ ARCHIVE_DB_PASS= \ ARCHIVE_DB_NAME= \ - ARCHIVE_SCHEMA_NAME= + ARCHIVE_SCHEMA_NAME= \ + CONTAINER_VOLUME_PATH= # Set entrypoint ENTRYPOINT ["./entrypoint.sh"] From 0efe72801e0b92356eddfb1c455922a5bafca283 Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Wed, 20 Dec 2023 18:32:30 +0530 Subject: [PATCH 038/130] Update credential_archive_table_info.json Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- data-archive/archive-jobs/credential_archive_table_info.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/data-archive/archive-jobs/credential_archive_table_info.json b/data-archive/archive-jobs/credential_archive_table_info.json index aa2ee4eb..eef08789 100644 --- a/data-archive/archive-jobs/credential_archive_table_info.json +++ b/data-archive/archive-jobs/credential_archive_table_info.json @@ -6,7 +6,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 30, - "need_archival": "true" + "need_archival": "archive" } ] } From f798c9456210f51aab076b9a2067a43854511298 Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Wed, 20 Dec 2023 18:33:01 +0530 Subject: [PATCH 039/130] Update esignet_archive_table_info.json Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- data-archive/archive-jobs/esignet_archive_table_info.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/data-archive/archive-jobs/esignet_archive_table_info.json b/data-archive/archive-jobs/esignet_archive_table_info.json index 921fbcad..f423d027 100644 --- a/data-archive/archive-jobs/esignet_archive_table_info.json +++ b/data-archive/archive-jobs/esignet_archive_table_info.json @@ -6,7 +6,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 30, - "need_archival": "true" + "need_archival": "archive" } ] } From 778439d0e19814162765337890f70daab98a6740 Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Wed, 20 Dec 2023 18:33:21 +0530 Subject: [PATCH 040/130] Update ida_archive_table_info.json Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- data-archive/archive-jobs/ida_archive_table_info.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/data-archive/archive-jobs/ida_archive_table_info.json b/data-archive/archive-jobs/ida_archive_table_info.json index 048caf2f..683084a0 100644 --- a/data-archive/archive-jobs/ida_archive_table_info.json +++ b/data-archive/archive-jobs/ida_archive_table_info.json @@ -6,7 +6,7 @@ "id_column": "event_id", "date_column": "cr_dtimes", "retension_days": 30, - "need_archival": "true" + "need_archival": "archive" }, { "source_table": "otp_transaction", @@ -14,7 +14,7 @@ "id_column": "id", 
"date_column": "cr_dtimes", "retension_days": 30, - "need_archival": "false" + "need_archival": "archive" } ] } From b58d024e551a0e4c44c4d3b943ac23a01362ec24 Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Wed, 20 Dec 2023 23:43:51 +0530 Subject: [PATCH 041/130] Update mosip_archive_main.py Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- data-archive/archive-jobs/mosip_archive_main.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/data-archive/archive-jobs/mosip_archive_main.py b/data-archive/archive-jobs/mosip_archive_main.py index 693732c7..e1997d82 100644 --- a/data-archive/archive-jobs/mosip_archive_main.py +++ b/data-archive/archive-jobs/mosip_archive_main.py @@ -79,7 +79,6 @@ def config(): # Return extracted parameters return db_names, archive_param, source_param -# Function to create source parameters for a specific database def create_source_param(config_parser, env_vars, db_name): param_keys = ['SOURCE_DB_HOST', 'SOURCE_DB_PORT', 'SOURCE_DB_NAME', 'SOURCE_SCHEMA_NAME', 'SOURCE_DB_UNAME', 'SOURCE_DB_PASS'] source_param = {} @@ -87,7 +86,13 @@ def create_source_param(config_parser, env_vars, db_name): # Extract source parameters from environment variables or config file for key in param_keys: env_key = f'{db_name}_{key}' - source_param[env_key] = env_vars.get(env_key) or config_parser.get(db_name, env_key) + + if config_parser: + # If config_parser is provided, use it to get the value or fallback to environment variables + source_param[env_key] = env_vars.get(env_key) or config_parser.get(db_name, {}).get(env_key) + else: + # If config_parser is not provided, use only environment variables + source_param[env_key] = env_vars.get(env_key) return source_param From 98799f9d0d8c5042e9dddbcc1dacc2a2cf197513 Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Thu, 21 Dec 2023 00:28:33 +0530 Subject: [PATCH 042/130] tested and reverted Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- data-archive/archive-jobs/mosip_archive_main.py | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/data-archive/archive-jobs/mosip_archive_main.py b/data-archive/archive-jobs/mosip_archive_main.py index e1997d82..5dfb2200 100644 --- a/data-archive/archive-jobs/mosip_archive_main.py +++ b/data-archive/archive-jobs/mosip_archive_main.py @@ -78,7 +78,7 @@ def config(): # Return extracted parameters return db_names, archive_param, source_param - +# Function to create source parameters for a specific database def create_source_param(config_parser, env_vars, db_name): param_keys = ['SOURCE_DB_HOST', 'SOURCE_DB_PORT', 'SOURCE_DB_NAME', 'SOURCE_SCHEMA_NAME', 'SOURCE_DB_UNAME', 'SOURCE_DB_PASS'] source_param = {} @@ -86,13 +86,7 @@ def create_source_param(config_parser, env_vars, db_name): # Extract source parameters from environment variables or config file for key in param_keys: env_key = f'{db_name}_{key}' - - if config_parser: - # If config_parser is provided, use it to get the value or fallback to environment variables - source_param[env_key] = env_vars.get(env_key) or config_parser.get(db_name, {}).get(env_key) - else: - # If config_parser is not provided, use only environment variables - source_param[env_key] = env_vars.get(env_key) + source_param[env_key] = env_vars.get(env_key) or config_parser.get(db_name, env_key) return source_param From ad19d621399f14d8b0050a6b34f2a5571931f39a Mon Sep 17 00:00:00 2001 From: bhumi46 
<111699703+bhumi46@users.noreply.github.com> Date: Thu, 21 Dec 2023 00:44:04 +0530 Subject: [PATCH 043/130] Update mosip_archive_main.py Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- data-archive/archive-jobs/mosip_archive_main.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/data-archive/archive-jobs/mosip_archive_main.py b/data-archive/archive-jobs/mosip_archive_main.py index 5dfb2200..9ac0a7fa 100644 --- a/data-archive/archive-jobs/mosip_archive_main.py +++ b/data-archive/archive-jobs/mosip_archive_main.py @@ -10,11 +10,15 @@ # Function to check if required keys are present in a section def check_keys(keys, section, prefix=""): + missing_keys = [] for key in keys: env_key = f"{prefix}_{key}" if prefix else key if key not in section and env_key not in section: print(f"Error: {env_key} not found in {section} section.") - sys.exit(1) + missing_keys.append(key) + if missing_keys: + print(f"Missing keys: {', '.join(missing_keys)}") + sys.exit(1) # Function to read configuration from file or environment variables def config(): From dbf467f70eda90317a4f0ef8ffd20cd606c0cca5 Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Thu, 21 Dec 2023 01:35:48 +0530 Subject: [PATCH 044/130] Update mosip_archive_main.py Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- data-archive/archive-jobs/mosip_archive_main.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/data-archive/archive-jobs/mosip_archive_main.py b/data-archive/archive-jobs/mosip_archive_main.py index 9ac0a7fa..241f0713 100644 --- a/data-archive/archive-jobs/mosip_archive_main.py +++ b/data-archive/archive-jobs/mosip_archive_main.py @@ -90,7 +90,10 @@ def create_source_param(config_parser, env_vars, db_name): # Extract source parameters from environment variables or config file for key in param_keys: env_key = f'{db_name}_{key}' - source_param[env_key] = env_vars.get(env_key) or config_parser.get(db_name, env_key) + if config_parser is not None: + source_param[env_key] = env_vars.get(env_key) or config_parser.get(db_name, {}).get(env_key) + else: + source_param[env_key] = env_vars.get(env_key) return source_param From d53bce8be43b0bd2e42dcaa1b6529db9340ecca1 Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Thu, 21 Dec 2023 16:13:25 +0530 Subject: [PATCH 045/130] [MOSIP-29979]changed need_archival to operation_type Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- .../archive-jobs/mosip_archive_main.py | 51 ++++++++++++++----- 1 file changed, 39 insertions(+), 12 deletions(-) diff --git a/data-archive/archive-jobs/mosip_archive_main.py b/data-archive/archive-jobs/mosip_archive_main.py index 241f0713..beff7233 100644 --- a/data-archive/archive-jobs/mosip_archive_main.py +++ b/data-archive/archive-jobs/mosip_archive_main.py @@ -136,7 +136,6 @@ def read_tables_info(db_name): print("Container volume path not provided. 
Exiting.") sys.exit(1) -# Function to archive data from source to archive database def data_archive(db_name, db_param, tables_info): source_conn = None archive_conn = None @@ -168,9 +167,32 @@ def data_archive(db_name, db_param, tables_info): source_table_name = table_info['source_table'] archive_table_name = table_info['archive_table'] id_column = table_info['id_column'] - need_archival = table_info.get('need_archival', 'none').lower() + operation_type = table_info.get('operation_type', 'none').lower() - if need_archival == 'archive': + if operation_type == 'delete': + # Perform delete logic based on retention_days + if 'date_column' in table_info and 'retention_days' in table_info: + date_column = table_info['date_column'] + retention_days = table_info['retention_days'] + select_query = f"SELECT * FROM {sschema_name}.{source_table_name} WHERE {date_column} < NOW() - INTERVAL '{retention_days} days'" + else: + select_query = f"SELECT * FROM {sschema_name}.{source_table_name}" + + source_cur.execute(select_query) + rows = source_cur.fetchall() + select_count = source_cur.rowcount + print(f"{select_count} Record(s) selected for deletion from {source_table_name} from source database {db_name}") + + if select_count > 0: + delete_query = f'DELETE FROM "{sschema_name}"."{source_table_name}" WHERE "{id_column}" = %s' + for row in rows: + source_cur.execute(delete_query, (row[0],)) + source_conn.commit() + delete_count = source_cur.rowcount + print(f"{delete_count} Record(s) deleted successfully for table {source_table_name} from source database {db_name}") + + elif operation_type == 'archive_delete': + # Perform archive and delete logic if 'date_column' in table_info and 'retention_days' in table_info: date_column = table_info['date_column'] retention_days = table_info['retention_days'] @@ -199,7 +221,8 @@ def data_archive(db_name, db_param, tables_info): delete_count = source_cur.rowcount print(f"{delete_count} Record(s) deleted successfully for table {source_table_name} from source database {db_name}") - elif need_archival == 'delete': + elif operation_type == 'archive_nodelete': + # Perform archive logic without deleting if 'date_column' in table_info and 'retention_days' in table_info: date_column = table_info['date_column'] retention_days = table_info['retention_days'] @@ -209,21 +232,25 @@ def data_archive(db_name, db_param, tables_info): source_cur.execute(select_query) rows = source_cur.fetchall() select_count = source_cur.rowcount - print(f"{select_count} Record(s) selected for deletion from {source_table_name} from source database {db_name}") + print(f"{select_count} Record(s) selected for archive from {source_table_name} from source database {db_name}") if select_count > 0: - delete_query = f'DELETE FROM "{sschema_name}"."{source_table_name}" WHERE "{id_column}" = %s' for row in rows: - source_cur.execute(delete_query, (row[0],)) - source_conn.commit() - delete_count = source_cur.rowcount - print(f"{delete_count} Record(s) deleted successfully for table {source_table_name} from source database {db_name}") + row_values = get_tablevalues(row) + insert_query = f"INSERT INTO {aschema_name}.{archive_table_name} VALUES ({', '.join(['%s']*len(row))}) ON CONFLICT DO NOTHING" + archive_cur.execute(insert_query, row) + archive_conn.commit() + insert_count = archive_cur.rowcount + if insert_count == 0: + print(f"Skipping duplicate record with ID: {row[0]} in table {archive_table_name} from source database {db_name}") + else: + print(f"{insert_count} Record(s) inserted successfully for table 
{archive_table_name} from source database {db_name}") - elif need_archival == 'none': + elif operation_type == 'none': print(f"Skipping archival for table {source_table_name} from source database {db_name}") else: - print(f"Error: Invalid value for 'need_archival' in table {source_table_name}. Use 'archive', 'delete', or 'none'.") + print(f"Error: Invalid value for 'operation_type' in table {source_table_name}. Use 'delete', 'archive_delete', 'archive_nodelete', or 'none'.") except (Exception, psycopg2.DatabaseError) as error: print("Error during data archiving:", error) From f5b5e92752184ccb7171d4eb09c64c93e23c86bc Mon Sep 17 00:00:00 2001 From: bhumi46 Date: Wed, 27 Dec 2023 12:09:48 +0530 Subject: [PATCH 046/130] added operation_type instead of need_archival Signed-off-by: bhumi46 --- .../archive-jobs/audit_archive_table_info.json | 2 +- .../archive-jobs/credential_archive_table_info.json | 2 +- .../archive-jobs/esignet_archive_table_info.json | 4 ++-- .../archive-jobs/ida_archive_table_info.json | 4 ++-- .../archive-jobs/idrepo_archive_table_info.json | 6 +++--- .../archive-jobs/kernel_archive_table_info.json | 4 ++-- .../archive-jobs/master_archive_table_info.json | 12 ++++++------ .../archive-jobs/pms_archive_table_info.json | 6 +++--- .../archive-jobs/prereg_archive_table_info.json | 4 ++-- .../archive-jobs/regprc_archive_table_info.json | 10 +++++----- .../archive-jobs/resident_archive_table_info.json | 10 +++++----- 11 files changed, 32 insertions(+), 32 deletions(-) diff --git a/data-archive/archive-jobs/audit_archive_table_info.json b/data-archive/archive-jobs/audit_archive_table_info.json index 08cf3e47..5ca1975e 100644 --- a/data-archive/archive-jobs/audit_archive_table_info.json +++ b/data-archive/archive-jobs/audit_archive_table_info.json @@ -6,7 +6,7 @@ "id_column": "log_id", "date_column": "log_dtimes", "retension_days": 30, - "need_archival": "archive" + "operation_type": "archive_delete" } ] } diff --git a/data-archive/archive-jobs/credential_archive_table_info.json b/data-archive/archive-jobs/credential_archive_table_info.json index eef08789..21c0bb41 100644 --- a/data-archive/archive-jobs/credential_archive_table_info.json +++ b/data-archive/archive-jobs/credential_archive_table_info.json @@ -6,7 +6,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 30, - "need_archival": "archive" + "operation_type": "archive_delete" } ] } diff --git a/data-archive/archive-jobs/esignet_archive_table_info.json b/data-archive/archive-jobs/esignet_archive_table_info.json index f423d027..ca74ad39 100644 --- a/data-archive/archive-jobs/esignet_archive_table_info.json +++ b/data-archive/archive-jobs/esignet_archive_table_info.json @@ -6,7 +6,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 30, - "need_archival": "archive" + "operation_type": "archive_delete" } ] -} +} \ No newline at end of file diff --git a/data-archive/archive-jobs/ida_archive_table_info.json b/data-archive/archive-jobs/ida_archive_table_info.json index 683084a0..8a05964b 100644 --- a/data-archive/archive-jobs/ida_archive_table_info.json +++ b/data-archive/archive-jobs/ida_archive_table_info.json @@ -6,7 +6,7 @@ "id_column": "event_id", "date_column": "cr_dtimes", "retension_days": 30, - "need_archival": "archive" + "operation_type": "archive_delete" }, { "source_table": "otp_transaction", @@ -14,7 +14,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 30, - "need_archival": "archive" + "operation_type": "archive_delete" } ] } diff --git 
a/data-archive/archive-jobs/idrepo_archive_table_info.json b/data-archive/archive-jobs/idrepo_archive_table_info.json index b1080225..dc491dc3 100644 --- a/data-archive/archive-jobs/idrepo_archive_table_info.json +++ b/data-archive/archive-jobs/idrepo_archive_table_info.json @@ -6,7 +6,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 30, - "need_archival": "archive" + "operation_type": "archive_delete" }, { "source_table": "credential_request_status", @@ -14,7 +14,7 @@ "id_column": "individual_id", "date_column": "cr_dtimes", "retension_days": 30, - "need_archival": "archive" + "operation_type": "archive_delete" }, { "source_table": "uin_draft", @@ -22,7 +22,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 30, - "need_archival": "archive" + "operation_type": "archive_delete" } ] } diff --git a/data-archive/archive-jobs/kernel_archive_table_info.json b/data-archive/archive-jobs/kernel_archive_table_info.json index 82689f6c..e6de68c6 100644 --- a/data-archive/archive-jobs/kernel_archive_table_info.json +++ b/data-archive/archive-jobs/kernel_archive_table_info.json @@ -6,7 +6,7 @@ "id_column": "id", "date_column": "generated_dtimes", "retension_days": 7, - "need_archival": "delete" + "operation_type": "delete" } ] -} +} \ No newline at end of file diff --git a/data-archive/archive-jobs/master_archive_table_info.json b/data-archive/archive-jobs/master_archive_table_info.json index 4a116103..eae767fe 100644 --- a/data-archive/archive-jobs/master_archive_table_info.json +++ b/data-archive/archive-jobs/master_archive_table_info.json @@ -6,7 +6,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 91, - "need_archival": "archive" + "operation_type": "archive_delete" }, { "source_table": "device_master_h", @@ -14,7 +14,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 365, - "need_archival": "archive" + "operation_type": "archive_delete" }, { "source_table": "machine_master_h", @@ -22,7 +22,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 183, - "need_archival": "archive" + "operation_type": "archive_delete" }, { "source_table": "registration_center_h", @@ -30,7 +30,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 365 , - "need_archival": "archive" + "operation_type": "archive_delete" }, { "source_table": "user_detail_h", @@ -38,7 +38,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 183, - "need_archival": "archive" + "operation_type": "archive_delete" }, { "source_table": "zone_user_h", @@ -46,7 +46,7 @@ "id_column": "usr_id", "date_column": "cr_dtimes", "retension_days": 183, - "need_archival": "archive" + "operation_type": "archive_delete" } ] } diff --git a/data-archive/archive-jobs/pms_archive_table_info.json b/data-archive/archive-jobs/pms_archive_table_info.json index fd14e3f9..f120a2b3 100644 --- a/data-archive/archive-jobs/pms_archive_table_info.json +++ b/data-archive/archive-jobs/pms_archive_table_info.json @@ -6,7 +6,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 183, - "need_archival": "archive" + "operation_type": "archive_delete" }, { "source_table": "secure_biometric_interface_h", @@ -14,7 +14,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 183, - "need_archival": "archive" + "operation_type": "archive_delete" }, { "source_table": "partner_h", @@ -22,7 +22,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 183, - "need_archival": "archive" + "operation_type": "archive_delete" } ] } 
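A note on these table-info configs: every JSON file in this series spells the retention key `retension_days`, while `mosip_archive_main.py` tests `'retention_days' in table_info`. With the code as diffed above, the misspelled key never matches, the date-based filter is skipped, and every row of the source table is selected for the configured operation; for a `delete` table that means the whole table is purged rather than only rows older than the retention window. A small guard applied to the output of `read_tables_info` would reconcile the two spellings. This is a hypothetical sketch; no `normalize_table_info` helper exists in the repository:

```python
# Hypothetical guard, not part of the repository: reconcile the misspelled
# "retension_days" key used in the JSON configs with the "retention_days"
# key that data_archive() actually looks up.
def normalize_table_info(tables_info):
    for table_info in tables_info:
        if 'retension_days' in table_info and 'retention_days' not in table_info:
            print(f"Warning: {table_info.get('source_table')} uses the misspelled "
                  f"key 'retension_days'; treating it as 'retention_days'.")
            table_info['retention_days'] = table_info.pop('retension_days')
    return tables_info

# Usage sketch: tables_info = normalize_table_info(read_tables_info(db_name))
```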
diff --git a/data-archive/archive-jobs/prereg_archive_table_info.json b/data-archive/archive-jobs/prereg_archive_table_info.json index 203edb2c..d501bc4c 100644 --- a/data-archive/archive-jobs/prereg_archive_table_info.json +++ b/data-archive/archive-jobs/prereg_archive_table_info.json @@ -6,7 +6,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 30, - "need_archival": "delete" + "operation_type": "delete" } ] -} +} \ No newline at end of file diff --git a/data-archive/archive-jobs/regprc_archive_table_info.json b/data-archive/archive-jobs/regprc_archive_table_info.json index 16e137ae..631279f7 100644 --- a/data-archive/archive-jobs/regprc_archive_table_info.json +++ b/data-archive/archive-jobs/regprc_archive_table_info.json @@ -6,7 +6,7 @@ "id_column": "abis_resp_id", "date_column": "cr_dtimes", "retension_days": 183, - "need_archival": "archive" + "operation_type": "archive_delete" }, { "source_table": "abis_response", @@ -14,7 +14,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 183, - "need_archival": "archive" + "operation_type": "archive_delete" }, { "source_table": "abis_request", @@ -22,7 +22,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 183, - "need_archival": "archive" + "operation_type": "archive_delete" }, { "source_table": "reg_demo_dedupe_list", @@ -30,7 +30,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 183, - "need_archival": "archive" + "operation_type": "archive_delete" }, { "source_table": "registration_transaction", @@ -38,7 +38,7 @@ "id_column": "regtrn_id", "date_column": "cr_dtimes", "retension_days": 183, - "need_archival": "archive" + "operation_type": "archive_delete" } ] } diff --git a/data-archive/archive-jobs/resident_archive_table_info.json b/data-archive/archive-jobs/resident_archive_table_info.json index 6273352f..f14c1be3 100644 --- a/data-archive/archive-jobs/resident_archive_table_info.json +++ b/data-archive/archive-jobs/resident_archive_table_info.json @@ -6,7 +6,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 30, - "need_archival": "delete" + "operation_type": "delete" }, { "source_table": "resident_grievance_ticket", @@ -14,7 +14,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 365, - "need_archival": "archive" + "operation_type": "archive_delete" }, { "source_table": "resident_session", @@ -22,7 +22,7 @@ "id_column": "session_id", "date_column": "login_dtimes", "retension_days": 30, - "need_archival": "archive" + "operation_type": "archive_delete" }, { "source_table": "resident_transaction", @@ -30,7 +30,7 @@ "id_column": "id", "date_column": "cr_dtimes", "retension_days": 365, - "need_archival": "archive" + "operation_type": "archive_delete" }, { "source_table": "resident_user_actions", @@ -38,7 +38,7 @@ "id_column": "ida_token", "date_column": "last_bell_notif_click_dtimes", "retension_days": 365, - "need_archival": "archive" + "operation_type": "archive_delete" } ] } From f06ba0f2f667665c45a701f068d0d1ee52987887 Mon Sep 17 00:00:00 2001 From: bhumi46 <111699703+bhumi46@users.noreply.github.com> Date: Thu, 28 Dec 2023 13:17:24 +0530 Subject: [PATCH 047/130] Create README.md Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com> --- data-archive/archive-jobs/README.md | 95 +++++++++++++++++++++++++++++ 1 file changed, 95 insertions(+) create mode 100644 data-archive/archive-jobs/README.md diff --git a/data-archive/archive-jobs/README.md b/data-archive/archive-jobs/README.md new file mode 100644 index 
00000000..d4c2bc66 --- /dev/null
+++ b/data-archive/archive-jobs/README.md
@@ -0,0 +1,95 @@
+# PostgreSQL Data Archiving Script
+
+This Python script archives data from multiple PostgreSQL databases. It establishes connections to the source and archive databases and performs archiving operations based on the configuration specified for each table.
+
+## Prerequisites
+
+Ensure the following prerequisites are met before using the script:
+
+- Python installed (version 3.6 or later)
+- PostgreSQL installed
+- Required Python package installed: `psycopg2`
+
+```bash
+pip install psycopg2
+```
+
+## Configuration
+
+The script uses a configuration file (db.properties) or environment variables for the database connection details. Ensure either the configuration file is present or the required environment variables are set.
+
+### Configuration file (db.properties)
+
+If using a configuration file, create a db.properties file in the script's directory with the following format:
+
+[ARCHIVE]
+ARCHIVE_DB_HOST = your_archive_db_host
+ARCHIVE_DB_PORT = your_archive_db_port
+ARCHIVE_DB_NAME = your_archive_db_name
+ARCHIVE_SCHEMA_NAME = your_archive_schema_name
+ARCHIVE_DB_UNAME = your_archive_db_username
+ARCHIVE_DB_PASS = your_archive_db_password
+
+[Databases]
+DB_NAMES = db_name1, db_name2
+
+[db_name1]
+SOURCE_DB_HOST = source_db_host1
+SOURCE_DB_PORT = source_db_port1
+SOURCE_DB_NAME = source_db_name1
+SOURCE_SCHEMA_NAME = source_schema_name1
+SOURCE_DB_UNAME = source_db_username1
+SOURCE_DB_PASS = source_db_password1
+
+[db_name2]
+SOURCE_DB_HOST = source_db_host2
+SOURCE_DB_PORT = source_db_port2
+SOURCE_DB_NAME = source_db_name2
+SOURCE_SCHEMA_NAME = source_schema_name2
+SOURCE_DB_UNAME = source_db_username2
+SOURCE_DB_PASS = source_db_password2
+
+### Environment variables
+
+Alternatively, set the following environment variables:
+
+export ARCHIVE_DB_HOST=your_archive_db_host
+export ARCHIVE_DB_PORT=your_archive_db_port
+export ARCHIVE_DB_NAME=your_archive_db_name
+export ARCHIVE_SCHEMA_NAME=your_archive_schema_name
+export ARCHIVE_DB_UNAME=your_archive_db_username
+export ARCHIVE_DB_PASS=your_archive_db_password
+
+export DB_NAMES=db_name1,db_name2
+
+export DB_NAME1_SOURCE_DB_HOST=source_db_host1
+export DB_NAME1_SOURCE_DB_PORT=source_db_port1
+export DB_NAME1_SOURCE_DB_NAME=source_db_name1
+export DB_NAME1_SOURCE_SCHEMA_NAME=source_schema_name1
+export DB_NAME1_SOURCE_DB_UNAME=source_db_username1
+export DB_NAME1_SOURCE_DB_PASS=source_db_password1
+
+export DB_NAME2_SOURCE_DB_HOST=source_db_host2
+export DB_NAME2_SOURCE_DB_PORT=source_db_port2
+export DB_NAME2_SOURCE_DB_NAME=source_db_name2
+export DB_NAME2_SOURCE_SCHEMA_NAME=source_schema_name2
+export DB_NAME2_SOURCE_DB_UNAME=source_db_username2
+export DB_NAME2_SOURCE_DB_PASS=source_db_password2
+
+## Running the Script
+
+Execute the script by running the following command:
+
+python script_name.py
+
+Replace script_name.py with the actual name of the Python script.
+
+## Archiving Operations
+
+The script supports the following archiving operations for each table, selected via `operation_type` in `table_info.json`:
+
+- Delete: delete records from the source table.
+- Archive and Delete: archive records to an archive table and then delete them from the source table.
+- Archive (No Delete): archive records to an archive table without deleting them from the source table.
+- None: skip archival for the specified table.
+
+Review and customize `tables_info` in the script to match your database structure and archiving requirements; a sample entry is shown below.
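+
+For reference, a single entry in one of the `*_archive_table_info.json` files looks like the following sketch. The field names are taken from the files in this repository (note the retention key is spelled `retension_days` in these files); the `archive_table` value here is illustrative:
+
+```json
+{
+  "source_table": "credential_request_status",
+  "archive_table": "credential_request_status_arch",
+  "id_column": "individual_id",
+  "date_column": "cr_dtimes",
+  "retension_days": 30,
+  "operation_type": "archive_delete"
+}
+```
+
+The configuration-loading behaviour described above can also be sketched in a few lines of Python. This is an illustration of the file-then-environment fallback, not the script's exact code, and the function name is an assumption:
+
+```python
+import configparser
+import os
+
+def load_archive_config(path="db.properties"):
+    """Return archive-DB connection details from db.properties,
+    falling back to environment variables when the file is absent."""
+    keys = ["ARCHIVE_DB_HOST", "ARCHIVE_DB_PORT", "ARCHIVE_DB_NAME",
+            "ARCHIVE_SCHEMA_NAME", "ARCHIVE_DB_UNAME", "ARCHIVE_DB_PASS"]
+    if os.path.exists(path):
+        parser = configparser.ConfigParser()
+        parser.read(path)
+        # ConfigParser lower-cases option names, so these look-ups are
+        # case-insensitive with respect to the keys in the file.
+        return {k: parser["ARCHIVE"][k] for k in keys}
+    # No properties file: read the same keys from the environment
+    # (raises KeyError if a required variable is not set).
+    return {k: os.environ[k] for k in keys}
+```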
+
+## Retrieving Table Information from a JSON File or Container Volume
+
+The script attempts to load table information from a JSON file (`{db_name.lower()}_archive_table_info.json`). If the file is not found, it tries to retrieve the information from a container volume specified by the CONTAINER_VOLUME_PATH environment variable.
+
+Set the CONTAINER_VOLUME_PATH environment variable to the path of the container volume containing the JSON file.
From 24323e7148526869239dab95378cfc043890c702 Mon Sep 17 00:00:00 2001
From: bhumi46 <111699703+bhumi46@users.noreply.github.com>
Date: Thu, 28 Dec 2023 13:20:20 +0530
Subject: [PATCH 048/130] Update README.MD

Signed-off-by: bhumi46 <111699703+bhumi46@users.noreply.github.com>
--- data-archive/db_scripts/README.MD | 180 +++++++++++++++++++++++++++++- 1 file changed, 175 insertions(+), 5 deletions(-)
diff --git a/data-archive/db_scripts/README.MD b/data-archive/db_scripts/README.MD
index 90735ba5..4d2ff543 100644
--- a/data-archive/db_scripts/README.MD
+++ b/data-archive/db_scripts/README.MD
@@ -1,8 +1,178 @@
-# Pre-regstration Database
-
-## Overview
-This folder containers various SQL scripts to create database and tables in postgres. The tables are described under `/ddl/`. Default data that's populated in the tables is present under `/dml` folder
-
-This folder containers various SQL scripts to create database and tables in postgres. These scripts are automatically run with as part of DB initialisation in [Sandbox Deployment](https://docs.mosip.io/1.2.0/deployment/sandbox-deployment)
-
-Developers may run the SQLs using `/deploy.sh` script.
+## MOSIP Commons module databases (**mosip_master, mosip_kernel, mosip_idrepo, mosip_idmap, mosip_iam, mosip_audit**): scripts inventory and deployment guidelines for PostgreSQL databases.

+#### The details below give clear information on the complete database script structure, along with instructions for database script deployment.

+## Prerequisites

+* DB server and access details.

+* The Postgres client (psql) has to be installed on the deployment servers.

+* Copy the latest database scripts (DDL, DML, .sh, etc.) from the git repository onto the DB deployment server.

+* Update the necessary details in the properties file against the relevant variables being used (details listed below).

+* Database objects related to MOSIP modules are placed in the **mosip_base_directory** >> db_scripts >> mosip_<db_name> folder in the git repository.

+**Example:** the commons module script folder is /**mosip_base_directory** >> db_scripts >> mosip_kernel, where all the database scripts related to kernel are available.

+* Create a log file directory on the DB deployment server before updating the properties file. Follow this step to create it:

+  bash-4.2$ mkdir /mosip_base_directory/

+* If you wish to place the log files in a directory other than the one mentioned above, create that directory and specify its path in the properties file.
+
+* Pull the DB deployment scripts from the git repository to the deployment server and start deploying, OR
+
+* If you are pulling them to a local system from the git repository and pushing them back to the deployment server using WinSCP, make sure to modify the following encoding settings in WinSCP before pushing the files: Open WinSCP --> Options --> Preferences --> Transfer --> Edit --> In the "Transfer mode" section --> select "Text" --> Click Ok --> Click Ok.
+
+## Each database folder has the following files / folders
+
+* **ddl folder:** This folder contains all the data definition language (DDL) scripts used to create or alter the database objects of this module.
+
+* **dml folder:** This folder contains the scripts (insert/update/delete) that create the seed data / metadata needed to run this module.
+
+* **mosip_<db_name>_db.sql:** This file contains the database creation script of this module.
+
+* **mosip_<db_name>_grants.sql:** The privilege / grant scripts assigned to database users / roles to access database objects are described in this file.
+
+* **mosip_role_<db_name>user.sql:** The creation script for the role that the application uses to perform DML operations is defined here.
+
+* **mosip_role_common.sql:** This file contains the creation script for the common roles needed to manage the database.
+
+* **mosip_<db_name>_ddl_deploy.sql:** This is a wrapper script used to **deploy the DDL scripts available in the ddl folder**. It also defines the script run sequence, to manage the dependencies across the DB objects being created.
+
+* **mosip_<db_name>_dml_deploy.sql:** This is a wrapper script used to **deploy the DML scripts available in the dml folder**. It also defines the script run sequence, to manage the dependencies across DB objects.
+
+* **mosip_<db_name>_db_deploy.sh:** This shell script is present in each database folder/directory.
+
+* **mosip_<db_name>_deploy.properties:** This properties file is present in each database folder.
+
+* **mosip_commons_db_deployment.sh:** This .sh file is present in the /home/madmin/database directory and is executed to deploy all commons databases in a single command.
+
+**Note:** Not all modules have DML scripts. Make the necessary changes to the DML variables in the properties file for the modules where DML exists.
+
+**Note:** No need to change anything in the shell script unless it is really causing a problem or a further implementation is being introduced.
+
+Once sourcing of the database files is complete, follow the DB deployment process below, modifying the properties file according to the requirement.
+
+## Deployment can be performed in two ways based on the requirement, as follows:
+1) DB deployment for all common module databases
+2) DB deployment for single or selected databases
+
+### Properties file variable details and descriptions: the properties file has to be updated with the required details before proceeding with the deployment steps for each database.
+
+**DB_SERVERIP:** Contains the destination DB server IP (ex: 10.0.0.1) where the deployment is targeted.
+
+**DB_PORT:** Contains the Postgres server port where Postgres accepts connections. Ex: 5433
+
+**SU_USER:** Contains the Postgres superuser name used to connect to the Postgres database, i.e. postgres.
+
+**SU_USER_PWD:** Contains the password for the Postgres superuser.
+
+**DEFAULT_DB_NAME:** Default database name used to connect to the respective Postgres server, i.e. postgres.
+
+**MOSIP_DB_NAME:** MOSIP database name for which the deployment is scheduled.
+
+**SYSADMIN_USER:** This variable contains the mosip_common_role, which acts as the superuser for the remaining actions performed by the shell script.
+
+**SYSADMIN_PWD:** Contains the credential details for SYSADMIN_USER.
+
+**DBADMIN_PWD:** Contains the credential details for DBADMIN_USER.
+
+**APPADMIN_PWD:** Contains the credential details for APPADMIN_USER.
+
+**DBUSER_PWD:** Contains the credential details for the database user.
+
+**BASE_PATH:** Path to the DB scripts kept on the deployment server.
+
+**LOG_PATH:** Path where the deployment log file will be created.
+
+**COMMON_ROLE_FILENAME:** Contains the common roles creation filename, ex: mosip_role_common.sql
+
+**APP_ROLE_FILENAME:** Contains the specific DB user role creation filename, ex: mosip_role_databaseuser.sql
+
+**DB_CREATION_FILENAME:** Contains the specific DB creation script name, ex: mosip_database_db.sql
+
+**ACCESS_GRANT_FILENAME:** This variable contains the filename of the access provisioning script for the users created above, ex: mosip_<db_name>_grants.sql
+
+**DDL_FILENAME:** DDL script filename, ex: mosip_<db_name>_ddl_deploy.sql
+
+**DML_FLAG:** A flag variable set to 0 or 1 depending on whether any DML exists for the particular DB: flag=0 means no DML, flag=1 means DML exists.
+
+**DML_FILENAME:** DML script filename, required only when the flag is 1; otherwise it is empty or null, ex: mosip_<db_name>_dml_deploy.sql
+
+**Note: Make sure there is a single empty line at the end of the .properties file content, and no spaces at the beginning or end of the parameter values.** A sample properties sketch is included at the end of this README.
+
+## DB deployment for all common module databases with single-click deployment
+
+**Step 1** -> Make the required modifications beforehand to all the respective database properties files **(mosip_<db_name>_deploy.properties)** in the respective database directories. The path of the properties file and the variable list remain the same as explained above. Once the properties files are ready, access the directory where the deployment script is kept.
+
+**Step 2** -> To deploy all common module databases, run the **"mosip_commons_db_deployment.sh"** script, which is available in the /database directory. To run the **"mosip_commons_db_deployment.sh"** script, follow the commands below:
+
+  **Enter:-bash-4.2$** cd /home/madmin/database/
+
+  **Enter:-bash-4.2$** bash mosip_commons_db_deployment.sh
+
+**Step 3** -> Please observe the Post Deployment Validation steps below.
+
+**No modification is required to any of the .sql files in the database folder. If a modification is required, please reach out to the database team and have it made.**
+
+## DB deployment for single or selected databases
+
+**Step 1** -> Update the properties (.properties) file with the required parameter values for the single or selected databases. All these .sh and properties files are kept in each database directory. Then follow the steps below:
+
+**Step 2** -> Log in to the deployment server/VM.
+
+**Step 3** -> Check the pwd (present working directory). Make sure we are inside the right database folder/directory to run the deployment for that specific database.
+
+**Enter:-bash-4.2$** pwd
+
+This should be the path if we are performing the deployment for the database named **mosip_<schema_name>**: /home/madmin/database/mosip_<schema_name>
+
+**Step 4** -> Move all the necessary files from the local directory to the deployment server directory under the respective databases.
+
+**Step 5** -> After the prior modifications to the properties file, run the deployment shell script as given below:
+
+**Enter:-bash-4.2$** bash mosip_<db_name>_db_deploy.sh mosip_<db_name>_deploy.properties
+
+**Step 6** -> Please observe the Post Deployment Validation steps below.
+
+**No modification is required to any of the .sql files in the database folder. If a modification is required, please reach out to the database team and have it made.**
+
+### Post Deployment Validation
+
+**Note:** If you encounter connection-failure messages, recheck the details (IP address, port number, database name, password) entered in the properties file.
+
+**Key points during or after the script execution:**
+
+  * Properties file found message
+
+  * Server status
+
+  * Accessing the right path for the DB deployment
+
+  * Check for any active connections
+
+  * Creation of roles, database, and schemas; granting of access; creation of the respective tables
+
+  * Loading of data (DML operations), valid only for those DBs which carry DML actions
+
+  * End of the sourcing or deployment process
+
+**After the deployment process, look out for each database deployment log file, which captures all stages of the deployment. The log file path is defined in the properties file of the respective database.**
+
+**During all the above stages, watch out for any errors, which will be captured in the log file.**
+
+Kindly ignore **NOTICE** or **SKIPPING** messages, as these messages state that the particular action is already in place, hence the SQL script skips performing it again.
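+
+To make the variable descriptions above concrete, here is a sketch of what a mosip_<db_name>_deploy.properties file could look like. All values are illustrative assumptions, not the actual file shipped with any module; mosip_kernel is used only as an example database name:
+
+DB_SERVERIP=10.0.0.1
+DB_PORT=5433
+SU_USER=postgres
+SU_USER_PWD=your_su_password
+DEFAULT_DB_NAME=postgres
+MOSIP_DB_NAME=mosip_kernel
+SYSADMIN_USER=sysadmin
+SYSADMIN_PWD=your_sysadmin_password
+DBADMIN_PWD=your_dbadmin_password
+APPADMIN_PWD=your_appadmin_password
+DBUSER_PWD=your_dbuser_password
+BASE_PATH=/home/madmin/database/
+LOG_PATH=/home/madmin/logs/
+COMMON_ROLE_FILENAME=mosip_role_common.sql
+APP_ROLE_FILENAME=mosip_role_kerneluser.sql
+DB_CREATION_FILENAME=mosip_kernel_db.sql
+ACCESS_GRANT_FILENAME=mosip_kernel_grants.sql
+DDL_FILENAME=mosip_kernel_ddl_deploy.sql
+DML_FLAG=1
+DML_FILENAME=mosip_kernel_dml_deploy.sql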
From 5e4b20306a6e8fe1947391ad604aa44fa5747a33 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Mon, 7 Oct 2024 12:42:51 +0530 Subject: [PATCH 049/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 9e32653f..089ccb41 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -29,7 +29,7 @@ jobs: SERVICE_NAME: 'data-archive' fail-fast: false name: ${{ matrix.SERVICE_NAME }} - uses: mosip/kattu/.github/workflows/docker-build.yml@master + uses: bn46/kattu/.github/workflows/docker-build.yml@master with: SERVICE_LOCATION: ${{ matrix.SERVICE_LOCATION }} SERVICE_NAME: ${{ matrix.SERVICE_NAME }} From b4febf27d36abd6be778703252d3dd1ec71a214e Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Mon, 7 Oct 2024 12:46:36 +0530 Subject: [PATCH 050/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 089ccb41..1c356b57 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -29,7 +29,7 @@ jobs: SERVICE_NAME: 'data-archive' fail-fast: false name: ${{ matrix.SERVICE_NAME }} - uses: bn46/kattu/.github/workflows/docker-build.yml@master + uses: bn46/kattu/.github/workflows/docker-build.yml@develop with: SERVICE_LOCATION: ${{ matrix.SERVICE_LOCATION }} SERVICE_NAME: ${{ matrix.SERVICE_NAME }} From f296fb4200ffd5ac7cb4f7938256c9c12b692437 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Mon, 7 Oct 2024 13:05:57 +0530 Subject: [PATCH 051/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 1c356b57..cca4bb78 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -38,3 +38,5 @@ jobs: ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }} RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_DEVOPS }} + SLACK_OAUTH_TOKEN: ${{ secrets.SLACK_OAUTH_TOKEN }} # Optional if you want a different token for this repo + SLACK_MAPPING: ${{ secrets.SLACK_MAPPING }} From d8bbec1851c6a8c389be432503c343f1bf503338 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Mon, 7 Oct 2024 13:10:56 +0530 Subject: [PATCH 052/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index cca4bb78..e53dc774 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -37,6 +37,6 @@ jobs: DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }} ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }} RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} - SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_DEVOPS }} - SLACK_OAUTH_TOKEN: ${{ secrets.SLACK_OAUTH_TOKEN }} # Optional if you want 
a different token for this repo - SLACK_MAPPING: ${{ secrets.SLACK_MAPPING }} + # SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_DEVOPS }} + # SLACK_OAUTH_TOKEN: ${{ secrets.SLACK_OAUTH_TOKEN }} # Optional if you want a different token for this repo + # SLACK_MAPPING: ${{ secrets.SLACK_MAPPING }} From eb97d0e59ff9f945582ec3018ad468785422f2db Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Mon, 7 Oct 2024 13:11:25 +0530 Subject: [PATCH 053/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index e53dc774..3ebf8cf4 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -37,6 +37,6 @@ jobs: DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }} ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }} RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} - # SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_DEVOPS }} + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_DEVOPS }} # SLACK_OAUTH_TOKEN: ${{ secrets.SLACK_OAUTH_TOKEN }} # Optional if you want a different token for this repo # SLACK_MAPPING: ${{ secrets.SLACK_MAPPING }} From 2b1ee6df36bc5071b101d53cbf2319e18625f05f Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Mon, 7 Oct 2024 14:57:21 +0530 Subject: [PATCH 054/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 3ebf8cf4..cca4bb78 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -38,5 +38,5 @@ jobs: ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }} RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_DEVOPS }} - # SLACK_OAUTH_TOKEN: ${{ secrets.SLACK_OAUTH_TOKEN }} # Optional if you want a different token for this repo - # SLACK_MAPPING: ${{ secrets.SLACK_MAPPING }} + SLACK_OAUTH_TOKEN: ${{ secrets.SLACK_OAUTH_TOKEN }} # Optional if you want a different token for this repo + SLACK_MAPPING: ${{ secrets.SLACK_MAPPING }} From 56e10bc53f68b156629a6f54432ade65f9a48120 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Mon, 7 Oct 2024 18:51:54 +0530 Subject: [PATCH 055/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index cca4bb78..1e902c91 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -38,5 +38,19 @@ jobs: ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }} RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_DEVOPS }} - SLACK_OAUTH_TOKEN: ${{ secrets.SLACK_OAUTH_TOKEN }} # Optional if you want a different token for this repo - SLACK_MAPPING: ${{ secrets.SLACK_MAPPING }} + # SLACK_OAUTH_TOKEN: ${{ secrets.SLACK_OAUTH_TOKEN }} # Optional if you want a different token for this repo + # SLACK_MAPPING: ${{ secrets.SLACK_MAPPING }} + notify-slack: + needs: build-dockers # Ensures this job runs after build-dockers + 
uses: bn46/kattu/.github/workflows/slack-notification.yml@develop # Call the reusable Slack Notification workflow + with: + pr_author: ${{ github.event.pull_request.user.login }} # Use the PR author if available + repo: ${{ github.repository }} + commit: ${{ github.sha }} + message: ${{ github.event.head_commit.message }} # or ${{ github.event.inputs.message }} for manual triggers + status: ${{ job.status }} + workflow: ${{ github.workflow }} + job_name: ${{ github.job }} + secrets: + slack_mapping: ${{ secrets.SLACK_MAPPING }} # Centralized secret + slack_oauth_token: ${{ secrets.SLACK_OAUTH_TOKEN }} # Centralized secret From 128233f993bf7dd79bbaded67a8b2251885988ff Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Mon, 7 Oct 2024 19:47:58 +0530 Subject: [PATCH 056/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 53 ++++++++++++++++-------------- 1 file changed, 28 insertions(+), 25 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 1e902c91..1890d9e7 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -29,28 +29,31 @@ jobs: SERVICE_NAME: 'data-archive' fail-fast: false name: ${{ matrix.SERVICE_NAME }} - uses: bn46/kattu/.github/workflows/docker-build.yml@develop - with: - SERVICE_LOCATION: ${{ matrix.SERVICE_LOCATION }} - SERVICE_NAME: ${{ matrix.SERVICE_NAME }} - secrets: - DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }} - ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }} - RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} - SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_DEVOPS }} - # SLACK_OAUTH_TOKEN: ${{ secrets.SLACK_OAUTH_TOKEN }} # Optional if you want a different token for this repo - # SLACK_MAPPING: ${{ secrets.SLACK_MAPPING }} - notify-slack: - needs: build-dockers # Ensures this job runs after build-dockers - uses: bn46/kattu/.github/workflows/slack-notification.yml@develop # Call the reusable Slack Notification workflow - with: - pr_author: ${{ github.event.pull_request.user.login }} # Use the PR author if available - repo: ${{ github.repository }} - commit: ${{ github.sha }} - message: ${{ github.event.head_commit.message }} # or ${{ github.event.inputs.message }} for manual triggers - status: ${{ job.status }} - workflow: ${{ github.workflow }} - job_name: ${{ github.job }} - secrets: - slack_mapping: ${{ secrets.SLACK_MAPPING }} # Centralized secret - slack_oauth_token: ${{ secrets.SLACK_OAUTH_TOKEN }} # Centralized secret + runs-on: ubuntu-latest # Ensure you specify the runner + steps: + - name: Checkout code + uses: actions/checkout@v2 + + - name: Build Docker Image + uses: bn46/kattu/.github/workflows/docker-build.yml@develop + with: + SERVICE_LOCATION: ${{ matrix.SERVICE_LOCATION }} + SERVICE_NAME: ${{ matrix.SERVICE_NAME }} + env: + DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }} + ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }} + RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_DEVOPS }} + + - name: Notify Slack + uses: ./path-to-your-slack-notification-action # Update this with the correct path + env: + SLACK_MAPPING: ${{ secrets.SLACK_MAPPING }} + SLACK_OAUTH_TOKEN: ${{ secrets.SLACK_OAUTH_TOKEN }} + PR_AUTHOR: ${{ github.actor }} # Use the GitHub actor or define your own logic + REPO: ${{ github.repository }} + COMMIT: ${{ github.sha }} + MESSAGE: ${{ 
github.event.head_commit.message || github.event.pull_request.title }} + STATUS: ${{ job.status }} # Now this is valid because it's inside steps + WORKFLOW: ${{ github.workflow }} + JOB_NAME: ${{ job.name }} From aa25c78782e7972d201223ac819d5371c7e86d4d Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Mon, 7 Oct 2024 19:51:42 +0530 Subject: [PATCH 057/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 1890d9e7..5f9b4b58 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -29,12 +29,12 @@ jobs: SERVICE_NAME: 'data-archive' fail-fast: false name: ${{ matrix.SERVICE_NAME }} - runs-on: ubuntu-latest # Ensure you specify the runner + runs-on: ubuntu-latest steps: - - name: Checkout code + - name: Checkout repository uses: actions/checkout@v2 - - - name: Build Docker Image + + - name: Build and push Docker images uses: bn46/kattu/.github/workflows/docker-build.yml@develop with: SERVICE_LOCATION: ${{ matrix.SERVICE_LOCATION }} @@ -43,17 +43,17 @@ jobs: DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }} ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }} RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} - SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_DEVOPS }} - - name: Notify Slack - uses: ./path-to-your-slack-notification-action # Update this with the correct path + - name: Send Slack Notification + uses: ./.github/actions/slack-notification-action + with: + pr_author: ${{ github.actor }} # Use the actor as PR author if it's triggered by a PR + repo: ${{ github.repository }} + commit: ${{ github.sha }} + message: ${{ github.event.head_commit.message }} + status: ${{ job.status }} + workflow: ${{ github.workflow }} + job_name: ${{ job.name }} env: SLACK_MAPPING: ${{ secrets.SLACK_MAPPING }} SLACK_OAUTH_TOKEN: ${{ secrets.SLACK_OAUTH_TOKEN }} - PR_AUTHOR: ${{ github.actor }} # Use the GitHub actor or define your own logic - REPO: ${{ github.repository }} - COMMIT: ${{ github.sha }} - MESSAGE: ${{ github.event.head_commit.message || github.event.pull_request.title }} - STATUS: ${{ job.status }} # Now this is valid because it's inside steps - WORKFLOW: ${{ github.workflow }} - JOB_NAME: ${{ job.name }} From 833e960c262ca5314ccdd51d6792b078601b85ec Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Mon, 7 Oct 2024 20:41:14 +0530 Subject: [PATCH 058/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 39 +++++++++--------------------- 1 file changed, 11 insertions(+), 28 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 5f9b4b58..fd9165ee 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -29,31 +29,14 @@ jobs: SERVICE_NAME: 'data-archive' fail-fast: false name: ${{ matrix.SERVICE_NAME }} - runs-on: ubuntu-latest - steps: - - name: Checkout repository - uses: actions/checkout@v2 - - - name: Build and push Docker images - uses: bn46/kattu/.github/workflows/docker-build.yml@develop - with: - SERVICE_LOCATION: ${{ matrix.SERVICE_LOCATION }} - SERVICE_NAME: ${{ matrix.SERVICE_NAME }} - env: - DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }} - 
ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }} - RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} - - - name: Send Slack Notification - uses: ./.github/actions/slack-notification-action - with: - pr_author: ${{ github.actor }} # Use the actor as PR author if it's triggered by a PR - repo: ${{ github.repository }} - commit: ${{ github.sha }} - message: ${{ github.event.head_commit.message }} - status: ${{ job.status }} - workflow: ${{ github.workflow }} - job_name: ${{ job.name }} - env: - SLACK_MAPPING: ${{ secrets.SLACK_MAPPING }} - SLACK_OAUTH_TOKEN: ${{ secrets.SLACK_OAUTH_TOKEN }} + uses: mosip/kattu/.github/workflows/docker-build.yml@master + with: + SERVICE_LOCATION: ${{ matrix.SERVICE_LOCATION }} + SERVICE_NAME: ${{ matrix.SERVICE_NAME }} + secrets: + DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }} + ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }} + RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_DEVOPS }} + SLACK_USER_MAPPING: ${{ secrets.SLACK_USER_MAPPING }} + SLACK_OAUTH_TOKEN: ${{ secrets.SLACK_USER_MAPPING }} From 841cc95bd8ed32b93ba156e14db5dd5d67d0663c Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Tue, 8 Oct 2024 10:25:36 +0530 Subject: [PATCH 059/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index fd9165ee..4543da38 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -39,4 +39,4 @@ jobs: RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_DEVOPS }} SLACK_USER_MAPPING: ${{ secrets.SLACK_USER_MAPPING }} - SLACK_OAUTH_TOKEN: ${{ secrets.SLACK_USER_MAPPING }} + SLACK_OAUTH_TOKEN: ${{ secrets.SLACK_OAUTH_TOKEN }} From 4110895c12339cac9b68b408763c651bcb548f31 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Tue, 8 Oct 2024 10:26:05 +0530 Subject: [PATCH 060/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 4543da38..24a14f2d 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -29,7 +29,7 @@ jobs: SERVICE_NAME: 'data-archive' fail-fast: false name: ${{ matrix.SERVICE_NAME }} - uses: mosip/kattu/.github/workflows/docker-build.yml@master + uses: bn46/kattu/.github/workflows/docker-build.yml@develop with: SERVICE_LOCATION: ${{ matrix.SERVICE_LOCATION }} SERVICE_NAME: ${{ matrix.SERVICE_NAME }} From 32df67f2eea0629d897e6d5241f36c762c7cdd01 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Tue, 8 Oct 2024 10:29:19 +0530 Subject: [PATCH 061/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 24a14f2d..6f4c0de4 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -39,4 +39,4 @@ jobs: RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} SLACK_WEBHOOK_URL: ${{ 
secrets.SLACK_WEBHOOK_DEVOPS }} SLACK_USER_MAPPING: ${{ secrets.SLACK_USER_MAPPING }} - SLACK_OAUTH_TOKEN: ${{ secrets.SLACK_OAUTH_TOKEN }} + KEY: ${{ secrets.SLACK_OAUTH_TOKEN }} From fe63eac142f86e42cf9529afce3e284c4e3e7a6f Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Tue, 8 Oct 2024 10:36:11 +0530 Subject: [PATCH 062/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 6f4c0de4..24a14f2d 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -39,4 +39,4 @@ jobs: RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_DEVOPS }} SLACK_USER_MAPPING: ${{ secrets.SLACK_USER_MAPPING }} - KEY: ${{ secrets.SLACK_OAUTH_TOKEN }} + SLACK_OAUTH_TOKEN: ${{ secrets.SLACK_OAUTH_TOKEN }} From ebde57e0de4a2661698da05455f8b5a190a8f478 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Tue, 8 Oct 2024 12:24:10 +0530 Subject: [PATCH 063/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 63 ++++++++++++------------------ 1 file changed, 26 insertions(+), 37 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 24a14f2d..cd64578c 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -1,42 +1,31 @@ -name: Build data-archive +name: Notify PR Failure on: - release: - types: [published] pull_request: - types: [opened, reopened, synchronize] - workflow_dispatch: - inputs: - message: - description: 'Message for manually triggering' - required: false - default: 'Triggered for Updates' - type: string - push: - branches: - - master - - 1.* - - develop - - release* - - MOSIP* + types: [opened, synchronize] jobs: - build-dockers: - strategy: - matrix: - include: - - SERVICE_LOCATION: 'data-archive' - SERVICE_NAME: 'data-archive' - fail-fast: false - name: ${{ matrix.SERVICE_NAME }} - uses: bn46/kattu/.github/workflows/docker-build.yml@develop - with: - SERVICE_LOCATION: ${{ matrix.SERVICE_LOCATION }} - SERVICE_NAME: ${{ matrix.SERVICE_NAME }} - secrets: - DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }} - ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }} - RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} - SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_DEVOPS }} - SLACK_USER_MAPPING: ${{ secrets.SLACK_USER_MAPPING }} - SLACK_OAUTH_TOKEN: ${{ secrets.SLACK_OAUTH_TOKEN }} + notify-failure: + runs-on: ubuntu-latest + steps: + - name: Checkout Code + uses: actions/checkout@v2 + + - name: Get PR author + id: pr_author + run: echo "author=${{ github.event.pull_request.user.login }}" >> $GITHUB_OUTPUT + + - name: Get Slack username from Secret Mapping + id: slack_mapping + run: | + github_username="${{ steps.pr_author.outputs.author }}" + slack_username=$(echo '${{ secrets.SLACK_MAP }}' | jq -r --arg ghuser "$github_username" '.[$ghuser]') + echo "slack_user=${slack_username}" >> $GITHUB_OUTPUT + + - name: Notify via Slack + if: steps.slack_mapping.outputs.slack_user != '' + uses: slackapi/slack-github-action@v1.15.0 + with: + slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }} + channel-id: ${{ steps.slack_mapping.outputs.slack_user }} # Ensure this is correct + text: "Your PR 
#${{ github.event.pull_request.number }} has failed." From ef6b42b315e67c1f8b0700aa8c15afb771546347 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Tue, 8 Oct 2024 13:31:17 +0530 Subject: [PATCH 064/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 31 +++++++++++------------------- 1 file changed, 11 insertions(+), 20 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index cd64578c..5b56c83a 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -1,31 +1,22 @@ -name: Notify PR Failure +name: Slack Notification on Pull Request on: pull_request: types: [opened, synchronize] jobs: - notify-failure: + notify: runs-on: ubuntu-latest + steps: - - name: Checkout Code + - name: Checkout repository uses: actions/checkout@v2 - - name: Get PR author - id: pr_author - run: echo "author=${{ github.event.pull_request.user.login }}" >> $GITHUB_OUTPUT - - - name: Get Slack username from Secret Mapping - id: slack_mapping - run: | - github_username="${{ steps.pr_author.outputs.author }}" - slack_username=$(echo '${{ secrets.SLACK_MAP }}' | jq -r --arg ghuser "$github_username" '.[$ghuser]') - echo "slack_user=${slack_username}" >> $GITHUB_OUTPUT - - - name: Notify via Slack - if: steps.slack_mapping.outputs.slack_user != '' - uses: slackapi/slack-github-action@v1.15.0 + - name: Post to a Slack channel + id: slack + uses: slackapi/slack-github-action@v1.27.0 with: - slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }} - channel-id: ${{ steps.slack_mapping.outputs.slack_user }} # Ensure this is correct - text: "Your PR #${{ github.event.pull_request.number }} has failed." + channel-id: 'U07QEJS9LM9' # Replace with your actual channel IDs + slack-message: "GitHub build result: ${{ job.status }}\n${{ github.event.pull_request.html_url || github.event.head_commit.url }}" + env: + SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} From 8508d2b538cffcba3fffad037967ff79443a8dd8 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Tue, 8 Oct 2024 13:36:45 +0530 Subject: [PATCH 065/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 5b56c83a..b6a932cc 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -19,4 +19,4 @@ jobs: channel-id: 'U07QEJS9LM9' # Replace with your actual channel IDs slack-message: "GitHub build result: ${{ job.status }}\n${{ github.event.pull_request.html_url || github.event.head_commit.url }}" env: - SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} + SLACK_BOT_TOKEN: ${{ secrets.SLACK_OAUTH_TOKEN }} From 151e76ecd147e4e73ecc2a40d3808b0529dbc42b Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Tue, 8 Oct 2024 14:47:47 +0530 Subject: [PATCH 066/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index b6a932cc..d03ddd98 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -10,7 +10,12 @@ jobs: steps: - name: Checkout repository 
- uses: actions/checkout@v2 + uses: actions/checkout@v3 + + - name: Use OAuth Token + run: | + echo "Using OAuth Token" + echo "${{ secrets.SLACK_OAUTH_TOKEN }}" - name: Post to a Slack channel id: slack From af9d8e3f305b25ba5d9c7568aba29db314304682 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Tue, 8 Oct 2024 14:58:26 +0530 Subject: [PATCH 067/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index d03ddd98..13dd3131 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -13,10 +13,7 @@ jobs: uses: actions/checkout@v3 - name: Use OAuth Token - run: | - echo "Using OAuth Token" - echo "${{ secrets.SLACK_OAUTH_TOKEN }}" - + run: "echo Using OAuth Token: '${{ secrets.SLACK_OAUTH_TOKEN }}'" - name: Post to a Slack channel id: slack uses: slackapi/slack-github-action@v1.27.0 From 70986ab755063ba022be38f1bdf39d9909c24e7c Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Tue, 8 Oct 2024 14:58:44 +0530 Subject: [PATCH 068/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 13dd3131..bb266252 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -20,5 +20,4 @@ jobs: with: channel-id: 'U07QEJS9LM9' # Replace with your actual channel IDs slack-message: "GitHub build result: ${{ job.status }}\n${{ github.event.pull_request.html_url || github.event.head_commit.url }}" - env: SLACK_BOT_TOKEN: ${{ secrets.SLACK_OAUTH_TOKEN }} From cb217b696335deb4dfb39073867e0ab64d8d988c Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Tue, 8 Oct 2024 15:03:10 +0530 Subject: [PATCH 069/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index bb266252..f4ddf923 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -20,4 +20,5 @@ jobs: with: channel-id: 'U07QEJS9LM9' # Replace with your actual channel IDs slack-message: "GitHub build result: ${{ job.status }}\n${{ github.event.pull_request.html_url || github.event.head_commit.url }}" + env: SLACK_BOT_TOKEN: ${{ secrets.SLACK_OAUTH_TOKEN }} From 6c045d50c31609f91067c2290ab455e512b8acd9 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Tue, 8 Oct 2024 15:37:04 +0530 Subject: [PATCH 070/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index f4ddf923..a66c995c 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -11,14 +11,24 @@ jobs: steps: - name: Checkout repository uses: actions/checkout@v3 - - - name: Use OAuth Token - run: "echo Using OAuth Token: '${{ secrets.SLACK_OAUTH_TOKEN }}'" + - name: Test + env: + TEST_GITHUB_TOKEN: ${{ 
secrets.GITHUB_TOKEN }} + TEST_SECRET: ${{ secrets.TEST_SECRET }} + SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} + run: | + echo ${#TEST_GITHUB_TOKEN} + echo ${#TEST_SECRET} + echo ${#SLACK_BOT_TOKEN} - name: Post to a Slack channel id: slack uses: slackapi/slack-github-action@v1.27.0 with: - channel-id: 'U07QEJS9LM9' # Replace with your actual channel IDs + # Slack channel id, channel name, or user id to post message. + # See also: https://api.slack.com/methods/chat.postMessage#channels + # You can pass in multiple channels to post to by providing a comma-delimited list of channel IDs. + channel-id: 'U07QEJS9LM9' + # For posting a simple plain text message slack-message: "GitHub build result: ${{ job.status }}\n${{ github.event.pull_request.html_url || github.event.head_commit.url }}" - env: - SLACK_BOT_TOKEN: ${{ secrets.SLACK_OAUTH_TOKEN }} + env: + SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} From 4afa4e023743bf7894ebff68041377e35e22e5e2 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Tue, 8 Oct 2024 15:38:58 +0530 Subject: [PATCH 071/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index a66c995c..2da58889 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -3,7 +3,13 @@ name: Slack Notification on Pull Request on: pull_request: types: [opened, synchronize] - + push: + branches: + - master + - 1.* + - develop + - release* + - MOSIP* jobs: notify: runs-on: ubuntu-latest From 8c356d76eb62fedda877f4dd9faf7f3d0a79e5c8 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Wed, 9 Oct 2024 13:20:06 +0530 Subject: [PATCH 072/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 2da58889..4e226c5e 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -10,6 +10,7 @@ on: - develop - release* - MOSIP* + jobs: notify: runs-on: ubuntu-latest @@ -30,11 +31,17 @@ jobs: id: slack uses: slackapi/slack-github-action@v1.27.0 with: - # Slack channel id, channel name, or user id to post message. - # See also: https://api.slack.com/methods/chat.postMessage#channels - # You can pass in multiple channels to post to by providing a comma-delimited list of channel IDs. 
channel-id: 'U07QEJS9LM9' - # For posting a simple plain text message - slack-message: "GitHub build result: ${{ job.status }}\n${{ github.event.pull_request.html_url || github.event.head_commit.url }}" + slack-message: | + *GitHub Action Notification* :rocket: + *Repository*: ${{ github.repository }} + *Message*: ${{ github.event.head_commit.message || github.event.pull_request.title }} + *Commit*: ${{ github.sha }} + *Author*: ${{ github.event.head_commit.author.name || github.event.pull_request.user.login }} + *Action*: ${{ github.event.action }} + *Event Name*: ${{ github.event_name }} + *Branch (ref)*: ${{ github.ref }} + *Workflow*: ${{ github.workflow }} + *PR/Commit URL*: ${{ github.event.pull_request.html_url || github.event.head_commit.url }} env: - SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} + SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} From 636ebc21d8cc5e080aeba79f789b0de8a8dc8a18 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Wed, 9 Oct 2024 13:38:28 +0530 Subject: [PATCH 073/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 29 +++++++++++++++++++---------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 4e226c5e..f44d67dc 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -18,26 +18,35 @@ jobs: steps: - name: Checkout repository uses: actions/checkout@v3 - - name: Test - env: - TEST_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - TEST_SECRET: ${{ secrets.TEST_SECRET }} - SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} + + - name: Get PR author + id: author + run: echo "author=${{ github.event.pull_request.user.login }}" >> $GITHUB_ENV + + - name: Decode Slack Users Secret + id: slack_id run: | - echo ${#TEST_GITHUB_TOKEN} - echo ${#TEST_SECRET} - echo ${#SLACK_BOT_TOKEN} + # Decode the JSON string from the secret + SLACK_USERS_MAP="${{ secrets.SLACK_USERS_MAP }}" + # Use jq to extract the Slack ID corresponding to the GitHub username + SLACK_USER_ID=$(echo "$SLACK_USERS_MAP" | jq -r --arg github_user "${{ env.author }}" '.[$github_user]') + if [ -z "$SLACK_USER_ID" ] || [ "$SLACK_USER_ID" == "null" ]; then + echo "Slack user ID not found for GitHub user: ${{ env.author }}" + exit 1 + fi + echo "SLACK_USER_ID=$SLACK_USER_ID" >> $GITHUB_ENV + - name: Post to a Slack channel id: slack uses: slackapi/slack-github-action@v1.27.0 with: - channel-id: 'U07QEJS9LM9' + channel-id: ${{ env.SLACK_USER_ID }} slack-message: | *GitHub Action Notification* :rocket: *Repository*: ${{ github.repository }} *Message*: ${{ github.event.head_commit.message || github.event.pull_request.title }} *Commit*: ${{ github.sha }} - *Author*: ${{ github.event.head_commit.author.name || github.event.pull_request.user.login }} + *Author*: ${{ github.event.pull_request.user.login }} *Action*: ${{ github.event.action }} *Event Name*: ${{ github.event_name }} *Branch (ref)*: ${{ github.ref }} From 6daf288c01e9f29043cc67ac005227aef4dbb93b Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Wed, 9 Oct 2024 15:12:03 +0530 Subject: [PATCH 074/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 
f44d67dc..ee1494de 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -26,16 +26,26 @@ jobs: - name: Decode Slack Users Secret id: slack_id run: | - # Decode the JSON string from the secret + # Access the JSON string from the secret SLACK_USERS_MAP="${{ secrets.SLACK_USERS_MAP }}" + + # Extract GitHub username of the PR author + GITHUB_USER="${{ github.event.pull_request.user.login }}" + # Use jq to extract the Slack ID corresponding to the GitHub username - SLACK_USER_ID=$(echo "$SLACK_USERS_MAP" | jq -r --arg github_user "${{ env.author }}" '.[$github_user]') + SLACK_USER_ID=$(echo "$SLACK_USERS_MAP" | jq -r --arg github_user "$GITHUB_USER" '.[$github_user]') + + # Check if the Slack user ID was found if [ -z "$SLACK_USER_ID" ] || [ "$SLACK_USER_ID" == "null" ]; then - echo "Slack user ID not found for GitHub user: ${{ env.author }}" + echo "Slack user ID not found for GitHub user: $GITHUB_USER" exit 1 fi + + # Export the Slack user ID to the GitHub environment echo "SLACK_USER_ID=$SLACK_USER_ID" >> $GITHUB_ENV + shell: bash + - name: Post to a Slack channel id: slack uses: slackapi/slack-github-action@v1.27.0 From 6fc0d5fcc7e158843d05a50f6c6f7c740cd28192 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Wed, 9 Oct 2024 15:18:22 +0530 Subject: [PATCH 075/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index ee1494de..584df8a6 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -29,6 +29,9 @@ jobs: # Access the JSON string from the secret SLACK_USERS_MAP="${{ secrets.SLACK_USERS_MAP }}" + # Debug: Print the JSON to ensure it's properly formatted + echo "SLACK_USERS_MAP: $SLACK_USERS_MAP" + # Extract GitHub username of the PR author GITHUB_USER="${{ github.event.pull_request.user.login }}" From 49685a83a98485eb860cdfd818593ecad2f00f76 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Wed, 9 Oct 2024 15:40:51 +0530 Subject: [PATCH 076/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 584df8a6..131df75e 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -36,7 +36,9 @@ jobs: GITHUB_USER="${{ github.event.pull_request.user.login }}" # Use jq to extract the Slack ID corresponding to the GitHub username - SLACK_USER_ID=$(echo "$SLACK_USERS_MAP" | jq -r --arg github_user "$GITHUB_USER" '.[$github_user]') + #SLACK_USER_ID=$(echo "$SLACK_USERS_MAP" | jq -r --arg github_user "$GITHUB_USER" '.[$github_user]') + SLACK_USER_ID=$(echo "$SLACK_USERS_MAP" | grep -oP "$GITHUB_USER:\K[^,]+") + echo $SLACK_USER_ID # Check if the Slack user ID was found if [ -z "$SLACK_USER_ID" ] || [ "$SLACK_USER_ID" == "null" ]; then From 819c02af05ef79ad6a195817100085bd94c48302 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Fri, 11 Oct 2024 14:07:53 +0530 Subject: [PATCH 077/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 93 +++++++++++------------------- 1 file changed, 34 insertions(+), 59 
deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 131df75e..3b5aefa2 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -1,8 +1,17 @@ -name: Slack Notification on Pull Request +name: Build data-archive on: + release: + types: [published] pull_request: - types: [opened, synchronize] + types: [opened, reopened, synchronize] + workflow_dispatch: + inputs: + message: + description: 'Message for manually triggering' + required: false + default: 'Triggered for Updates' + type: string push: branches: - master @@ -12,60 +21,26 @@ on: - MOSIP* jobs: - notify: - runs-on: ubuntu-latest - - steps: - - name: Checkout repository - uses: actions/checkout@v3 - - - name: Get PR author - id: author - run: echo "author=${{ github.event.pull_request.user.login }}" >> $GITHUB_ENV - - - name: Decode Slack Users Secret - id: slack_id - run: | - # Access the JSON string from the secret - SLACK_USERS_MAP="${{ secrets.SLACK_USERS_MAP }}" - - # Debug: Print the JSON to ensure it's properly formatted - echo "SLACK_USERS_MAP: $SLACK_USERS_MAP" - - # Extract GitHub username of the PR author - GITHUB_USER="${{ github.event.pull_request.user.login }}" - - # Use jq to extract the Slack ID corresponding to the GitHub username - #SLACK_USER_ID=$(echo "$SLACK_USERS_MAP" | jq -r --arg github_user "$GITHUB_USER" '.[$github_user]') - SLACK_USER_ID=$(echo "$SLACK_USERS_MAP" | grep -oP "$GITHUB_USER:\K[^,]+") - echo $SLACK_USER_ID - - # Check if the Slack user ID was found - if [ -z "$SLACK_USER_ID" ] || [ "$SLACK_USER_ID" == "null" ]; then - echo "Slack user ID not found for GitHub user: $GITHUB_USER" - exit 1 - fi - - # Export the Slack user ID to the GitHub environment - echo "SLACK_USER_ID=$SLACK_USER_ID" >> $GITHUB_ENV - - shell: bash - - - name: Post to a Slack channel - id: slack - uses: slackapi/slack-github-action@v1.27.0 - with: - channel-id: ${{ env.SLACK_USER_ID }} - slack-message: | - *GitHub Action Notification* :rocket: - *Repository*: ${{ github.repository }} - *Message*: ${{ github.event.head_commit.message || github.event.pull_request.title }} - *Commit*: ${{ github.sha }} - *Author*: ${{ github.event.pull_request.user.login }} - *Action*: ${{ github.event.action }} - *Event Name*: ${{ github.event_name }} - *Branch (ref)*: ${{ github.ref }} - *Workflow*: ${{ github.workflow }} - *PR/Commit URL*: ${{ github.event.pull_request.html_url || github.event.head_commit.url }} - env: - SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} + build-dockers: + strategy: + matrix: + include: + - SERVICE_LOCATION: 'data-archive' + SERVICE_NAME: 'data-archive' + fail-fast: false + name: ${{ matrix.SERVICE_NAME }} + uses: bn46/kattu/.github/workflows/docker-build.yml@develop + with: + SERVICE_LOCATION: ${{ matrix.SERVICE_LOCATION }} + SERVICE_NAME: ${{ matrix.SERVICE_NAME }} + GITHUB_USER: ${{ github.event.pull_request.user.login }} + PR_URL: ${{ github.event.pull_request.html_url }} + PR_TITLE: ${{ github.event.pull_request.title }} + REPOSITORY: ${{ github.repository }} + ACTION: ${{ github.event.action }} + secrets: + DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }} + ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }} + RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} + SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} # Passed from calling workflow + GENERAL_CHANNEL_ID: ${{ secrets.GENERAL_CHANNEL_ID }} # Passed from calling workflow From ee95b907d400d5e7914b58dbdff79139497eed3a Mon Sep 17 00:00:00 2001 From: bn46 
<147074924+bn46@users.noreply.github.com> Date: Fri, 11 Oct 2024 14:40:40 +0530 Subject: [PATCH 078/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 3b5aefa2..ed0dfab8 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -44,3 +44,4 @@ jobs: RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} # Passed from calling workflow GENERAL_CHANNEL_ID: ${{ secrets.GENERAL_CHANNEL_ID }} # Passed from calling workflow + SLACK_USERS_MAP: ${{ secrets.SLACK_USERS_MAP }} From ab8434ae35c2e082a6f9c549e08123e399485253 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Fri, 11 Oct 2024 17:12:45 +0530 Subject: [PATCH 079/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index ed0dfab8..65d49f49 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -42,6 +42,6 @@ jobs: DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }} ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }} RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} - SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} # Passed from calling workflow - GENERAL_CHANNEL_ID: ${{ secrets.GENERAL_CHANNEL_ID }} # Passed from calling workflow - SLACK_USERS_MAP: ${{ secrets.SLACK_USERS_MAP }} + # SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} # Passed from calling workflow + # GENERAL_CHANNEL_ID: ${{ secrets.GENERAL_CHANNEL_ID }} # Passed from calling workflow + # SLACK_USERS_MAP: ${{ secrets.SLACK_USERS_MAP }} From 2b8f1ca1e022b998b2f7960da1ec20ec4a39ef91 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Sun, 13 Oct 2024 12:45:06 +0530 Subject: [PATCH 080/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 65d49f49..ed0dfab8 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -42,6 +42,6 @@ jobs: DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }} ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }} RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} - # SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} # Passed from calling workflow - # GENERAL_CHANNEL_ID: ${{ secrets.GENERAL_CHANNEL_ID }} # Passed from calling workflow - # SLACK_USERS_MAP: ${{ secrets.SLACK_USERS_MAP }} + SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} # Passed from calling workflow + GENERAL_CHANNEL_ID: ${{ secrets.GENERAL_CHANNEL_ID }} # Passed from calling workflow + SLACK_USERS_MAP: ${{ secrets.SLACK_USERS_MAP }} From 5059689d543eaa80c5ed03e697b188f97e1ad0aa Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Mon, 14 Oct 2024 11:51:07 +0530 Subject: [PATCH 081/130] Update requirements.txt Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- data-archive/requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git 
a/data-archive/requirements.txt b/data-archive/requirements.txt index 100e24e4..77b6a4ca 100644 --- a/data-archive/requirements.txt +++ b/data-archive/requirements.txt @@ -1,3 +1,3 @@ -psycopg2-binary==2.9.6 -configparser==5.2.0 -datetime==5.2.0 +psycopg2-binary==2.9.3 +configparser==5.2.3 +datetime==6
From 4ff2c688aca90dbe19c5aec7540e87800469bd8a Mon Sep 17 00:00:00 2001 From: bhumi46 Date: Wed, 16 Oct 2024 16:57:50 +0530 Subject: [PATCH 082/130] added gpg Signed-off-by: bhumi46 --- .github/keys/user_map.json.gpg | Bin 0 -> 2901 bytes 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 .github/keys/user_map.json.gpg diff --git a/.github/keys/user_map.json.gpg b/.github/keys/user_map.json.gpg new file mode 100644 index 0000000000000000000000000000000000000000..765bea8b270fb45af64fad0cf2905eb66834c6e7 GIT binary patch literal 2901 [base85-encoded binary payload omitted] literal 0 HcmV?d00001
From 307907367a6d5f6c8c134d81fc4ed518c700b3d2 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Wed, 16 Oct 2024 17:23:11 +0530 Subject: [PATCH 083/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 1 + 1 file changed, 1 insertion(+) diff --git
a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index ed0dfab8..6538cb23 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -45,3 +45,4 @@ jobs: SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} # Passed from calling workflow GENERAL_CHANNEL_ID: ${{ secrets.GENERAL_CHANNEL_ID }} # Passed from calling workflow SLACK_USERS_MAP: ${{ secrets.SLACK_USERS_MAP }} + GPG_USER_MAP_PASSPHRASE: ${{ secrets.GPG_USER_MAP_PASSPHRASE }}
From b1539b5de51d034814e2c94ef915f4192fd9bf6a Mon Sep 17 00:00:00 2001 From: bhumi46 Date: Wed, 16 Oct 2024 18:19:38 +0530 Subject: [PATCH 084/130] updated gpg Signed-off-by: bhumi46 --- .github/keys/user_map.json.gpg | Bin 2901 -> 2876 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/.github/keys/user_map.json.gpg b/.github/keys/user_map.json.gpg index 765bea8b270fb45af64fad0cf2905eb66834c6e7..d0a08e26ced367db0486a3fea54355c1600a1d05 100644 GIT binary patch literal 2876 [base85-encoded binary payload omitted] literal 2901 [base85-encoded binary payload omitted]
From dc83cfc3c36a2315407d629d7ac51e812bf3318e Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Thu, 17 Oct 2024 13:01:53 +0530 Subject: [PATCH 085/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 6538cb23..e8f46adb 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -44,5 +44,5 @@ jobs: RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} # Passed from calling workflow GENERAL_CHANNEL_ID: ${{ secrets.GENERAL_CHANNEL_ID }} # Passed from calling workflow - SLACK_USERS_MAP: ${{ secrets.SLACK_USERS_MAP }} - GPG_USER_MAP_PASSPHRASE: ${{ secrets.GPG_USER_MAP_PASSPHRASE }} + # SLACK_USERS_MAP: ${{ secrets.SLACK_USERS_MAP }} + #
GPG_USER_MAP_PASSPHRASE: ${{ secrets.GPG_USER_MAP_PASSPHRASE }}
From 2ff4c748d08c73cdedb3658848ada826938d9ee6 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Thu, 17 Oct 2024 13:02:36 +0530 Subject: [PATCH 086/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index e8f46adb..59207e3b 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -42,7 +42,7 @@ jobs: DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }} ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }} RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} - SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} # Passed from calling workflow - GENERAL_CHANNEL_ID: ${{ secrets.GENERAL_CHANNEL_ID }} # Passed from calling workflow + # SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} # Passed from calling workflow + # GENERAL_CHANNEL_ID: ${{ secrets.GENERAL_CHANNEL_ID }} # Passed from calling workflow # SLACK_USERS_MAP: ${{ secrets.SLACK_USERS_MAP }} # GPG_USER_MAP_PASSPHRASE: ${{ secrets.GPG_USER_MAP_PASSPHRASE }}
From 755d99bd8fd85367d00aeca05165a0eba9062a47 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Thu, 17 Oct 2024 13:03:00 +0530 Subject: [PATCH 087/130] Delete .github/keys/user_map.json.gpg Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/keys/user_map.json.gpg | Bin 2876 -> 0 bytes 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 .github/keys/user_map.json.gpg diff --git a/.github/keys/user_map.json.gpg b/.github/keys/user_map.json.gpg deleted file mode 100644 index d0a08e26ced367db0486a3fea54355c1600a1d05..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 2876 [base85-encoded binary payload omitted]
Date: Fri, 18 Oct 2024 01:52:38 +0530 Subject: [PATCH 088/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 25 ++++++++++++++++--------- 1 file changed, 16 insertions(+), 9 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 59207e3b..185601c8 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -33,16 +33,23 @@ jobs: with: SERVICE_LOCATION: ${{ matrix.SERVICE_LOCATION }} SERVICE_NAME: ${{ matrix.SERVICE_NAME }} - GITHUB_USER: ${{ github.event.pull_request.user.login }} - PR_URL: ${{ github.event.pull_request.html_url }} - PR_TITLE: ${{ github.event.pull_request.title }} - REPOSITORY: ${{ github.repository }} - ACTION: ${{ github.event.action }} secrets: DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }} ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }} RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} + + notify-slack-on-failure: + # This job depends on the outcome of the build job.
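+ # (Pattern note, an annotation rather than part of this commit: the
+ # if: failure() / needs: build-dockers pair below is the standard
+ # GitHub Actions idiom for running a follow-up job only when the job
+ # it depends on fails; needs both orders the jobs and gives failure()
+ # a prior result to observe.)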
+ if: failure() # Only run when the build-dockers job fails + needs: build-dockers + uses: bn46/kattu/.github/workflows/reusable-slack-notification.yml@develop + with: + GITHUB_USER: ${{ github.actor }} + REPOSITORY: ${{ github.repository }} + PR_TITLE: ${{ github.event.pull_request.title || 'N/A' }} + ACTION: ${{ github.event.action }} + PR_URL: ${{ github.event.pull_request.html_url || github.event.release.html_url || 'N/A' }} + secrets: + GPG_USER_MAP_PASSPHRASE: ${{ secrets.GPG_USER_MAP_PASSPHRASE }} + SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} + GENERAL_CHANNEL_ID: ${{ secrets.GENERAL_CHANNEL_ID }} From b4f28144b209a68d45aacfa8296da585d439d5a4 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Fri, 18 Oct 2024 15:15:53 +0530 Subject: [PATCH 089/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 185601c8..33af25db 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -42,7 +42,7 @@ jobs: # This job depends on the outcome of the build job. if: failure() # Only run when the build-dockers job fails needs: build-dockers - uses: bn46/kattu/.github/workflows/reusable-slack-notification.yml@develop + uses: bn46/kattu/.github/workflows/docker-build.yml@develop with: GITHUB_USER: ${{ github.actor }} REPOSITORY: ${{ github.repository }} From 8ba158e55d7af73c3fc791a176db110b7665c97c Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Fri, 18 Oct 2024 15:28:41 +0530 Subject: [PATCH 090/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 33af25db..8a49e3a9 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -29,7 +29,7 @@ jobs: SERVICE_NAME: 'data-archive' fail-fast: false name: ${{ matrix.SERVICE_NAME }} - uses: bn46/kattu/.github/workflows/docker-build.yml@develop + uses: bn46org/kattu/.github/workflows/docker-build.yml@develop with: SERVICE_LOCATION: ${{ matrix.SERVICE_LOCATION }} SERVICE_NAME: ${{ matrix.SERVICE_NAME }} From 7611c35e9f4a2f60c9a0892d0a32f6a5f140d33d Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Fri, 18 Oct 2024 15:34:24 +0530 Subject: [PATCH 091/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 25 +++++++++---------------- 1 file changed, 9 insertions(+), 16 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 8a49e3a9..6538cb23 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -29,27 +29,20 @@ jobs: SERVICE_NAME: 'data-archive' fail-fast: false name: ${{ matrix.SERVICE_NAME }} - uses: bn46org/kattu/.github/workflows/docker-build.yml@develop + uses: bn46/kattu/.github/workflows/docker-build.yml@develop with: SERVICE_LOCATION: ${{ matrix.SERVICE_LOCATION }} SERVICE_NAME: ${{ matrix.SERVICE_NAME }} + GITHUB_USER: ${{ github.event.pull_request.user.login }} + PR_URL: ${{ github.event.pull_request.html_url }} + PR_TITLE: ${{ github.event.pull_request.title }} + REPOSITORY: ${{ 
github.repository }} + ACTION: ${{ github.event.action }} secrets: DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }} ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }} RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} - - notify-slack-on-failure: - # This job depends on the outcome of the build job. - if: failure() # Only run when the build-dockers job fails - needs: build-dockers - uses: bn46/kattu/.github/workflows/docker-build.yml@develop - with: - GITHUB_USER: ${{ github.actor }} - REPOSITORY: ${{ github.repository }} - PR_TITLE: ${{ github.event.pull_request.title || 'N/A' }} - ACTION: ${{ github.event.action }} - PR_URL: ${{ github.event.pull_request.html_url || github.event.release.html_url || 'N/A' }} - secrets: + SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} # Passed from calling workflow + GENERAL_CHANNEL_ID: ${{ secrets.GENERAL_CHANNEL_ID }} # Passed from calling workflow + SLACK_USERS_MAP: ${{ secrets.SLACK_USERS_MAP }} GPG_USER_MAP_PASSPHRASE: ${{ secrets.GPG_USER_MAP_PASSPHRASE }} - SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} - GENERAL_CHANNEL_ID: ${{ secrets.GENERAL_CHANNEL_ID }} From 4aba70698130bf66baf1370dfd8d1da588554c4b Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Fri, 18 Oct 2024 15:39:08 +0530 Subject: [PATCH 092/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 6538cb23..71853c4e 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -29,7 +29,7 @@ jobs: SERVICE_NAME: 'data-archive' fail-fast: false name: ${{ matrix.SERVICE_NAME }} - uses: bn46/kattu/.github/workflows/docker-build.yml@develop + uses: bn46org/kattu/.github/workflows/docker-build.yml@develop with: SERVICE_LOCATION: ${{ matrix.SERVICE_LOCATION }} SERVICE_NAME: ${{ matrix.SERVICE_NAME }} From 52c3b3d6490b39ca155c4117b59309098cdb0a7e Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Fri, 18 Oct 2024 18:08:12 +0530 Subject: [PATCH 093/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 71853c4e..b5dbf078 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -33,16 +33,16 @@ jobs: with: SERVICE_LOCATION: ${{ matrix.SERVICE_LOCATION }} SERVICE_NAME: ${{ matrix.SERVICE_NAME }} - GITHUB_USER: ${{ github.event.pull_request.user.login }} - PR_URL: ${{ github.event.pull_request.html_url }} - PR_TITLE: ${{ github.event.pull_request.title }} + GITHUB_USER: ${{ github.event.pull_request.user.login || github.actor }} + PR_URL: ${{ github.event.pull_request.html_url || github.event.release.url || github.event.html_url }} + PR_TITLE: ${{ github.event.pull_request.title || github.event.release.name || github.ref_name }} REPOSITORY: ${{ github.repository }} - ACTION: ${{ github.event.action }} + ACTION: ${{ github.event.action || 'push' }} secrets: DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }} ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }} RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} - SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} # Passed from calling workflow - 
GENERAL_CHANNEL_ID: ${{ secrets.GENERAL_CHANNEL_ID }} # Passed from calling workflow + SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} + GENERAL_CHANNEL_ID: ${{ secrets.GENERAL_CHANNEL_ID }} SLACK_USERS_MAP: ${{ secrets.SLACK_USERS_MAP }} GPG_USER_MAP_PASSPHRASE: ${{ secrets.GPG_USER_MAP_PASSPHRASE }} From 3590ff7b9e2c4a5262bd273632e20c472fa1c00c Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Fri, 18 Oct 2024 18:25:10 +0530 Subject: [PATCH 094/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index b5dbf078..f0eba69a 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -33,9 +33,9 @@ jobs: with: SERVICE_LOCATION: ${{ matrix.SERVICE_LOCATION }} SERVICE_NAME: ${{ matrix.SERVICE_NAME }} - GITHUB_USER: ${{ github.event.pull_request.user.login || github.actor }} - PR_URL: ${{ github.event.pull_request.html_url || github.event.release.url || github.event.html_url }} - PR_TITLE: ${{ github.event.pull_request.title || github.event.release.name || github.ref_name }} + GITHUB_USER: ${{ github.event.pull_request.user.login || github.actor || 'unknown' }} + PR_URL: ${{ github.event.pull_request.html_url || github.event.release.url || github.event.html_url || 'Not Available' }} + PR_TITLE: ${{ github.event.pull_request.title || github.event.release.name || github.ref_name || 'No Title' }} REPOSITORY: ${{ github.repository }} ACTION: ${{ github.event.action || 'push' }} secrets: From 9a55a87eb1aa9fbef7083cc149f1451145db7c59 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Fri, 18 Oct 2024 19:03:48 +0530 Subject: [PATCH 095/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index f0eba69a..0546ba7c 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -33,16 +33,22 @@ jobs: with: SERVICE_LOCATION: ${{ matrix.SERVICE_LOCATION }} SERVICE_NAME: ${{ matrix.SERVICE_NAME }} - GITHUB_USER: ${{ github.event.pull_request.user.login || github.actor || 'unknown' }} - PR_URL: ${{ github.event.pull_request.html_url || github.event.release.url || github.event.html_url || 'Not Available' }} - PR_TITLE: ${{ github.event.pull_request.title || github.event.release.name || github.ref_name || 'No Title' }} - REPOSITORY: ${{ github.repository }} - ACTION: ${{ github.event.action || 'push' }} secrets: DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }} ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }} RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} + + notify-slack-on-failure: + if: failure() + needs: build-dockers + uses: bn46org/kattu/.github/workflows/slack-notification.yml@develop + with: + GITHUB_USER: ${{ github.actor }} + REPOSITORY: ${{ github.repository }} + PR_TITLE: ${{ github.event.pull_request.title || 'N/A' }} + ACTION: ${{ github.event.action || 'N/A' }} + PR_URL: ${{ github.event.pull_request.html_url || github.event.release.html_url || 'N/A' }} + secrets: + GPG_USER_MAP_PASSPHRASE: ${{ secrets.GPG_USER_MAP_PASSPHRASE }} SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} GENERAL_CHANNEL_ID: 
${{ secrets.GENERAL_CHANNEL_ID }} - SLACK_USERS_MAP: ${{ secrets.SLACK_USERS_MAP }} - GPG_USER_MAP_PASSPHRASE: ${{ secrets.GPG_USER_MAP_PASSPHRASE }} From e3c64ba8812c2316bae3bc796cf64fcc611fccf5 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Fri, 18 Oct 2024 19:11:06 +0530 Subject: [PATCH 096/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 0546ba7c..f1d21681 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -51,4 +51,4 @@ jobs: secrets: GPG_USER_MAP_PASSPHRASE: ${{ secrets.GPG_USER_MAP_PASSPHRASE }} SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} - GENERAL_CHANNEL_ID: ${{ secrets.GENERAL_CHANNEL_ID }} + SLACK_CHANNEL_ID: ${{ secrets.GENERAL_CHANNEL_ID }} From cbef2acb4166abc94f2ac902b0d01030443c2d19 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Mon, 21 Oct 2024 15:27:44 +0530 Subject: [PATCH 097/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 28 ++++++++++++++++++---------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index f1d21681..441ae676 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -33,22 +33,30 @@ jobs: with: SERVICE_LOCATION: ${{ matrix.SERVICE_LOCATION }} SERVICE_NAME: ${{ matrix.SERVICE_NAME }} - secrets: - DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }} - ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }} - RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} - - notify-slack-on-failure: - if: failure() - needs: build-dockers - uses: bn46org/kattu/.github/workflows/slack-notification.yml@develop - with: GITHUB_USER: ${{ github.actor }} REPOSITORY: ${{ github.repository }} PR_TITLE: ${{ github.event.pull_request.title || 'N/A' }} ACTION: ${{ github.event.action || 'N/A' }} PR_URL: ${{ github.event.pull_request.html_url || github.event.release.html_url || 'N/A' }} secrets: + DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }} + ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }} + RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} GPG_USER_MAP_PASSPHRASE: ${{ secrets.GPG_USER_MAP_PASSPHRASE }} SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} SLACK_CHANNEL_ID: ${{ secrets.GENERAL_CHANNEL_ID }} + + # notify-slack-on-failure: + # if: failure() + # needs: build-dockers + # uses: bn46org/kattu/.github/workflows/slack-notification.yml@develop + # with: + # GITHUB_USER: ${{ github.actor }} + # REPOSITORY: ${{ github.repository }} + # PR_TITLE: ${{ github.event.pull_request.title || 'N/A' }} + # ACTION: ${{ github.event.action || 'N/A' }} + # PR_URL: ${{ github.event.pull_request.html_url || github.event.release.html_url || 'N/A' }} + # secrets: + # GPG_USER_MAP_PASSPHRASE: ${{ secrets.GPG_USER_MAP_PASSPHRASE }} + # SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} + # SLACK_CHANNEL_ID: ${{ secrets.GENERAL_CHANNEL_ID }} From 91499bd92980be59b7604b9c2fe11ef20b7e8afe Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Mon, 21 Oct 2024 15:38:49 +0530 Subject: [PATCH 098/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- 
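An aside on the design patches 082-097 are converging on: an encrypted .github/keys/user_map.json.gpg is committed and a GPG_USER_MAP_PASSPHRASE secret is threaded through to the called kattu workflow, which presumably decrypts that map and repeats the jq lookup used earlier in this series to resolve the PR author's Slack ID. A minimal sketch of such a step, assuming that file path and caller-supplied values; the step name and variable wiring are illustrative, not the reusable workflow's actual code:

      - name: Resolve Slack user ID from encrypted map
        shell: bash
        env:
          GPG_USER_MAP_PASSPHRASE: ${{ secrets.GPG_USER_MAP_PASSPHRASE }}
          GITHUB_USER: ${{ github.event.pull_request.user.login }}
        run: |
          # Decrypt the committed user map with the shared passphrase.
          gpg --batch --yes --pinentry-mode loopback \
              --passphrase "$GPG_USER_MAP_PASSPHRASE" \
              --decrypt .github/keys/user_map.json.gpg > user_map.json
          # Same jq lookup as before: map GitHub login to Slack ID.
          SLACK_USER_ID=$(jq -r --arg u "$GITHUB_USER" '.[$u]' user_map.json)
          echo "SLACK_USER_ID=$SLACK_USER_ID" >> "$GITHUB_ENV"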
.github/workflows/push-trigger.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 441ae676..b54317dc 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -38,7 +38,7 @@ jobs: PR_TITLE: ${{ github.event.pull_request.title || 'N/A' }} ACTION: ${{ github.event.action || 'N/A' }} PR_URL: ${{ github.event.pull_request.html_url || github.event.release.html_url || 'N/A' }} - secrets: + #secrets: DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }} ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }} RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} From 72ebe704d3103c520823e20a43e503d91ab2db38 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Mon, 21 Oct 2024 15:41:28 +0530 Subject: [PATCH 099/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index b54317dc..09a9292f 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -38,13 +38,16 @@ jobs: PR_TITLE: ${{ github.event.pull_request.title || 'N/A' }} ACTION: ${{ github.event.action || 'N/A' }} PR_URL: ${{ github.event.pull_request.html_url || github.event.release.html_url || 'N/A' }} - #secrets: - DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }} - ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }} - RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} GPG_USER_MAP_PASSPHRASE: ${{ secrets.GPG_USER_MAP_PASSPHRASE }} SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} SLACK_CHANNEL_ID: ${{ secrets.GENERAL_CHANNEL_ID }} + secrets: + DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }} + ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }} + RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} + # GPG_USER_MAP_PASSPHRASE: ${{ secrets.GPG_USER_MAP_PASSPHRASE }} + # SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} + # SLACK_CHANNEL_ID: ${{ secrets.GENERAL_CHANNEL_ID }} # notify-slack-on-failure: # if: failure() From 928a9a1d5ef99daaf8905aa7d72e6a1031405d21 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Mon, 21 Oct 2024 16:05:08 +0530 Subject: [PATCH 100/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 09a9292f..24cd1329 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -38,9 +38,9 @@ jobs: PR_TITLE: ${{ github.event.pull_request.title || 'N/A' }} ACTION: ${{ github.event.action || 'N/A' }} PR_URL: ${{ github.event.pull_request.html_url || github.event.release.html_url || 'N/A' }} - GPG_USER_MAP_PASSPHRASE: ${{ secrets.GPG_USER_MAP_PASSPHRASE }} - SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} - SLACK_CHANNEL_ID: ${{ secrets.GENERAL_CHANNEL_ID }} + # GPG_USER_MAP_PASSPHRASE: ${{ secrets.GPG_USER_MAP_PASSPHRASE }} + # SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} + # SLACK_CHANNEL_ID: ${{ secrets.GENERAL_CHANNEL_ID }} secrets: DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }} ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }} From 
ebacb723ad74088c0eff93421b221669708bb5ce Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Mon, 21 Oct 2024 16:35:14 +0530 Subject: [PATCH 101/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 24cd1329..a9313cbf 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -45,9 +45,9 @@ jobs: DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }} ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }} RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }} - # GPG_USER_MAP_PASSPHRASE: ${{ secrets.GPG_USER_MAP_PASSPHRASE }} - # SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} - # SLACK_CHANNEL_ID: ${{ secrets.GENERAL_CHANNEL_ID }} + GPG_USER_MAP_PASSPHRASE: ${{ secrets.GPG_USER_MAP_PASSPHRASE }} + SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} + SLACK_CHANNEL_ID: ${{ secrets.GENERAL_CHANNEL_ID }} # notify-slack-on-failure: # if: failure() From 9f74250eb0af32b66e25624b781b3e138cbbef7f Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Wed, 23 Oct 2024 17:39:43 +0530 Subject: [PATCH 102/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index a9313cbf..f94b3162 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -34,13 +34,14 @@ jobs: SERVICE_LOCATION: ${{ matrix.SERVICE_LOCATION }} SERVICE_NAME: ${{ matrix.SERVICE_NAME }} GITHUB_USER: ${{ github.actor }} - REPOSITORY: ${{ github.repository }} PR_TITLE: ${{ github.event.pull_request.title || 'N/A' }} ACTION: ${{ github.event.action || 'N/A' }} - PR_URL: ${{ github.event.pull_request.html_url || github.event.release.html_url || 'N/A' }} - # GPG_USER_MAP_PASSPHRASE: ${{ secrets.GPG_USER_MAP_PASSPHRASE }} - # SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} - # SLACK_CHANNEL_ID: ${{ secrets.GENERAL_CHANNEL_ID }} + PR_URL: ${{ github.event.pull_request.html_url || 'N/A' }} + COMMIT_URL: https://github.com/${{ github.repository }}/commit/${{ github.sha }} + REPO_URL: https://github.com/${{ github.repository }} + JOB_URL: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }} + MESSAGE_URL: ${{ github.event.pull_request.html_url || github.event.head_commit.url || 'N/A' }} + secrets: DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }} ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }} From 39b43f1cc27c017022a93e7f97baf295a2d69e96 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Wed, 23 Oct 2024 18:34:19 +0530 Subject: [PATCH 103/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index f94b3162..05ea23a1 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -36,11 +36,11 @@ jobs: GITHUB_USER: ${{ github.actor }} PR_TITLE: ${{ github.event.pull_request.title || 'N/A' }} ACTION: ${{ github.event.action || 'N/A' }} - PR_URL: ${{ 
github.event.pull_request.html_url || 'N/A' }} - COMMIT_URL: https://github.com/${{ github.repository }}/commit/${{ github.sha }} - REPO_URL: https://github.com/${{ github.repository }} + #PR_URL: ${{ github.event.pull_request.html_url || 'N/A' }} + COMMIT: ${{ github.sha }} + REPO: ${{ github.repository }} JOB_URL: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }} - MESSAGE_URL: ${{ github.event.pull_request.html_url || github.event.head_commit.url || 'N/A' }} + MESSAGE: ${{ github.event.head_commit.message || github.event.pull_request.title }} secrets: DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }} From 6afca4410ffadef1dd642c5f90cc4997f6254946 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Wed, 23 Oct 2024 18:43:56 +0530 Subject: [PATCH 104/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 05ea23a1..c7076e1c 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -34,7 +34,8 @@ jobs: SERVICE_LOCATION: ${{ matrix.SERVICE_LOCATION }} SERVICE_NAME: ${{ matrix.SERVICE_NAME }} GITHUB_USER: ${{ github.actor }} - PR_TITLE: ${{ github.event.pull_request.title || 'N/A' }} + WORKFLOW: ${{ github.workflow }} + #PR_TITLE: ${{ github.event.pull_request.title || 'N/A' }} ACTION: ${{ github.event.action || 'N/A' }} #PR_URL: ${{ github.event.pull_request.html_url || 'N/A' }} COMMIT: ${{ github.sha }} From 5583173b415d1a7d6c63dbbab28b5688a76d3987 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Wed, 23 Oct 2024 18:54:42 +0530 Subject: [PATCH 105/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index c7076e1c..a37247aa 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -40,7 +40,7 @@ jobs: #PR_URL: ${{ github.event.pull_request.html_url || 'N/A' }} COMMIT: ${{ github.sha }} REPO: ${{ github.repository }} - JOB_URL: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }} + JOB_URL: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}/jobs/${{ github.job }} MESSAGE: ${{ github.event.head_commit.message || github.event.pull_request.title }} secrets: From 13a5766e3c1cc9b05ca5cc6a0d91183e1137e3ee Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Wed, 23 Oct 2024 18:57:25 +0530 Subject: [PATCH 106/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index a37247aa..c7076e1c 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -40,7 +40,7 @@ jobs: #PR_URL: ${{ github.event.pull_request.html_url || 'N/A' }} COMMIT: ${{ github.sha }} REPO: ${{ github.repository }} - JOB_URL: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}/jobs/${{ github.job }} + JOB_URL: https://github.com/${{ github.repository }}/actions/runs/${{ 
github.run_id }} MESSAGE: ${{ github.event.head_commit.message || github.event.pull_request.title }} secrets: From 7d40ec674b43245eb6041431a5a5f25ca6fef2b7 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Thu, 24 Oct 2024 15:35:32 +0530 Subject: [PATCH 107/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index c7076e1c..dde4d4db 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -37,10 +37,11 @@ jobs: WORKFLOW: ${{ github.workflow }} #PR_TITLE: ${{ github.event.pull_request.title || 'N/A' }} ACTION: ${{ github.event.action || 'N/A' }} - #PR_URL: ${{ github.event.pull_request.html_url || 'N/A' }} + #PR_URL: https://github.com/${{ github.repository }}/pull/${{ github.event.pull_request.number || 'N/A' }} COMMIT: ${{ github.sha }} REPO: ${{ github.repository }} - JOB_URL: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }} + JOB_URL: https://github.com/${{ github.repository }}/pull/${{ github.event.pull_request.number || 'N/A' }} + #JOB_URL: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }} MESSAGE: ${{ github.event.head_commit.message || github.event.pull_request.title }} secrets: From 6ce39428c0e0ca44954e96de149a0f82e4b58212 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Thu, 24 Oct 2024 16:23:02 +0530 Subject: [PATCH 108/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index dde4d4db..79ba8494 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -40,7 +40,7 @@ jobs: #PR_URL: https://github.com/${{ github.repository }}/pull/${{ github.event.pull_request.number || 'N/A' }} COMMIT: ${{ github.sha }} REPO: ${{ github.repository }} - JOB_URL: https://github.com/${{ github.repository }}/pull/${{ github.event.pull_request.number || 'N/A' }} + JOB_URL: ${{ github.event_name == 'pull_request' && github.event.pull_request ? format('https://github.com/{0}/pull/{1}', github.repository, github.event.pull_request.number) :'N/A' }} #JOB_URL: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }} MESSAGE: ${{ github.event.head_commit.message || github.event.pull_request.title }} From 7cc7f4a933ba647b55d4c8962cffe7ae0ee8527d Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Thu, 24 Oct 2024 16:24:59 +0530 Subject: [PATCH 109/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 79ba8494..d396bed5 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -40,7 +40,7 @@ jobs: #PR_URL: https://github.com/${{ github.repository }}/pull/${{ github.event.pull_request.number || 'N/A' }} COMMIT: ${{ github.sha }} REPO: ${{ github.repository }} - JOB_URL: ${{ github.event_name == 'pull_request' && github.event.pull_request ? 
format('https://github.com/{0}/pull/{1}', github.repository, github.event.pull_request.number) :'N/A' }} + JOB_URL: ${{ contains(github.event_name, 'pull_request') && github.event.pull_request ? format('https://github.com/{0}/pull/{1}', github.repository, github.event.pull_request.number) : 'N/A' }} #JOB_URL: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }} MESSAGE: ${{ github.event.head_commit.message || github.event.pull_request.title }} From dc4204993136c139ca06afd26728c5f288bcb21a Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Thu, 24 Oct 2024 16:55:17 +0530 Subject: [PATCH 110/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index d396bed5..47c55e3c 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -40,7 +40,7 @@ jobs: #PR_URL: https://github.com/${{ github.repository }}/pull/${{ github.event.pull_request.number || 'N/A' }} COMMIT: ${{ github.sha }} REPO: ${{ github.repository }} - JOB_URL: ${{ contains(github.event_name, 'pull_request') && github.event.pull_request ? format('https://github.com/{0}/pull/{1}', github.repository, github.event.pull_request.number) : 'N/A' }} + JOB_URL: ${{ github.event_name == 'pull_request' && github.event.pull_request != null && github.event.pull_request.number != null ? format('https://github.com/{0}/pull/{1}', github.repository, github.event.pull_request.number) }} #JOB_URL: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }} MESSAGE: ${{ github.event.head_commit.message || github.event.pull_request.title }} From dd2d122de47414fde07457d160deca0cad981a84 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Thu, 24 Oct 2024 17:04:37 +0530 Subject: [PATCH 111/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 47c55e3c..c022ecff 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -40,7 +40,7 @@ jobs: #PR_URL: https://github.com/${{ github.repository }}/pull/${{ github.event.pull_request.number || 'N/A' }} COMMIT: ${{ github.sha }} REPO: ${{ github.repository }} - JOB_URL: ${{ github.event_name == 'pull_request' && github.event.pull_request != null && github.event.pull_request.number != null ? format('https://github.com/{0}/pull/{1}', github.repository, github.event.pull_request.number) }} + JOB_URL: "${{ github.event.pull_request ? 
'https://github.com/' + github.repository + '/pull/' + github.event.pull_request.number : 'N/A' }}" #JOB_URL: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }} MESSAGE: ${{ github.event.head_commit.message || github.event.pull_request.title }} From c22cc3c4606bfc4da162ba071ccb10fbbc13126e Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Thu, 24 Oct 2024 17:06:16 +0530 Subject: [PATCH 112/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index c022ecff..9fe97c0d 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -40,7 +40,8 @@ jobs: #PR_URL: https://github.com/${{ github.repository }}/pull/${{ github.event.pull_request.number || 'N/A' }} COMMIT: ${{ github.sha }} REPO: ${{ github.repository }} - JOB_URL: "${{ github.event.pull_request ? 'https://github.com/' + github.repository + '/pull/' + github.event.pull_request.number : 'N/A' }}" + JOB_URL: "${{ github.event_name == 'pull_request' && github.event.pull_request ? 'https://github.com/' + github.repository + '/pull/' + github.event.pull_request.number : 'N/A' }}" + #JOB_URL: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }} MESSAGE: ${{ github.event.head_commit.message || github.event.pull_request.title }} From e7da2687bef5e3c53ca3f5f5b0279503e9962742 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Thu, 24 Oct 2024 17:09:53 +0530 Subject: [PATCH 113/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index 9fe97c0d..e113e6b2 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -40,7 +40,7 @@ jobs: #PR_URL: https://github.com/${{ github.repository }}/pull/${{ github.event.pull_request.number || 'N/A' }} COMMIT: ${{ github.sha }} REPO: ${{ github.repository }} - JOB_URL: "${{ github.event_name == 'pull_request' && github.event.pull_request ? 
'https://github.com/' + github.repository + '/pull/' + github.event.pull_request.number : 'N/A' }}" + JOB_URL: https://github.com/${{ github.repository }}/pull/${{ github.event.pull_request.number }} #JOB_URL: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }} MESSAGE: ${{ github.event.head_commit.message || github.event.pull_request.title }} From 1c10022a8ddff788d3e80bc6d03f4c87d0257e38 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Thu, 24 Oct 2024 17:16:12 +0530 Subject: [PATCH 114/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index e113e6b2..bb548ca3 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -40,7 +40,7 @@ jobs: #PR_URL: https://github.com/${{ github.repository }}/pull/${{ github.event.pull_request.number || 'N/A' }} COMMIT: ${{ github.sha }} REPO: ${{ github.repository }} - JOB_URL: https://github.com/${{ github.repository }}/pull/${{ github.event.pull_request.number }} + JOB_URL: ${{ github.event.pull_request.html_url || 'N/A' }} #JOB_URL: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }} MESSAGE: ${{ github.event.head_commit.message || github.event.pull_request.title }} From 71d7be7f213f884f4b9a6823eef4556144669fc4 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Fri, 25 Oct 2024 12:35:38 +0530 Subject: [PATCH 115/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml index bb548ca3..279fc172 100644 --- a/.github/workflows/push-trigger.yml +++ b/.github/workflows/push-trigger.yml @@ -37,14 +37,13 @@ jobs: WORKFLOW: ${{ github.workflow }} #PR_TITLE: ${{ github.event.pull_request.title || 'N/A' }} ACTION: ${{ github.event.action || 'N/A' }} - #PR_URL: https://github.com/${{ github.repository }}/pull/${{ github.event.pull_request.number || 'N/A' }} - COMMIT: ${{ github.sha }} + PR_URL: ${{ github.event.pull_request.html_url || 'N/A' }} + COMMIT: ${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }} REPO: ${{ github.repository }} - JOB_URL: ${{ github.event.pull_request.html_url || 'N/A' }} + JOB_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} #JOB_URL: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }} - MESSAGE: ${{ github.event.head_commit.message || github.event.pull_request.title }} - + MESSAGE: "[${{ github.event.head_commit.message || github.event.pull_request.title }}](${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }})" secrets: DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }} ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }} From 0de6aa01a1e27abdf2384b48feb2f5f6b754e2e3 Mon Sep 17 00:00:00 2001 From: bn46 <147074924+bn46@users.noreply.github.com> Date: Fri, 25 Oct 2024 13:10:14 +0530 Subject: [PATCH 116/130] Update push-trigger.yml Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com> --- .github/workflows/push-trigger.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/push-trigger.yml 
From 0de6aa01a1e27abdf2384b48feb2f5f6b754e2e3 Mon Sep 17 00:00:00 2001
From: bn46 <147074924+bn46@users.noreply.github.com>
Date: Fri, 25 Oct 2024 13:10:14 +0530
Subject: [PATCH 116/130] Update push-trigger.yml

Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com>
---
 .github/workflows/push-trigger.yml | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml
index 279fc172..327ffb2c 100644
--- a/.github/workflows/push-trigger.yml
+++ b/.github/workflows/push-trigger.yml
@@ -37,13 +37,13 @@ jobs:
       WORKFLOW: ${{ github.workflow }}
       #PR_TITLE: ${{ github.event.pull_request.title || 'N/A' }}
       ACTION: ${{ github.event.action || 'N/A' }}
-      PR_URL: ${{ github.event.pull_request.html_url || 'N/A' }}
-      COMMIT: ${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}
+      PR_URL: "<${{ github.event.pull_request.html_url || 'N/A' }}>"
+      COMMIT: "<${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}>"
       REPO: ${{ github.repository }}
-      JOB_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
+      JOB_URL: "<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}>"
       #JOB_URL: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
-      MESSAGE: "[${{ github.event.head_commit.message || github.event.pull_request.title }}](${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }})"
+      MESSAGE: "<[${{ github.event.head_commit.message || github.event.pull_request.title }}](${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }})>"
     secrets:
       DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }}
       ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }}

From 1e851a9a88c09a9951f1bafe779165d379349d04 Mon Sep 17 00:00:00 2001
From: bn46 <147074924+bn46@users.noreply.github.com>
Date: Fri, 25 Oct 2024 13:14:28 +0530
Subject: [PATCH 117/130] Update push-trigger.yml

Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com>
---
 .github/workflows/push-trigger.yml | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml
index 327ffb2c..c86316ff 100644
--- a/.github/workflows/push-trigger.yml
+++ b/.github/workflows/push-trigger.yml
@@ -37,13 +37,13 @@ jobs:
       WORKFLOW: ${{ github.workflow }}
       #PR_TITLE: ${{ github.event.pull_request.title || 'N/A' }}
       ACTION: ${{ github.event.action || 'N/A' }}
-      PR_URL: "<${{ github.event.pull_request.html_url || 'N/A' }}>"
-      COMMIT: "<${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}>"
+      PR_URL: ""
+      COMMIT: ""
       REPO: ${{ github.repository }}
-      JOB_URL: "<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}>"
+      JOB_URL: ""
       #JOB_URL: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
-      MESSAGE: "<[${{ github.event.head_commit.message || github.event.pull_request.title }}](${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }})>"
+      MESSAGE: ""
     secrets:
       DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }}
       ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }}

From 5756b10e7b7b0be40e3faea2d59754b0812a63f1 Mon Sep 17 00:00:00 2001
From: bn46 <147074924+bn46@users.noreply.github.com>
Date: Fri, 25 Oct 2024 13:18:20 +0530
Subject: [PATCH 118/130] Update push-trigger.yml

Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com>
---
 .github/workflows/push-trigger.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml
index c86316ff..66263d12 100644
--- a/.github/workflows/push-trigger.yml
+++ b/.github/workflows/push-trigger.yml
@@ -38,7 +38,7 @@ jobs:
       #PR_TITLE: ${{ github.event.pull_request.title || 'N/A' }}
       ACTION: ${{ github.event.action || 'N/A' }}
       PR_URL: ""
-      COMMIT: ""
+      COMMIT: "[Commit](${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }})"
       REPO: ${{ github.repository }}
       JOB_URL: ""

From f942d362251a5eb1b19c2d7e40de3ad205ddc710 Mon Sep 17 00:00:00 2001
From: bn46 <147074924+bn46@users.noreply.github.com>
Date: Fri, 25 Oct 2024 13:24:28 +0530
Subject: [PATCH 119/130] Update push-trigger.yml

Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com>
---
 .github/workflows/push-trigger.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml
index 66263d12..9e93ae05 100644
--- a/.github/workflows/push-trigger.yml
+++ b/.github/workflows/push-trigger.yml
@@ -38,7 +38,7 @@ jobs:
       #PR_TITLE: ${{ github.event.pull_request.title || 'N/A' }}
       ACTION: ${{ github.event.action || 'N/A' }}
       PR_URL: ""
-      COMMIT: "[Commit](${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }})"
+      COMMIT: "<${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }|Commit}>"
       REPO: ${{ github.repository }}
       JOB_URL: ""

From 58d12b9ec9ab14e61f97262725c255fc2010fd92 Mon Sep 17 00:00:00 2001
From: bn46 <147074924+bn46@users.noreply.github.com>
Date: Fri, 25 Oct 2024 13:25:26 +0530
Subject: [PATCH 120/130] Update push-trigger.yml

Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com>
---
 .github/workflows/push-trigger.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml
index 9e93ae05..cb387fdc 100644
--- a/.github/workflows/push-trigger.yml
+++ b/.github/workflows/push-trigger.yml
@@ -38,7 +38,7 @@ jobs:
       #PR_TITLE: ${{ github.event.pull_request.title || 'N/A' }}
       ACTION: ${{ github.event.action || 'N/A' }}
       PR_URL: ""
-      COMMIT: "<${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }|Commit}>"
+      COMMIT: "<${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|Commit}>"
       REPO: ${{ github.repository }}
       JOB_URL: ""
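Note on patches 116-120: the angle-bracket wrapping targets Slack's mrkdwn hyperlink syntax, which is <url|label>; Slack does not render Markdown-style [label](url) links, which is presumably why patch 118's form was dropped again. The values in patches 119 and 120 still carry a stray closing brace (first inside ${{ github.sha }, then left behind as |Commit}>). Assuming the variable is ultimately posted to Slack as mrkdwn, the well-formed value looks like this:

      COMMIT: "<${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|Commit>"

which is exactly where patch 121 below lands.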
From 064533782af018f4140c2f58801c242a5164e006 Mon Sep 17 00:00:00 2001
From: bn46 <147074924+bn46@users.noreply.github.com>
Date: Fri, 25 Oct 2024 13:29:23 +0530
Subject: [PATCH 121/130] Update push-trigger.yml

Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com>
---
 .github/workflows/push-trigger.yml | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml
index cb387fdc..ebcfb259 100644
--- a/.github/workflows/push-trigger.yml
+++ b/.github/workflows/push-trigger.yml
@@ -37,13 +37,13 @@ jobs:
       WORKFLOW: ${{ github.workflow }}
       #PR_TITLE: ${{ github.event.pull_request.title || 'N/A' }}
       ACTION: ${{ github.event.action || 'N/A' }}
-      PR_URL: ""
-      COMMIT: "<${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|Commit}>"
+      PR_URL: "<${{ github.event.pull_request.html_url || 'N/A' }}|PR #${{ github.event.pull_request.number }}>"
+      COMMIT: "<${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|Commit>"
       REPO: ${{ github.repository }}
-      JOB_URL: ""
+      JOB_URL: "<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Job Run>"
       #JOB_URL: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
-      MESSAGE: ""
+      MESSAGE: "<${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|${{ github.event.head_commit.message || github.event.pull_request.title }}>"
     secrets:
       DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }}
       ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }}

From e76d15ca7286d1d671a7b4a2e4a578257c98fc66 Mon Sep 17 00:00:00 2001
From: bn46 <147074924+bn46@users.noreply.github.com>
Date: Fri, 25 Oct 2024 13:34:43 +0530
Subject: [PATCH 122/130] Update push-trigger.yml

Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com>
---
 .github/workflows/push-trigger.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml
index ebcfb259..a64c5fc6 100644
--- a/.github/workflows/push-trigger.yml
+++ b/.github/workflows/push-trigger.yml
@@ -43,7 +43,7 @@ jobs:
       JOB_URL: "<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Job Run>"
       #JOB_URL: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
-      MESSAGE: "<${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|${{ github.event.head_commit.message || github.event.pull_request.title }}>"
+      MESSAGE: ${{ github.event.head_commit.message || github.event.pull_request.title }}
     secrets:
       DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }}
       ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }}

From c415423ede5417a5f26344228671772122a3fee2 Mon Sep 17 00:00:00 2001
From: bn46 <147074924+bn46@users.noreply.github.com>
Date: Fri, 25 Oct 2024 13:38:37 +0530
Subject: [PATCH 123/130] Update push-trigger.yml

Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com>
---
 .github/workflows/push-trigger.yml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml
index a64c5fc6..b3394f7e 100644
--- a/.github/workflows/push-trigger.yml
+++ b/.github/workflows/push-trigger.yml
@@ -37,10 +37,10 @@ jobs:
       WORKFLOW: ${{ github.workflow }}
       #PR_TITLE: ${{ github.event.pull_request.title || 'N/A' }}
       ACTION: ${{ github.event.action || 'N/A' }}
-      PR_URL: "<${{ github.event.pull_request.html_url || 'N/A' }}|PR #${{ github.event.pull_request.number }}>"
-      COMMIT: "<${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|Commit>"
+      PR_URL: "!<${{ github.event.pull_request.html_url || 'N/A' }}|PR #${{ github.event.pull_request.number }}>"
+      COMMIT: "!<${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|Commit>"
       REPO: ${{ github.repository }}
-      JOB_URL: "<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Job Run>"
+      JOB_URL: "!<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Job Run>"
       #JOB_URL: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
       MESSAGE: ${{ github.event.head_commit.message || github.event.pull_request.title }}

From e6fc9303b55710e719b9a16d36fec88b0865902a Mon Sep 17 00:00:00 2001
From: bn46 <147074924+bn46@users.noreply.github.com>
Date: Fri, 25 Oct 2024 13:41:44 +0530
Subject: [PATCH 124/130] Update push-trigger.yml

Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com>
---
 .github/workflows/push-trigger.yml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml
index b3394f7e..feafaa68 100644
--- a/.github/workflows/push-trigger.yml
+++ b/.github/workflows/push-trigger.yml
@@ -37,10 +37,10 @@ jobs:
       WORKFLOW: ${{ github.workflow }}
       #PR_TITLE: ${{ github.event.pull_request.title || 'N/A' }}
       ACTION: ${{ github.event.action || 'N/A' }}
-      PR_URL: "!<${{ github.event.pull_request.html_url || 'N/A' }}|PR #${{ github.event.pull_request.number }}>"
-      COMMIT: "!<${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|Commit>"
+      PR_URL: "https://github.com/${{ github.repository }}/pull/${{ github.event.pull_request.number }}"
+      COMMIT: "https://github.com/${{ github.repository }}/commit/${{ github.sha }}"
       REPO: ${{ github.repository }}
-      JOB_URL: "!<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Job Run>"
+      JOB_URL: "https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"
       #JOB_URL: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
       MESSAGE: ${{ github.event.head_commit.message || github.event.pull_request.title }}

From 92fa132fbf5f90338528048f6fa48e1957698bc5 Mon Sep 17 00:00:00 2001
From: bn46 <147074924+bn46@users.noreply.github.com>
Date: Fri, 25 Oct 2024 13:50:06 +0530
Subject: [PATCH 125/130] Update push-trigger.yml

Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com>
---
 .github/workflows/push-trigger.yml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml
index feafaa68..941cbe20 100644
--- a/.github/workflows/push-trigger.yml
+++ b/.github/workflows/push-trigger.yml
@@ -37,10 +37,10 @@ jobs:
       WORKFLOW: ${{ github.workflow }}
       #PR_TITLE: ${{ github.event.pull_request.title || 'N/A' }}
       ACTION: ${{ github.event.action || 'N/A' }}
-      PR_URL: "https://github.com/${{ github.repository }}/pull/${{ github.event.pull_request.number }}"
-      COMMIT: "https://github.com/${{ github.repository }}/commit/${{ github.sha }}"
+      PR_URL: "https://github\u200B.com/${{ github.repository }}/pull/${{ github.event.pull_request.number }}"
+      COMMIT: "https://github\u200B.com/${{ github.repository }}/commit/${{ github.sha }}"
       REPO: ${{ github.repository }}
-      JOB_URL: "https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"
+      JOB_URL: "https://github\u200B.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"
       #JOB_URL: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
       MESSAGE: ${{ github.event.head_commit.message || github.event.pull_request.title }}
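Note on patch 125: \u200B is the Unicode zero-width space (U+200B). YAML expands \u escapes only inside double-quoted scalars, so the rendered value reads like github.com to a human but is no longer a well-formed hostname to a link parser, presumably to stop Slack from auto-linking or unfurling the URLs. A minimal contrast, with illustrative key names and the URLs elided:

      LITERAL: 'https://github\u200B.com/...'   # single-quoted: six literal characters, backslash-u-2-0-0-B
      ZWSP: "https://github\u200B.com/..."      # double-quoted: one invisible U+200B character

Patch 126 below reverts the escape, but its JOB_URL loses the .com (https://github/...), a typo that survives until the whole variable block is deleted in patch 128.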
From c0ade6d8fbe23021d2a96a8c07b85bb35d30a49e Mon Sep 17 00:00:00 2001
From: bn46 <147074924+bn46@users.noreply.github.com>
Date: Fri, 25 Oct 2024 14:40:30 +0530
Subject: [PATCH 126/130] Update push-trigger.yml

Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com>
---
 .github/workflows/push-trigger.yml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml
index 941cbe20..611aa230 100644
--- a/.github/workflows/push-trigger.yml
+++ b/.github/workflows/push-trigger.yml
@@ -37,10 +37,10 @@ jobs:
       WORKFLOW: ${{ github.workflow }}
       #PR_TITLE: ${{ github.event.pull_request.title || 'N/A' }}
       ACTION: ${{ github.event.action || 'N/A' }}
-      PR_URL: "https://github\u200B.com/${{ github.repository }}/pull/${{ github.event.pull_request.number }}"
-      COMMIT: "https://github\u200B.com/${{ github.repository }}/commit/${{ github.sha }}"
+      PR_URL: "https://github.com/${{ github.repository }}/pull/${{ github.event.pull_request.number }}"
+      COMMIT: "https://github.com/${{ github.repository }}/commit/${{ github.sha }}"
       REPO: ${{ github.repository }}
-      JOB_URL: "https://github\u200B.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"
+      JOB_URL: "https://github/${{ github.repository }}/actions/runs/${{ github.run_id }}"
       #JOB_URL: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
       MESSAGE: ${{ github.event.head_commit.message || github.event.pull_request.title }}

From 1497a41c972cc5da4929e2f0b9311e0e69f37070 Mon Sep 17 00:00:00 2001
From: bn46 <147074924+bn46@users.noreply.github.com>
Date: Mon, 28 Oct 2024 00:01:49 +0530
Subject: [PATCH 127/130] Update push-trigger.yml

Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com>
---
 .github/workflows/push-trigger.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml
index 611aa230..ef1a5188 100644
--- a/.github/workflows/push-trigger.yml
+++ b/.github/workflows/push-trigger.yml
@@ -51,6 +51,7 @@ jobs:
       GPG_USER_MAP_PASSPHRASE: ${{ secrets.GPG_USER_MAP_PASSPHRASE }}
       SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
       SLACK_CHANNEL_ID: ${{ secrets.GENERAL_CHANNEL_ID }}
+      SECONDARY_CHANNEL_ID: ${{ secrets.ENGG_TEAM_SLACK_CHANNEL }}

  # notify-slack-on-failure:
  #   if: failure()

From 3da7b39fea6ae2858c4a6959c09ffbb830935ac5 Mon Sep 17 00:00:00 2001
From: bn46 <147074924+bn46@users.noreply.github.com>
Date: Wed, 4 Dec 2024 18:08:10 +0530
Subject: [PATCH 128/130] Update push-trigger.yml

Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com>
---
 .github/workflows/push-trigger.yml | 33 ++----------------------------
 1 file changed, 2 insertions(+), 31 deletions(-)

diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml
index ef1a5188..f0751ff6 100644
--- a/.github/workflows/push-trigger.yml
+++ b/.github/workflows/push-trigger.yml
@@ -29,41 +29,12 @@ jobs:
         SERVICE_NAME: 'data-archive'
       fail-fast: false
     name: ${{ matrix.SERVICE_NAME }}
-    uses: bn46org/kattu/.github/workflows/docker-build.yml@develop
+    uses: bn46org/kattu/.github/workflows/docker-build.yml@master-java21
     with:
       SERVICE_LOCATION: ${{ matrix.SERVICE_LOCATION }}
       SERVICE_NAME: ${{ matrix.SERVICE_NAME }}
-      GITHUB_USER: ${{ github.actor }}
-      WORKFLOW: ${{ github.workflow }}
-      #PR_TITLE: ${{ github.event.pull_request.title || 'N/A' }}
-      ACTION: ${{ github.event.action || 'N/A' }}
-      PR_URL: "https://github.com/${{ github.repository }}/pull/${{ github.event.pull_request.number }}"
-      COMMIT: "https://github.com/${{ github.repository }}/commit/${{ github.sha }}"
-      REPO: ${{ github.repository }}
-      JOB_URL: "https://github/${{ github.repository }}/actions/runs/${{ github.run_id }}"
-
-      #JOB_URL: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
-      MESSAGE: ${{ github.event.head_commit.message || github.event.pull_request.title }}
     secrets:
       DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }}
       ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }}
       RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }}
-      GPG_USER_MAP_PASSPHRASE: ${{ secrets.GPG_USER_MAP_PASSPHRASE }}
-      SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
-      SLACK_CHANNEL_ID: ${{ secrets.GENERAL_CHANNEL_ID }}
-      SECONDARY_CHANNEL_ID: ${{ secrets.ENGG_TEAM_SLACK_CHANNEL }}
-
-  # notify-slack-on-failure:
-  #   if: failure()
-  #   needs: build-dockers
-  #   uses: bn46org/kattu/.github/workflows/slack-notification.yml@develop
-  #   with:
-  #     GITHUB_USER: ${{ github.actor }}
-  #     REPOSITORY: ${{ github.repository }}
-  #     PR_TITLE: ${{ github.event.pull_request.title || 'N/A' }}
-  #     ACTION: ${{ github.event.action || 'N/A' }}
-  #     PR_URL: ${{ github.event.pull_request.html_url || github.event.release.html_url || 'N/A' }}
-  #   secrets:
-  #     GPG_USER_MAP_PASSPHRASE: ${{ secrets.GPG_USER_MAP_PASSPHRASE }}
-  #     SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
-  #     SLACK_CHANNEL_ID: ${{ secrets.GENERAL_CHANNEL_ID }}
+      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_DEVOPS }}

From bb07c6209a13a09f4e03394a9c4611d14eaeef1e Mon Sep 17 00:00:00 2001
From: bn46 <147074924+bn46@users.noreply.github.com>
Date: Wed, 4 Dec 2024 18:18:18 +0530
Subject: [PATCH 129/130] Update push-trigger.yml

Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com>
---
 .github/workflows/push-trigger.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/push-trigger.yml b/.github/workflows/push-trigger.yml
index f0751ff6..143fdf80 100644
--- a/.github/workflows/push-trigger.yml
+++ b/.github/workflows/push-trigger.yml
@@ -29,7 +29,7 @@ jobs:
         SERVICE_NAME: 'data-archive'
       fail-fast: false
     name: ${{ matrix.SERVICE_NAME }}
-    uses: bn46org/kattu/.github/workflows/docker-build.yml@master-java21
+    uses: bn46org/kattu/.github/workflows/docker-build.yml@master
     with:
       SERVICE_LOCATION: ${{ matrix.SERVICE_LOCATION }}
       SERVICE_NAME: ${{ matrix.SERVICE_NAME }}

From 88c673c203ff533bf9f6b8e19025c52c510184a7 Mon Sep 17 00:00:00 2001
From: bn46 <147074924+bn46@users.noreply.github.com>
Date: Tue, 17 Dec 2024 10:58:02 +0530
Subject: [PATCH 130/130] Update entrypoint.sh

Signed-off-by: bn46 <147074924+bn46@users.noreply.github.com>
---
 data-archive/entrypoint.sh | 1 +
 1 file changed, 1 insertion(+)

diff --git a/data-archive/entrypoint.sh b/data-archive/entrypoint.sh
index 225ff02f..0cee4907 100755
--- a/data-archive/entrypoint.sh
+++ b/data-archive/entrypoint.sh
@@ -1,5 +1,6 @@
 #!/bin/sh
 # entrypoint.sh
+# test
 set -e
 echo "Executing db.sh"
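Note on patches 128-129: the series ends by delegating all Slack formatting to the shared kattu docker-build workflow; the caller pins the reusable workflow ref (@develop, then @master-java21, then @master) and passes a single webhook secret in place of the bot-token/channel pairs. Assembled from the hunks above (the patches never show the complete file, so the strategy/matrix layout is inferred from the context lines), the resulting caller job is roughly:

jobs:
  build-dockers:
    strategy:
      matrix:
        include:
          - SERVICE_LOCATION: 'data-archive'
            SERVICE_NAME: 'data-archive'
      fail-fast: false
    name: ${{ matrix.SERVICE_NAME }}
    uses: bn46org/kattu/.github/workflows/docker-build.yml@master
    with:
      SERVICE_LOCATION: ${{ matrix.SERVICE_LOCATION }}
      SERVICE_NAME: ${{ matrix.SERVICE_NAME }}
    secrets:
      DEV_NAMESPACE_DOCKER_HUB: ${{ secrets.DEV_NAMESPACE_DOCKER_HUB }}
      ACTOR_DOCKER_HUB: ${{ secrets.ACTOR_DOCKER_HUB }}
      RELEASE_DOCKER_HUB: ${{ secrets.RELEASE_DOCKER_HUB }}
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_DEVOPS }}

Patch 130 is unrelated housekeeping: a test comment added to data-archive/entrypoint.sh, whose set -e already aborts the script on the first failing command.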