diff --git a/.github/workflows/deploy-linux.yml b/.github/workflows/deploy-linux.yml index c5e66ccd0..1cdaf6292 100644 --- a/.github/workflows/deploy-linux.yml +++ b/.github/workflows/deploy-linux.yml @@ -46,7 +46,7 @@ on: required: false default: false type: boolean - exp: + EXP: description: 'Enable EXP' required: false default: false @@ -61,6 +61,14 @@ on: required: false default: false type: boolean + AZURE_ENV_USE_CASE: + description: 'Specify Use case to deploy' + type: 'choice' + options: + - 'telecom' + - 'IT_helpdesk' + required: false + default: 'telecom' run_e2e_tests: description: 'Run End-to-End Tests' required: false @@ -70,12 +78,12 @@ on: - 'GoldenPath-Testing' - 'Smoke-Testing' - 'None' - azure_env_log_anlytics_workspace_id: + AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: description: 'Log Analytics Workspace ID (Optional)' required: false default: '' type: string - azure_existing_ai_project_resource_id: + AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: description: 'AI Project Resource ID (Optional)' required: false default: '' @@ -85,25 +93,210 @@ on: required: false default: '' type: string + permissions: contents: read actions: read jobs: + validate-inputs: + runs-on: ubuntu-latest + outputs: + validation_passed: ${{ steps.validate.outputs.passed }} + azure_location: ${{ steps.validate.outputs.azure_location }} + resource_group_name: ${{ steps.validate.outputs.resource_group_name }} + waf_enabled: ${{ steps.validate.outputs.waf_enabled }} + exp: ${{ steps.validate.outputs.exp }} + build_docker_image: ${{ steps.validate.outputs.build_docker_image }} + cleanup_resources: ${{ steps.validate.outputs.cleanup_resources }} + run_e2e_tests: ${{ steps.validate.outputs.run_e2e_tests }} + azure_env_log_analytics_workspace_id: ${{ steps.validate.outputs.azure_env_log_analytics_workspace_id }} + azure_existing_ai_project_resource_id: ${{ steps.validate.outputs.azure_existing_ai_project_resource_id }} + existing_webapp_url: ${{ steps.validate.outputs.existing_webapp_url }} + 
azure_env_use_case: ${{ steps.validate.outputs.azure_env_use_case }} + + steps: + - name: Validate Workflow Input Parameters + id: validate + shell: bash + env: + INPUT_AZURE_LOCATION: ${{ github.event.inputs.azure_location }} + INPUT_RESOURCE_GROUP_NAME: ${{ github.event.inputs.resource_group_name }} + INPUT_WAF_ENABLED: ${{ github.event.inputs.waf_enabled }} + INPUT_EXP: ${{ github.event.inputs.EXP }} + INPUT_BUILD_DOCKER_IMAGE: ${{ github.event.inputs.build_docker_image }} + INPUT_CLEANUP_RESOURCES: ${{ github.event.inputs.cleanup_resources }} + INPUT_RUN_E2E_TESTS: ${{ github.event.inputs.run_e2e_tests }} + INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ github.event.inputs.AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID }} + INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: ${{ github.event.inputs.AZURE_EXISTING_AI_PROJECT_RESOURCE_ID }} + INPUT_EXISTING_WEBAPP_URL: ${{ github.event.inputs.existing_webapp_url }} + INPUT_AZURE_ENV_USE_CASE: ${{ github.event.inputs.AZURE_ENV_USE_CASE }} + + run: | + echo "🔍 Validating workflow input parameters..." + VALIDATION_FAILED=false + + # Validate azure_location (Azure region format) + LOCATION="${INPUT_AZURE_LOCATION:-australiaeast}" + + if [[ ! "$LOCATION" =~ ^[a-z0-9]+$ ]]; then + echo "❌ ERROR: azure_location '$LOCATION' is invalid. Must contain only lowercase letters and numbers" + VALIDATION_FAILED=true + else + echo "✅ azure_location: '$LOCATION' is valid" + fi + + # Validate resource_group_name (Azure naming convention, optional) + if [[ -n "$INPUT_RESOURCE_GROUP_NAME" ]]; then + if [[ ! "$INPUT_RESOURCE_GROUP_NAME" =~ ^[a-zA-Z0-9._\(\)-]+$ ]] || [[ "$INPUT_RESOURCE_GROUP_NAME" =~ \.$ ]]; then + echo "❌ ERROR: resource_group_name '$INPUT_RESOURCE_GROUP_NAME' is invalid. Must contain only alphanumerics, periods, underscores, hyphens, and parentheses. Cannot end with period." 
+ VALIDATION_FAILED=true + elif [[ ${#INPUT_RESOURCE_GROUP_NAME} -gt 90 ]]; then + echo "❌ ERROR: resource_group_name '$INPUT_RESOURCE_GROUP_NAME' exceeds 90 characters (length: ${#INPUT_RESOURCE_GROUP_NAME})" + VALIDATION_FAILED=true + else + echo "✅ resource_group_name: '$INPUT_RESOURCE_GROUP_NAME' is valid" + fi + else + echo "✅ resource_group_name: Not provided (will be auto-generated)" + fi + + # Validate waf_enabled (boolean) + WAF_ENABLED="${INPUT_WAF_ENABLED:-false}" + if [[ "$WAF_ENABLED" != "true" && "$WAF_ENABLED" != "false" ]]; then + echo "❌ ERROR: waf_enabled must be 'true' or 'false', got: '$WAF_ENABLED'" + VALIDATION_FAILED=true + else + echo "✅ waf_enabled: '$WAF_ENABLED' is valid" + fi + + # Validate EXP (boolean) + EXP_ENABLED="${INPUT_EXP:-false}" + if [[ "$EXP_ENABLED" != "true" && "$EXP_ENABLED" != "false" ]]; then + echo "❌ ERROR: EXP must be 'true' or 'false', got: '$EXP_ENABLED'" + VALIDATION_FAILED=true + else + echo "✅ EXP: '$EXP_ENABLED' is valid" + fi + + # Validate build_docker_image (boolean) + BUILD_DOCKER="${INPUT_BUILD_DOCKER_IMAGE:-false}" + if [[ "$BUILD_DOCKER" != "true" && "$BUILD_DOCKER" != "false" ]]; then + echo "❌ ERROR: build_docker_image must be 'true' or 'false', got: '$BUILD_DOCKER'" + VALIDATION_FAILED=true + else + echo "✅ build_docker_image: '$BUILD_DOCKER' is valid" + fi + + # Validate cleanup_resources (boolean) + CLEANUP_RESOURCES="${INPUT_CLEANUP_RESOURCES:-false}" + if [[ "$CLEANUP_RESOURCES" != "true" && "$CLEANUP_RESOURCES" != "false" ]]; then + echo "❌ ERROR: cleanup_resources must be 'true' or 'false', got: '$CLEANUP_RESOURCES'" + VALIDATION_FAILED=true + else + echo "✅ cleanup_resources: '$CLEANUP_RESOURCES' is valid" + fi + + # Validate run_e2e_tests (specific allowed values) + TEST_OPTION="${INPUT_RUN_E2E_TESTS:-GoldenPath-Testing}" + if [[ "$TEST_OPTION" != "GoldenPath-Testing" && "$TEST_OPTION" != "Smoke-Testing" && "$TEST_OPTION" != "None" ]]; then + echo "❌ ERROR: run_e2e_tests must be one of: 
GoldenPath-Testing, Smoke-Testing, None, got: '$TEST_OPTION'" + VALIDATION_FAILED=true + else + echo "✅ run_e2e_tests: '$TEST_OPTION' is valid" + fi + + # Validate AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID (optional, Azure Resource ID format) + if [[ -n "$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" ]]; then + if [[ ! "$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/[Rr]esource[Gg]roups/[^/]+/providers/[Mm]icrosoft\.[Oo]perational[Ii]nsights/[Ww]orkspaces/[^/]+$ ]]; then + echo "❌ ERROR: AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID is invalid. Must be a valid Azure Resource ID format:" + echo " /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}" + echo " Got: '$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID'" + VALIDATION_FAILED=true + else + echo "✅ AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: Valid Resource ID format" + fi + else + echo "✅ AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: Not provided (optional)" + fi + + # Validate AZURE_EXISTING_AI_PROJECT_RESOURCE_ID (optional, Azure Resource ID format) + if [[ -n "$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" ]]; then + if [[ ! "$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/[Rr]esource[Gg]roups/[^/]+/providers/([Mm]icrosoft\.[Mm]achine[Ll]earning[Ss]ervices/([Ww]orkspaces|[Pp]rojects)/[^/]+|[Mm]icrosoft\.[Cc]ognitive[Ss]ervices/[Aa]ccounts/[^/]+/[Pp]rojects/[^/]+)$ ]]; then + echo "❌ ERROR: AZURE_EXISTING_AI_PROJECT_RESOURCE_ID is invalid. 
Must be a valid Azure Resource ID format:" + echo " /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CognitiveServices/accounts/{accountName}/projects/{projectName}" + echo " Got: '$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID'" + VALIDATION_FAILED=true + else + echo "✅ AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: Valid Resource ID format" + fi + else + echo "✅ AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: Not provided (optional)" + fi + + # Validate existing_webapp_url (optional, must start with https) + if [[ -n "$INPUT_EXISTING_WEBAPP_URL" ]]; then + if [[ ! "$INPUT_EXISTING_WEBAPP_URL" =~ ^https:// ]]; then + echo "❌ ERROR: existing_webapp_url must start with 'https://', got: '$INPUT_EXISTING_WEBAPP_URL'" + VALIDATION_FAILED=true + else + echo "✅ existing_webapp_url: '$INPUT_EXISTING_WEBAPP_URL' is valid" + fi + else + echo "✅ existing_webapp_url: Not provided (will perform deployment)" + fi + + # Validate AZURE_ENV_USE_CASE (specific allowed values) + USE_CASE="${INPUT_AZURE_ENV_USE_CASE:-telecom}" + if [[ "$USE_CASE" != "telecom" && "$USE_CASE" != "IT_helpdesk" ]]; then + echo "❌ ERROR: AZURE_ENV_USE_CASE must be one of: telecom, IT_helpdesk, got: '$USE_CASE'" + VALIDATION_FAILED=true + else + echo "✅ AZURE_ENV_USE_CASE: '$USE_CASE' is valid" + fi + + # Fail workflow if any validation failed + if [[ "$VALIDATION_FAILED" == "true" ]]; then + echo "" + echo "❌ Parameter validation failed. Please correct the errors above and try again." + exit 1 + fi + + echo "" + echo "✅ All input parameters validated successfully!" 
+ + # Output validated values + echo "passed=true" >> $GITHUB_OUTPUT + echo "azure_location=$LOCATION" >> $GITHUB_OUTPUT + echo "resource_group_name=$INPUT_RESOURCE_GROUP_NAME" >> $GITHUB_OUTPUT + echo "waf_enabled=$WAF_ENABLED" >> $GITHUB_OUTPUT + echo "exp=$EXP_ENABLED" >> $GITHUB_OUTPUT + echo "build_docker_image=$BUILD_DOCKER" >> $GITHUB_OUTPUT + echo "cleanup_resources=$CLEANUP_RESOURCES" >> $GITHUB_OUTPUT + echo "run_e2e_tests=$TEST_OPTION" >> $GITHUB_OUTPUT + echo "azure_env_log_analytics_workspace_id=$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" >> $GITHUB_OUTPUT + echo "azure_existing_ai_project_resource_id=$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" >> $GITHUB_OUTPUT + echo "existing_webapp_url=$INPUT_EXISTING_WEBAPP_URL" >> $GITHUB_OUTPUT + echo "azure_env_use_case=$USE_CASE" >> $GITHUB_OUTPUT + + Run: + needs: validate-inputs + if: needs.validate-inputs.outputs.validation_passed == 'true' uses: ./.github/workflows/deploy-orchestrator.yml with: runner_os: ubuntu-latest - azure_location: ${{ github.event.inputs.azure_location || 'australiaeast' }} - resource_group_name: ${{ github.event.inputs.resource_group_name || '' }} - waf_enabled: ${{ github.event.inputs.waf_enabled == 'true' }} - exp: ${{ github.event.inputs.exp == 'true' }} - build_docker_image: ${{ github.event.inputs.build_docker_image == 'true' }} - cleanup_resources: ${{ github.event.inputs.cleanup_resources == 'true' }} - run_e2e_tests: ${{ github.event.inputs.run_e2e_tests || 'GoldenPath-Testing' }} - azure_env_log_anlytics_workspace_id: ${{ github.event.inputs.azure_env_log_anlytics_workspace_id || '' }} - azure_existing_ai_project_resource_id: ${{ github.event.inputs.azure_existing_ai_project_resource_id || '' }} - existing_webapp_url: ${{ github.event.inputs.existing_webapp_url || '' }} + azure_location: ${{ needs.validate-inputs.outputs.azure_location || 'australiaeast' }} + resource_group_name: ${{ needs.validate-inputs.outputs.resource_group_name || '' }} + waf_enabled: ${{ 
needs.validate-inputs.outputs.waf_enabled == 'true' }} + exp: ${{ needs.validate-inputs.outputs.exp == 'true' }} + build_docker_image: ${{ needs.validate-inputs.outputs.build_docker_image == 'true' }} + cleanup_resources: ${{ needs.validate-inputs.outputs.cleanup_resources == 'true' }} + run_e2e_tests: ${{ needs.validate-inputs.outputs.run_e2e_tests || 'GoldenPath-Testing' }} + azure_env_log_analytics_workspace_id: ${{ needs.validate-inputs.outputs.azure_env_log_analytics_workspace_id || '' }} + azure_existing_ai_project_resource_id: ${{ needs.validate-inputs.outputs.azure_existing_ai_project_resource_id || '' }} + existing_webapp_url: ${{ needs.validate-inputs.outputs.existing_webapp_url || '' }} + azure_env_use_case: ${{ needs.validate-inputs.outputs.azure_env_use_case || 'telecom' }} trigger_type: ${{ github.event_name }} secrets: inherit \ No newline at end of file diff --git a/.github/workflows/deploy-orchestrator.yml b/.github/workflows/deploy-orchestrator.yml index daa6a5389..c6a9a0c05 100644 --- a/.github/workflows/deploy-orchestrator.yml +++ b/.github/workflows/deploy-orchestrator.yml @@ -42,7 +42,7 @@ on: required: false default: 'GoldenPath-Testing' type: string - azure_env_log_anlytics_workspace_id: + azure_env_log_analytics_workspace_id: description: 'Log Analytics Workspace ID (Optional)' required: false default: '' @@ -57,6 +57,11 @@ on: required: false default: '' type: string + azure_env_use_case: + description: 'Azure Environment Use Case (telecom or IT_helpdesk)' + required: false + default: 'telecom' + type: string trigger_type: description: 'Trigger type (workflow_dispatch, pull_request, schedule)' required: true @@ -74,7 +79,7 @@ jobs: secrets: inherit deploy: - if: always() && (inputs.trigger_type != 'workflow_dispatch' || inputs.existing_webapp_url == '' || inputs.existing_webapp_url == null) + if: "!cancelled() && (needs.docker-build.result == 'success' || needs.docker-build.result == 'skipped') && (inputs.trigger_type != 
'workflow_dispatch' || inputs.existing_webapp_url == '' || inputs.existing_webapp_url == null)" needs: docker-build uses: ./.github/workflows/job-azure-deploy.yml with: @@ -86,25 +91,27 @@ jobs: exp: ${{ inputs.exp }} build_docker_image: ${{ inputs.build_docker_image }} existing_webapp_url: ${{ inputs.existing_webapp_url }} - azure_env_log_anlytics_workspace_id: ${{ inputs.azure_env_log_anlytics_workspace_id }} + azure_env_log_analytics_workspace_id: ${{ inputs.azure_env_log_analytics_workspace_id }} azure_existing_ai_project_resource_id: ${{ inputs.azure_existing_ai_project_resource_id }} + azure_env_use_case: ${{ inputs.azure_env_use_case }} docker_image_tag: ${{ needs.docker-build.outputs.IMAGE_TAG }} run_e2e_tests: ${{ inputs.run_e2e_tests }} cleanup_resources: ${{ inputs.cleanup_resources }} secrets: inherit e2e-test: - if: always() && ((needs.deploy.result == 'success' && needs.deploy.outputs.WEB_APP_URL != '') || (inputs.existing_webapp_url != '' && inputs.existing_webapp_url != null)) && (inputs.trigger_type != 'workflow_dispatch' || (inputs.run_e2e_tests != 'None' && inputs.run_e2e_tests != '' && inputs.run_e2e_tests != null)) + if: false # E2E testing disabled needs: [docker-build, deploy] uses: ./.github/workflows/job-test-automation.yml with: KMGENERIC_URL: ${{ needs.deploy.outputs.WEB_APP_URL || inputs.existing_webapp_url }} KMGENERIC_URL_API: ${{ needs.deploy.outputs.API_APP_URL || inputs.existing_webapp_url }} TEST_SUITE: ${{ inputs.trigger_type == 'workflow_dispatch' && inputs.run_e2e_tests || 'GoldenPath-Testing' }} + AZURE_ENV_USE_CASE: ${{ inputs.azure_env_use_case }} secrets: inherit send-notification: - if: always() + if: "!cancelled()" needs: [docker-build, deploy, e2e-test] uses: ./.github/workflows/job-send-notifications.yml with: @@ -123,7 +130,7 @@ jobs: secrets: inherit cleanup-deployment: - if: always() && needs.deploy.result == 'success' && needs.deploy.outputs.RESOURCE_GROUP_NAME != '' && inputs.existing_webapp_url == '' && 
(inputs.trigger_type != 'workflow_dispatch' || inputs.cleanup_resources) + if: "!cancelled() && needs.deploy.outputs.RESOURCE_GROUP_NAME != '' && inputs.existing_webapp_url == '' && (inputs.trigger_type != 'workflow_dispatch' || inputs.cleanup_resources)" needs: [docker-build, deploy, e2e-test] uses: ./.github/workflows/job-cleanup-resources.yml with: diff --git a/.github/workflows/deploy-windows.yml b/.github/workflows/deploy-windows.yml index 8163196ea..7f8b709dc 100644 --- a/.github/workflows/deploy-windows.yml +++ b/.github/workflows/deploy-windows.yml @@ -30,7 +30,7 @@ on: required: false default: false type: boolean - exp: + EXP: description: 'Enable EXP' required: false default: false @@ -45,6 +45,14 @@ on: required: false default: false type: boolean + AZURE_ENV_USE_CASE: + description: 'Specify Use case to deploy' + type: 'choice' + options: + - 'telecom' + - 'IT_helpdesk' + required: false + default: 'telecom' run_e2e_tests: description: 'Run End-to-End Tests' required: false @@ -54,12 +62,12 @@ on: - 'GoldenPath-Testing' - 'Smoke-Testing' - 'None' - azure_env_log_anlytics_workspace_id: + AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: description: 'Log Analytics Workspace ID (Optional)' required: false default: '' type: string - azure_existing_ai_project_resource_id: + AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: description: 'AI Project Resource ID (Optional)' required: false default: '' @@ -75,19 +83,200 @@ permissions: actions: read jobs: + validate-inputs: + runs-on: ubuntu-latest + outputs: + validation_passed: ${{ steps.validate.outputs.passed }} + azure_location: ${{ steps.validate.outputs.azure_location }} + resource_group_name: ${{ steps.validate.outputs.resource_group_name }} + waf_enabled: ${{ steps.validate.outputs.waf_enabled }} + exp: ${{ steps.validate.outputs.exp }} + build_docker_image: ${{ steps.validate.outputs.build_docker_image }} + cleanup_resources: ${{ steps.validate.outputs.cleanup_resources }} + run_e2e_tests: ${{ 
steps.validate.outputs.run_e2e_tests }} + azure_env_log_analytics_workspace_id: ${{ steps.validate.outputs.azure_env_log_analytics_workspace_id }} + azure_existing_ai_project_resource_id: ${{ steps.validate.outputs.azure_existing_ai_project_resource_id }} + existing_webapp_url: ${{ steps.validate.outputs.existing_webapp_url }} + azure_env_use_case: ${{ steps.validate.outputs.azure_env_use_case }} + steps: + - name: Validate Workflow Input Parameters + id: validate + shell: bash + env: + INPUT_AZURE_LOCATION: ${{ github.event.inputs.azure_location }} + INPUT_RESOURCE_GROUP_NAME: ${{ github.event.inputs.resource_group_name }} + INPUT_WAF_ENABLED: ${{ github.event.inputs.waf_enabled }} + INPUT_EXP: ${{ github.event.inputs.EXP }} + INPUT_BUILD_DOCKER_IMAGE: ${{ github.event.inputs.build_docker_image }} + INPUT_CLEANUP_RESOURCES: ${{ github.event.inputs.cleanup_resources }} + INPUT_RUN_E2E_TESTS: ${{ github.event.inputs.run_e2e_tests }} + INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ github.event.inputs.AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID }} + INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: ${{ github.event.inputs.AZURE_EXISTING_AI_PROJECT_RESOURCE_ID }} + INPUT_EXISTING_WEBAPP_URL: ${{ github.event.inputs.existing_webapp_url }} + INPUT_AZURE_ENV_USE_CASE: ${{ github.event.inputs.AZURE_ENV_USE_CASE }} + run: | + echo "🔍 Validating workflow input parameters..." + VALIDATION_FAILED=false + + # Validate azure_location (Azure region format) + LOCATION="${INPUT_AZURE_LOCATION:-australiaeast}" + + if [[ ! "$LOCATION" =~ ^[a-z0-9]+$ ]]; then + echo "❌ ERROR: azure_location '$LOCATION' is invalid. Must contain only lowercase letters and numbers" + VALIDATION_FAILED=true + else + echo "✅ azure_location: '$LOCATION' is valid" + fi + + # Validate resource_group_name (Azure naming convention, optional) + if [[ -n "$INPUT_RESOURCE_GROUP_NAME" ]]; then + if [[ ! 
"$INPUT_RESOURCE_GROUP_NAME" =~ ^[a-zA-Z0-9._\(\)-]+$ ]] || [[ "$INPUT_RESOURCE_GROUP_NAME" =~ \.$ ]]; then + echo "❌ ERROR: resource_group_name '$INPUT_RESOURCE_GROUP_NAME' is invalid. Must contain only alphanumerics, periods, underscores, hyphens, and parentheses. Cannot end with period." + VALIDATION_FAILED=true + elif [[ ${#INPUT_RESOURCE_GROUP_NAME} -gt 90 ]]; then + echo "❌ ERROR: resource_group_name '$INPUT_RESOURCE_GROUP_NAME' exceeds 90 characters (length: ${#INPUT_RESOURCE_GROUP_NAME})" + VALIDATION_FAILED=true + else + echo "✅ resource_group_name: '$INPUT_RESOURCE_GROUP_NAME' is valid" + fi + else + echo "✅ resource_group_name: Not provided (will be auto-generated)" + fi + + # Validate waf_enabled (boolean) + WAF_ENABLED="${INPUT_WAF_ENABLED:-false}" + if [[ "$WAF_ENABLED" != "true" && "$WAF_ENABLED" != "false" ]]; then + echo "❌ ERROR: waf_enabled must be 'true' or 'false', got: '$WAF_ENABLED'" + VALIDATION_FAILED=true + else + echo "✅ waf_enabled: '$WAF_ENABLED' is valid" + fi + + # Validate EXP (boolean) + EXP_ENABLED="${INPUT_EXP:-false}" + if [[ "$EXP_ENABLED" != "true" && "$EXP_ENABLED" != "false" ]]; then + echo "❌ ERROR: EXP must be 'true' or 'false', got: '$EXP_ENABLED'" + VALIDATION_FAILED=true + else + echo "✅ EXP: '$EXP_ENABLED' is valid" + fi + + # Validate build_docker_image (boolean) + BUILD_DOCKER="${INPUT_BUILD_DOCKER_IMAGE:-false}" + if [[ "$BUILD_DOCKER" != "true" && "$BUILD_DOCKER" != "false" ]]; then + echo "❌ ERROR: build_docker_image must be 'true' or 'false', got: '$BUILD_DOCKER'" + VALIDATION_FAILED=true + else + echo "✅ build_docker_image: '$BUILD_DOCKER' is valid" + fi + + # Validate cleanup_resources (boolean) + CLEANUP_RESOURCES="${INPUT_CLEANUP_RESOURCES:-false}" + if [[ "$CLEANUP_RESOURCES" != "true" && "$CLEANUP_RESOURCES" != "false" ]]; then + echo "❌ ERROR: cleanup_resources must be 'true' or 'false', got: '$CLEANUP_RESOURCES'" + VALIDATION_FAILED=true + else + echo "✅ cleanup_resources: '$CLEANUP_RESOURCES' is valid" + 
fi + + # Validate run_e2e_tests (specific allowed values) + TEST_OPTION="${INPUT_RUN_E2E_TESTS:-GoldenPath-Testing}" + if [[ "$TEST_OPTION" != "GoldenPath-Testing" && "$TEST_OPTION" != "Smoke-Testing" && "$TEST_OPTION" != "None" ]]; then + echo "❌ ERROR: run_e2e_tests must be one of: GoldenPath-Testing, Smoke-Testing, None, got: '$TEST_OPTION'" + VALIDATION_FAILED=true + else + echo "✅ run_e2e_tests: '$TEST_OPTION' is valid" + fi + + # Validate AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID (optional, Azure Resource ID format) + if [[ -n "$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" ]]; then + if [[ ! "$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/[Rr]esource[Gg]roups/[^/]+/providers/[Mm]icrosoft\.[Oo]perational[Ii]nsights/[Ww]orkspaces/[^/]+$ ]]; then + echo "❌ ERROR: AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID is invalid. Must be a valid Azure Resource ID format:" + echo " /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}" + echo " Got: '$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID'" + VALIDATION_FAILED=true + else + echo "✅ AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: Valid Resource ID format" + fi + else + echo "✅ AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: Not provided (optional)" + fi + + # Validate AZURE_EXISTING_AI_PROJECT_RESOURCE_ID (optional, Azure Resource ID format) + if [[ -n "$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" ]]; then + if [[ ! "$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/[Rr]esource[Gg]roups/[^/]+/providers/([Mm]icrosoft\.[Mm]achine[Ll]earning[Ss]ervices/([Ww]orkspaces|[Pp]rojects)/[^/]+|[Mm]icrosoft\.[Cc]ognitive[Ss]ervices/[Aa]ccounts/[^/]+/[Pp]rojects/[^/]+)$ ]]; then + echo "❌ ERROR: AZURE_EXISTING_AI_PROJECT_RESOURCE_ID is invalid. 
Must be a valid Azure Resource ID format:" + echo " /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CognitiveServices/accounts/{accountName}/projects/{projectName}" + echo " Got: '$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID'" + VALIDATION_FAILED=true + else + echo "✅ AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: Valid Resource ID format" + fi + else + echo "✅ AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: Not provided (optional)" + fi + + # Validate existing_webapp_url (optional, must start with https) + if [[ -n "$INPUT_EXISTING_WEBAPP_URL" ]]; then + if [[ ! "$INPUT_EXISTING_WEBAPP_URL" =~ ^https:// ]]; then + echo "❌ ERROR: existing_webapp_url must start with 'https://', got: '$INPUT_EXISTING_WEBAPP_URL'" + VALIDATION_FAILED=true + else + echo "✅ existing_webapp_url: '$INPUT_EXISTING_WEBAPP_URL' is valid" + fi + else + echo "✅ existing_webapp_url: Not provided (will perform deployment)" + fi + + # Validate AZURE_ENV_USE_CASE (specific allowed values) + USE_CASE="${INPUT_AZURE_ENV_USE_CASE:-telecom}" + if [[ "$USE_CASE" != "telecom" && "$USE_CASE" != "IT_helpdesk" ]]; then + echo "❌ ERROR: AZURE_ENV_USE_CASE must be one of: telecom, IT_helpdesk, got: '$USE_CASE'" + VALIDATION_FAILED=true + else + echo "✅ AZURE_ENV_USE_CASE: '$USE_CASE' is valid" + fi + + # Fail workflow if any validation failed + if [[ "$VALIDATION_FAILED" == "true" ]]; then + echo "" + echo "❌ Parameter validation failed. Please correct the errors above and try again." + exit 1 + fi + + echo "" + echo "✅ All input parameters validated successfully!" 
+ + # Output validated values + echo "passed=true" >> $GITHUB_OUTPUT + echo "azure_location=$LOCATION" >> $GITHUB_OUTPUT + echo "resource_group_name=$INPUT_RESOURCE_GROUP_NAME" >> $GITHUB_OUTPUT + echo "waf_enabled=$WAF_ENABLED" >> $GITHUB_OUTPUT + echo "exp=$EXP_ENABLED" >> $GITHUB_OUTPUT + echo "build_docker_image=$BUILD_DOCKER" >> $GITHUB_OUTPUT + echo "cleanup_resources=$CLEANUP_RESOURCES" >> $GITHUB_OUTPUT + echo "run_e2e_tests=$TEST_OPTION" >> $GITHUB_OUTPUT + echo "azure_env_log_analytics_workspace_id=$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" >> $GITHUB_OUTPUT + echo "azure_existing_ai_project_resource_id=$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" >> $GITHUB_OUTPUT + echo "existing_webapp_url=$INPUT_EXISTING_WEBAPP_URL" >> $GITHUB_OUTPUT + echo "azure_env_use_case=$USE_CASE" >> $GITHUB_OUTPUT + Run: + needs: validate-inputs + if: needs.validate-inputs.outputs.validation_passed == 'true' uses: ./.github/workflows/deploy-orchestrator.yml with: runner_os: windows-latest - azure_location: ${{ github.event.inputs.azure_location || 'australiaeast' }} - resource_group_name: ${{ github.event.inputs.resource_group_name || '' }} - waf_enabled: ${{ github.event.inputs.waf_enabled == 'true' }} - exp: ${{ github.event.inputs.exp == 'true' }} - build_docker_image: ${{ github.event.inputs.build_docker_image == 'true' }} - cleanup_resources: ${{ github.event.inputs.cleanup_resources == 'true' }} - run_e2e_tests: ${{ github.event.inputs.run_e2e_tests || 'GoldenPath-Testing' }} - azure_env_log_anlytics_workspace_id: ${{ github.event.inputs.azure_env_log_anlytics_workspace_id || '' }} - azure_existing_ai_project_resource_id: ${{ github.event.inputs.azure_existing_ai_project_resource_id || '' }} - existing_webapp_url: ${{ github.event.inputs.existing_webapp_url || '' }} + azure_location: ${{ needs.validate-inputs.outputs.azure_location || 'australiaeast' }} + resource_group_name: ${{ needs.validate-inputs.outputs.resource_group_name || '' }} + waf_enabled: ${{ 
needs.validate-inputs.outputs.waf_enabled == 'true' }} + exp: ${{ needs.validate-inputs.outputs.exp == 'true' }} + build_docker_image: ${{ needs.validate-inputs.outputs.build_docker_image == 'true' }} + cleanup_resources: ${{ needs.validate-inputs.outputs.cleanup_resources == 'true' }} + run_e2e_tests: ${{ needs.validate-inputs.outputs.run_e2e_tests || 'GoldenPath-Testing' }} + azure_env_log_analytics_workspace_id: ${{ needs.validate-inputs.outputs.azure_env_log_analytics_workspace_id || '' }} + azure_existing_ai_project_resource_id: ${{ needs.validate-inputs.outputs.azure_existing_ai_project_resource_id || '' }} + existing_webapp_url: ${{ needs.validate-inputs.outputs.existing_webapp_url || '' }} + azure_env_use_case: ${{ needs.validate-inputs.outputs.azure_env_use_case || 'telecom' }} trigger_type: ${{ github.event_name }} secrets: inherit \ No newline at end of file diff --git a/.github/workflows/job-azure-deploy.yml b/.github/workflows/job-azure-deploy.yml index b51c1313d..899ab3ecf 100644 --- a/.github/workflows/job-azure-deploy.yml +++ b/.github/workflows/job-azure-deploy.yml @@ -51,7 +51,7 @@ on: required: false default: '' type: string - azure_env_log_anlytics_workspace_id: + azure_env_log_analytics_workspace_id: description: 'Log Analytics Workspace ID (Optional)' required: false default: '' @@ -61,6 +61,11 @@ on: required: false default: '' type: string + azure_env_use_case: + description: 'Azure Environment Use Case (telecom or IT_helpdesk)' + required: false + default: 'telecom' + type: string docker_image_tag: description: 'Docker Image Tag from build job' required: false @@ -114,6 +119,8 @@ jobs: AZURE_ENV_OPENAI_LOCATION: ${{ steps.set_region.outputs.AZURE_ENV_OPENAI_LOCATION }} IMAGE_TAG: ${{ steps.determine_image_tag.outputs.IMAGE_TAG }} QUOTA_FAILED: ${{ steps.quota_failure_output.outputs.QUOTA_FAILED }} + EXP_ENABLED: ${{ steps.configure_exp.outputs.EXP_ENABLED }} + steps: - name: Validate Workflow Input Parameters @@ -128,7 +135,7 @@ jobs: 
INPUT_EXP: ${{ inputs.exp }} INPUT_CLEANUP_RESOURCES: ${{ inputs.cleanup_resources }} INPUT_RUN_E2E_TESTS: ${{ inputs.run_e2e_tests }} - INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ inputs.azure_env_log_anlytics_workspace_id }} + INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ inputs.azure_env_log_analytics_workspace_id }} INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: ${{ inputs.azure_existing_ai_project_resource_id }} INPUT_EXISTING_WEBAPP_URL: ${{ inputs.existing_webapp_url }} INPUT_DOCKER_IMAGE_TAG: ${{ inputs.docker_image_tag }} @@ -227,7 +234,7 @@ jobs: # Validate AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID (Azure Resource ID format) if [[ -n "$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" ]]; then - if [[ ! "$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/resourceGroups/[^/]+/providers/microsoft\.operationalinsights/workspaces/[^/]+$ ]]; then + if [[ ! "$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/[Rr]esource[Gg]roups/[^/]+/providers/[Mm]icrosoft\.[Oo]perational[Ii]nsights/[Ww]orkspaces/[^/]+$ ]]; then echo "❌ ERROR: AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID is invalid. Must be a valid Azure Resource ID format:" echo " /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}" echo " Got: '$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID'" @@ -239,7 +246,7 @@ jobs: # Validate AZURE_EXISTING_AI_PROJECT_RESOURCE_ID (Azure Resource ID format) if [[ -n "$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" ]]; then - if [[ ! "$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/resourceGroups/[^/]+/providers/(Microsoft\.MachineLearningServices/(workspaces|projects)/[^/]+|Microsoft\.CognitiveServices/accounts/[^/]+/projects/[^/]+)$ ]]; then + if [[ ! 
"$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/[Rr]esource[Gg]roups/[^/]+/providers/([Mm]icrosoft\.[Mm]achine[Ll]earning[Ss]ervices/([Ww]orkspaces|[Pp]rojects)/[^/]+|[Mm]icrosoft\.[Cc]ognitive[Ss]ervices/[Aa]ccounts/[^/]+/[Pp]rojects/[^/]+)$ ]]; then echo "❌ ERROR: AZURE_EXISTING_AI_PROJECT_RESOURCE_ID is invalid. Must be a valid Azure Resource ID format:" echo " /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CognitiveServices/accounts/{accountName}/projects/{projectName}" echo " Got: '$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID'" @@ -285,28 +292,35 @@ jobs: echo "✅ All input parameters validated successfully!" - name: Validate and Auto-Configure EXP + id: configure_exp shell: bash env: - INPUT_EXP: ${{ inputs.exp }} - INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ inputs.azure_env_log_anlytics_workspace_id }} - INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: ${{ inputs.azure_existing_ai_project_resource_id }} + INPUT_EXP: ${{ inputs.exp }} + INPUT_LOG_ANALYTICS_WORKSPACE_ID: ${{ inputs.azure_env_log_analytics_workspace_id }} + INPUT_AI_PROJECT_RESOURCE_ID: ${{ inputs.azure_existing_ai_project_resource_id }} run: | echo "🔍 Validating EXP configuration..." - if [[ "$INPUT_EXP" != "true" ]]; then - if [[ -n "$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" ]] || [[ -n "$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" ]]; then - echo "🔧 AUTO-ENABLING EXP: EXP parameter values were provided but EXP was not explicitly enabled." - echo "" - echo "You provided values for:" - [[ -n "$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" ]] && echo " - Azure Log Analytics Workspace ID: '$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID'" - [[ -n "$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" ]] && echo " - Azure AI Project Resource ID: '$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID'" - echo "" - echo "✅ Automatically enabling EXP to use these values." 
- echo "EXP=true" >> $GITHUB_ENV - echo "📌 EXP has been automatically enabled for this deployment." - fi + EXP_ENABLED="false" + + if [[ "$INPUT_EXP" == "true" ]]; then + EXP_ENABLED="true" + echo "✅ EXP explicitly enabled by user input" + elif [[ -n "$INPUT_LOG_ANALYTICS_WORKSPACE_ID" ]] || [[ -n "$INPUT_AI_PROJECT_RESOURCE_ID" ]]; then + echo "🔧 AUTO-ENABLING EXP: EXP parameter values were provided but EXP was not explicitly enabled." + echo "" + echo "You provided values for:" + [[ -n "$INPUT_LOG_ANALYTICS_WORKSPACE_ID" ]] && echo " - Azure Log Analytics Workspace ID: '$INPUT_LOG_ANALYTICS_WORKSPACE_ID'" + [[ -n "$INPUT_AI_PROJECT_RESOURCE_ID" ]] && echo " - Azure AI Project Resource ID: '$INPUT_AI_PROJECT_RESOURCE_ID'" + echo "" + echo "✅ Automatically enabling EXP to use these values." + EXP_ENABLED="true" fi + echo "EXP_ENABLED=$EXP_ENABLED" >> $GITHUB_ENV + echo "EXP_ENABLED=$EXP_ENABLED" >> $GITHUB_OUTPUT + echo "Final EXP status: $EXP_ENABLED" + - name: Checkout Code uses: actions/checkout@v4 @@ -335,6 +349,15 @@ jobs: fi exit 1 # Fail the pipeline if any other failure occurs fi + + - name: Set Quota Failure Output + id: quota_failure_output + if: env.QUOTA_FAILED == 'true' + shell: bash + run: | + echo "QUOTA_FAILED=true" >> $GITHUB_OUTPUT + echo "Quota check failed - will notify via separate notification job" + - name: Fail Pipeline if Quota Check Fails if: env.QUOTA_FAILED == 'true' @@ -477,9 +500,10 @@ jobs: INPUT_AZURE_LOCATION: ${{ inputs.azure_location }} INPUT_RESOURCE_GROUP_NAME: ${{ inputs.resource_group_name }} WAF_ENABLED_DISPLAY: ${{ env.WAF_ENABLED == 'true' && '✅ Yes' || '❌ No' }} - EXP_DISPLAY: ${{ env.EXP == 'true' && '✅ Yes' || '❌ No' }} + EXP_DISPLAY: ${{ steps.configure_exp.outputs.EXP_ENABLED == 'true' && '✅ Yes' || '❌ No' }} CLEANUP_DISPLAY: ${{ env.CLEANUP_RESOURCES == 'true' && '✅ Yes' || '❌ No' }} BUILD_DOCKER_DISPLAY: ${{ env.BUILD_DOCKER_IMAGE == 'true' && '✅ Yes' || '❌ No' }} + AZURE_ENV_USE_CASE: ${{ inputs.azure_env_use_case 
}} run: | echo "## 📋 Workflow Configuration Summary" >> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY @@ -493,6 +517,7 @@ jobs: echo "| **Run E2E Tests** | \`${{ env.RUN_E2E_TESTS }}\` |" >> $GITHUB_STEP_SUMMARY echo "| **Cleanup Resources** | $CLEANUP_DISPLAY |" >> $GITHUB_STEP_SUMMARY echo "| **Build Docker Image** | $BUILD_DOCKER_DISPLAY |" >> $GITHUB_STEP_SUMMARY + echo "| **Use Case** | \`$AZURE_ENV_USE_CASE\` |" >> $GITHUB_STEP_SUMMARY if [[ "$INPUT_TRIGGER_TYPE" == "workflow_dispatch" && -n "$INPUT_AZURE_LOCATION" ]]; then echo "| **Azure Location** | \`$INPUT_AZURE_LOCATION\` (User Selected) |" >> $GITHUB_STEP_SUMMARY @@ -515,7 +540,7 @@ jobs: deploy-linux: name: Deploy on Linux needs: azure-setup - if: inputs.runner_os == 'ubuntu-latest' && always() && needs.azure-setup.result == 'success' + if: inputs.runner_os == 'ubuntu-latest' && !cancelled() && needs.azure-setup.result == 'success' uses: ./.github/workflows/job-deploy-linux.yml with: ENV_NAME: ${{ needs.azure-setup.outputs.ENV_NAME }} @@ -524,16 +549,17 @@ jobs: RESOURCE_GROUP_NAME: ${{ needs.azure-setup.outputs.RESOURCE_GROUP_NAME }} IMAGE_TAG: ${{ needs.azure-setup.outputs.IMAGE_TAG }} BUILD_DOCKER_IMAGE: ${{ github.event.inputs.build_docker_image || 'false' }} - EXP: ${{ github.event.inputs.exp || 'false' }} + EXP: ${{ needs.azure-setup.outputs.EXP_ENABLED }} WAF_ENABLED: ${{ inputs.waf_enabled == true && 'true' || 'false' }} - AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ inputs.azure_env_log_anlytics_workspace_id }} + AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ inputs.azure_env_log_analytics_workspace_id }} AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: ${{ inputs.azure_existing_ai_project_resource_id }} + AZURE_ENV_USE_CASE: ${{ inputs.azure_env_use_case }} secrets: inherit deploy-windows: name: Deploy on Windows needs: azure-setup - if: inputs.runner_os == 'windows-latest' && always() && needs.azure-setup.result == 'success' + if: inputs.runner_os == 'windows-latest' && !cancelled() && 
needs.azure-setup.result == 'success' uses: ./.github/workflows/job-deploy-windows.yml with: ENV_NAME: ${{ needs.azure-setup.outputs.ENV_NAME }} @@ -542,8 +568,9 @@ jobs: RESOURCE_GROUP_NAME: ${{ needs.azure-setup.outputs.RESOURCE_GROUP_NAME }} IMAGE_TAG: ${{ needs.azure-setup.outputs.IMAGE_TAG }} BUILD_DOCKER_IMAGE: ${{ github.event.inputs.build_docker_image || 'false' }} - EXP: ${{ github.event.inputs.exp || 'false' }} + EXP: ${{ needs.azure-setup.outputs.EXP_ENABLED }} WAF_ENABLED: ${{ inputs.waf_enabled == true && 'true' || 'false' }} - AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ inputs.azure_env_log_anlytics_workspace_id }} + AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ inputs.azure_env_log_analytics_workspace_id }} AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: ${{ inputs.azure_existing_ai_project_resource_id }} + AZURE_ENV_USE_CASE: ${{ inputs.azure_env_use_case }} secrets: inherit \ No newline at end of file diff --git a/.github/workflows/job-deploy-linux.yml b/.github/workflows/job-deploy-linux.yml index 4f3f65e71..6e9e8b569 100644 --- a/.github/workflows/job-deploy-linux.yml +++ b/.github/workflows/job-deploy-linux.yml @@ -34,6 +34,10 @@ on: AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: required: false type: string + AZURE_ENV_USE_CASE: + required: false + type: string + default: 'telecom' outputs: WEB_APP_URL: description: "Web App URL" @@ -65,6 +69,7 @@ jobs: INPUT_WAF_ENABLED: ${{ inputs.WAF_ENABLED }} INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ inputs.AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID }} INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: ${{ inputs.AZURE_EXISTING_AI_PROJECT_RESOURCE_ID }} + INPUT_AZURE_ENV_USE_CASE: ${{ inputs.AZURE_ENV_USE_CASE }} run: | echo "🔍 Validating workflow input parameters..." VALIDATION_FAILED=false @@ -153,7 +158,7 @@ jobs: # Validate AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID (optional, if provided must be valid Resource ID) if [[ -n "$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" ]]; then - if [[ ! 
"$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/resourceGroups/[^/]+/providers/microsoft\.operationalinsights/workspaces/[^/]+$ ]]; then + if [[ ! "$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/[Rr]esource[Gg]roups/[^/]+/providers/[Mm]icrosoft\.[Oo]perational[Ii]nsights/[Ww]orkspaces/[^/]+$ ]]; then echo "❌ ERROR: AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID is invalid. Must be a valid Azure Resource ID format:" echo " /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}" echo " Got: '$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID'" @@ -165,7 +170,7 @@ jobs: # Validate AZURE_EXISTING_AI_PROJECT_RESOURCE_ID (optional, if provided must be valid Resource ID) if [[ -n "$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" ]]; then - if [[ ! "$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/resourceGroups/[^/]+/providers/(Microsoft\.MachineLearningServices/(workspaces|projects)/[^/]+|Microsoft\.CognitiveServices/accounts/[^/]+/projects/[^/]+)$ ]]; then + if [[ ! "$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/[Rr]esource[Gg]roups/[^/]+/providers/([Mm]icrosoft\.[Mm]achine[Ll]earning[Ss]ervices/([Ww]orkspaces|[Pp]rojects)/[^/]+|[Mm]icrosoft\.[Cc]ognitive[Ss]ervices/[Aa]ccounts/[^/]+/[Pp]rojects/[^/]+)$ ]]; then echo "❌ ERROR: AZURE_EXISTING_AI_PROJECT_RESOURCE_ID is invalid. 
Must be a valid Azure Resource ID format:" echo " /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CognitiveServices/accounts/{accountName}/projects/{projectName}" echo " Got: '$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID'" @@ -175,6 +180,15 @@ jobs: fi fi + # Validate AZURE_ENV_USE_CASE (optional, must be 'telecom' or 'IT_helpdesk') + USE_CASE="${INPUT_AZURE_ENV_USE_CASE:-telecom}" + if [[ "$USE_CASE" != "telecom" && "$USE_CASE" != "IT_helpdesk" ]]; then + echo "❌ ERROR: AZURE_ENV_USE_CASE must be one of: telecom, IT_helpdesk, got: '$USE_CASE'" + VALIDATION_FAILED=true + else + echo "✅ AZURE_ENV_USE_CASE: '$USE_CASE' is valid" + fi + # Fail workflow if any validation failed if [[ "$VALIDATION_FAILED" == "true" ]]; then echo "" @@ -224,6 +238,7 @@ jobs: EXP: ${{ inputs.EXP }} AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ inputs.AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID }} AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: ${{ inputs.AZURE_EXISTING_AI_PROJECT_RESOURCE_ID }} + AZURE_ENV_USE_CASE: ${{ inputs.AZURE_ENV_USE_CASE }} run: | set -e echo "Starting azd deployment..." 
@@ -246,6 +261,7 @@ jobs: azd env set AZURE_LOCATION="$AZURE_LOCATION" azd env set AZURE_RESOURCE_GROUP="$RESOURCE_GROUP_NAME" azd env set AZURE_ENV_IMAGETAG="$IMAGE_TAG" + azd env set AZURE_ENV_USE_CASE="$AZURE_ENV_USE_CASE" if [[ "$BUILD_DOCKER_IMAGE" == "true" ]]; then ACR_NAME=$(echo "${{ secrets.ACR_TEST_LOGIN_SERVER }}") @@ -310,6 +326,7 @@ jobs: AZURE_LOCATION: ${{ inputs.AZURE_LOCATION }} AZURE_ENV_OPENAI_LOCATION: ${{ inputs.AZURE_ENV_OPENAI_LOCATION }} IMAGE_TAG: ${{ inputs.IMAGE_TAG }} + AZURE_ENV_USE_CASE: ${{ inputs.AZURE_ENV_USE_CASE }} run: | echo "## 🚀 Deploy Job Summary (Linux)" >> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY @@ -321,6 +338,7 @@ jobs: echo "| **Azure Region (Infrastructure)** | \`$AZURE_LOCATION\` |" >> $GITHUB_STEP_SUMMARY echo "| **Azure OpenAI Region** | \`$AZURE_ENV_OPENAI_LOCATION\` |" >> $GITHUB_STEP_SUMMARY echo "| **Docker Image Tag** | \`$IMAGE_TAG\` |" >> $GITHUB_STEP_SUMMARY + echo "| **Use Case** | \`$AZURE_ENV_USE_CASE\` |" >> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY if [[ "${{ job.status }}" == "success" ]]; then echo "### ✅ Deployment Details" >> $GITHUB_STEP_SUMMARY diff --git a/.github/workflows/job-deploy-windows.yml b/.github/workflows/job-deploy-windows.yml index 3295444ca..4787ddbd3 100644 --- a/.github/workflows/job-deploy-windows.yml +++ b/.github/workflows/job-deploy-windows.yml @@ -34,6 +34,10 @@ on: AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: required: false type: string + AZURE_ENV_USE_CASE: + required: false + type: string + default: 'telecom' AZURE_TAGS: required: false type: string @@ -68,6 +72,7 @@ jobs: INPUT_WAF_ENABLED: ${{ inputs.WAF_ENABLED }} INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ inputs.AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID }} INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: ${{ inputs.AZURE_EXISTING_AI_PROJECT_RESOURCE_ID }} + INPUT_AZURE_ENV_USE_CASE: ${{ inputs.AZURE_ENV_USE_CASE }} run: | echo "🔍 Validating workflow input parameters..." 
VALIDATION_FAILED=false @@ -156,7 +161,7 @@ jobs: # Validate AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID (optional, if provided must be valid Resource ID) if [[ -n "$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" ]]; then - if [[ ! "$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/resourceGroups/[^/]+/providers/microsoft\.operationalinsights/workspaces/[^/]+$ ]]; then + if [[ ! "$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/[Rr]esource[Gg]roups/[^/]+/providers/[Mm]icrosoft\.[Oo]perational[Ii]nsights/[Ww]orkspaces/[^/]+$ ]]; then echo "❌ ERROR: AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID is invalid. Must be a valid Azure Resource ID format:" echo " /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}" echo " Got: '$INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID'" @@ -168,7 +173,7 @@ jobs: # Validate AZURE_EXISTING_AI_PROJECT_RESOURCE_ID (optional, if provided must be valid Resource ID) if [[ -n "$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" ]]; then - if [[ ! "$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/resourceGroups/[^/]+/providers/(Microsoft\.MachineLearningServices/(workspaces|projects)/[^/]+|Microsoft\.CognitiveServices/accounts/[^/]+/projects/[^/]+)$ ]]; then + if [[ ! "$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID" =~ ^/subscriptions/[a-fA-F0-9-]+/[Rr]esource[Gg]roups/[^/]+/providers/([Mm]icrosoft\.[Mm]achine[Ll]earning[Ss]ervices/([Ww]orkspaces|[Pp]rojects)/[^/]+|[Mm]icrosoft\.[Cc]ognitive[Ss]ervices/[Aa]ccounts/[^/]+/[Pp]rojects/[^/]+)$ ]]; then echo "❌ ERROR: AZURE_EXISTING_AI_PROJECT_RESOURCE_ID is invalid. 
Must be a valid Azure Resource ID format:" echo " /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CognitiveServices/accounts/{accountName}/projects/{projectName}" echo " Got: '$INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID'" @@ -178,6 +183,15 @@ jobs: fi fi + # Validate AZURE_ENV_USE_CASE (optional, must be 'telecom' or 'IT_helpdesk') + USE_CASE="${INPUT_AZURE_ENV_USE_CASE:-telecom}" + if [[ "$USE_CASE" != "telecom" && "$USE_CASE" != "IT_helpdesk" ]]; then + echo "❌ ERROR: AZURE_ENV_USE_CASE must be one of: telecom, IT_helpdesk, got: '$USE_CASE'" + VALIDATION_FAILED=true + else + echo "✅ AZURE_ENV_USE_CASE: '$USE_CASE' is valid" + fi + # Fail workflow if any validation failed if [[ "$VALIDATION_FAILED" == "true" ]]; then echo "" @@ -228,6 +242,7 @@ jobs: INPUT_EXP: ${{ inputs.EXP }} INPUT_AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID: ${{ inputs.AZURE_ENV_LOG_ANALYTICS_WORKSPACE_ID }} INPUT_AZURE_EXISTING_AI_PROJECT_RESOURCE_ID: ${{ inputs.AZURE_EXISTING_AI_PROJECT_RESOURCE_ID }} + INPUT_AZURE_ENV_USE_CASE: ${{ inputs.AZURE_ENV_USE_CASE }} run: | $ErrorActionPreference = "Stop" Write-Host "Starting azd deployment..." 
@@ -247,6 +262,7 @@ jobs: azd env set AZURE_LOCATION="$env:INPUT_AZURE_LOCATION" azd env set AZURE_RESOURCE_GROUP="$env:INPUT_RESOURCE_GROUP_NAME" azd env set AZURE_ENV_IMAGETAG="$env:INPUT_IMAGE_TAG" + azd env set AZURE_ENV_USE_CASE="$env:INPUT_AZURE_ENV_USE_CASE" # Set ACR name only when building Docker image if ($env:INPUT_BUILD_DOCKER_IMAGE -eq "true") { @@ -315,6 +331,7 @@ jobs: INPUT_AZURE_LOCATION: ${{ inputs.AZURE_LOCATION }} INPUT_AZURE_ENV_OPENAI_LOCATION: ${{ inputs.AZURE_ENV_OPENAI_LOCATION }} INPUT_IMAGE_TAG: ${{ inputs.IMAGE_TAG }} + INPUT_AZURE_ENV_USE_CASE: ${{ inputs.AZURE_ENV_USE_CASE }} run: | echo "## 🚀 Deploy Job Summary (Windows)" >> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY @@ -338,6 +355,7 @@ jobs: echo "| **Azure Region (Infrastructure)** | \`$INPUT_AZURE_LOCATION\` |" >> $GITHUB_STEP_SUMMARY echo "| **Azure OpenAI Region** | \`$INPUT_AZURE_ENV_OPENAI_LOCATION\` |" >> $GITHUB_STEP_SUMMARY echo "| **Docker Image Tag** | \`$INPUT_IMAGE_TAG\` |" >> $GITHUB_STEP_SUMMARY + echo "| **Use Case** | \`$INPUT_AZURE_ENV_USE_CASE\` |" >> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY if [ "${{ job.status }}" == "success" ]; then echo "### ✅ Deployment Details" >> $GITHUB_STEP_SUMMARY diff --git a/.github/workflows/job-send-notifications.yml b/.github/workflows/job-send-notifications.yml index 2d564ffb2..6330f02b0 100644 --- a/.github/workflows/job-send-notifications.yml +++ b/.github/workflows/job-send-notifications.yml @@ -193,12 +193,12 @@ jobs: fi fi - # Validate quota_failed (must be 'true' or 'false') - if [[ "$INPUT_QUOTA_FAILED" != "true" && "$INPUT_QUOTA_FAILED" != "false" ]]; then - echo "❌ ERROR: quota_failed must be 'true' or 'false', got: '$INPUT_QUOTA_FAILED'" + # Validate QUOTA_FAILED (must be 'true', 'false', or empty string) + if [[ "$INPUT_QUOTA_FAILED" != "true" && "$INPUT_QUOTA_FAILED" != "false" && "$INPUT_QUOTA_FAILED" != "" ]]; then + echo "❌ ERROR: QUOTA_FAILED must be 'true', 'false', or empty string, got: 
'$INPUT_QUOTA_FAILED'" VALIDATION_FAILED=true else - echo "✅ quota_failed: '$INPUT_QUOTA_FAILED' is valid" + echo "✅ QUOTA_FAILED: '$INPUT_QUOTA_FAILED' is valid" fi # Validate test_success (must be 'true' or 'false' or empty) diff --git a/.github/workflows/job-test-automation.yml b/.github/workflows/job-test-automation.yml index 80f1ad61a..4faa36323 100644 --- a/.github/workflows/job-test-automation.yml +++ b/.github/workflows/job-test-automation.yml @@ -16,6 +16,11 @@ on: type: string default: "GoldenPath-Testing" description: "Test suite to run: 'Smoke-Testing', 'GoldenPath-Testing' " + AZURE_ENV_USE_CASE: + required: false + type: string + default: "telecom" + description: "Azure Environment Use Case: 'telecom', 'IT_helpdesk'" secrets: EMAILNOTIFICATION_LOGICAPP_URL_TA: required: false @@ -33,6 +38,7 @@ env: api_url: ${{ inputs.KMGENERIC_URL_API}} accelerator_name: "KMGeneric" test_suite: ${{ inputs.TEST_SUITE }} + azure_env_use_case: ${{ inputs.AZURE_ENV_USE_CASE }} jobs: test: diff --git a/documents/CustomizeData.md b/documents/CustomizeData.md index 12946833a..998d74ef6 100644 --- a/documents/CustomizeData.md +++ b/documents/CustomizeData.md @@ -4,9 +4,10 @@ If you would like to update the solution to leverage your own data please follow > Note: you will need to complete the deployment steps [here](./DeploymentGuide.md) before proceeding. ## Prerequisites: -1. Your data will need to be in JSON or wav format with the file name formated prefixed with "convo" then a GUID followed by a timestamp. For more examples of the data format, please review the sample transcripts and audio data included [here](/infra/data/) - * Example: convo_32e38683-bbf7-407e-a541-09b37b77921d_2024-12-07 04%3A00%3A00 - +1. Your data will need to be in JSON or wav format with the file name formated prefixed with "convo" then a GUID followed by a timestamp. 
For more examples of the data format, please review the sample transcripts and audio data included [here](/infra/data/telecom) + * Example: + * Transcripts: `convo_32e38683-bbf7-407e-a541-09b37b77921d_2024-12-07 04%3A00%3A00.json` + * Audio: `convo_2c703f97-6657-4a15-b8b2-db6b96630b2d_2024-12-06 06_00_00.wav` 1. Navigate to the storage account in the resource group you are using for this solution. 2. Open the `data` container diff --git a/infra/scripts/index_scripts/03_cu_process_data_text.py b/infra/scripts/index_scripts/03_cu_process_data_text.py index e2056b1ad..0cabc52d0 100644 --- a/infra/scripts/index_scripts/03_cu_process_data_text.py +++ b/infra/scripts/index_scripts/03_cu_process_data_text.py @@ -306,6 +306,10 @@ def create_tables(): create_tables() +def get_field_value(fields, field_name, default=""): + field = fields.get(field_name, {}) + return field.get('valueString', default) + # Process files and insert into DB and Search conversationIds, docs, counter = [], [], 0 for path in paths: @@ -325,17 +329,21 @@ def create_tables(): start_timestamp = datetime.strptime(start_time, timestamp_format) conversation_id = file_name.split('convo_', 1)[1].split('_')[0] conversationIds.append(conversation_id) - duration = int(result['result']['contents'][0]['fields']['Duration']['valueString']) + fields = result['result']['contents'][0]['fields'] + duration_str = get_field_value(fields, 'Duration', '0') + try: + duration = int(duration_str) + except (ValueError, TypeError): + duration = 0 end_timestamp = str(start_timestamp + timedelta(seconds=duration)).split(".")[0] start_timestamp = str(start_timestamp).split(".")[0] - fields = result['result']['contents'][0]['fields'] - summary = fields['summary']['valueString'] - satisfied = fields['satisfied']['valueString'] - sentiment = fields['sentiment']['valueString'] - topic = fields['topic']['valueString'] - key_phrases = fields['keyPhrases']['valueString'] - complaint = fields['complaint']['valueString'] - content = 
fields['content']['valueString'] + summary = get_field_value(fields, 'summary') + satisfied = get_field_value(fields, 'satisfied') + sentiment = get_field_value(fields, 'sentiment') + topic = get_field_value(fields, 'topic') + key_phrases = get_field_value(fields, 'keyPhrases') + complaint = get_field_value(fields, 'complaint') + content = get_field_value(fields, 'content') cursor.execute( "INSERT INTO processed_data (ConversationId, EndTime, StartTime, Content, summary, satisfied, sentiment, topic, key_phrases, complaint) VALUES (?,?,?,?,?,?,?,?,?,?)", (conversation_id, end_timestamp, start_timestamp, content, summary, satisfied, sentiment, topic, key_phrases, complaint) diff --git a/infra/scripts/index_scripts/04_cu_process_custom_data.py b/infra/scripts/index_scripts/04_cu_process_custom_data.py index 8836e311c..534a87ca8 100644 --- a/infra/scripts/index_scripts/04_cu_process_custom_data.py +++ b/infra/scripts/index_scripts/04_cu_process_custom_data.py @@ -351,6 +351,10 @@ def create_tables(): create_tables() +def get_field_value(fields, field_name, default=""): + field = fields.get(field_name, {}) + return field.get('valueString', default) + ANALYZER_ID = "ckm-json" # Process files and insert into DB and Search - transcripts conversationIds, docs, counter = [], [], 0 @@ -367,17 +371,23 @@ def create_tables(): start_timestamp = datetime.strptime(start_time, timestamp_format) conversation_id = file_name.split('convo_', 1)[1].split('_')[0] conversationIds.append(conversation_id) - duration = int(result['result']['contents'][0]['fields']['Duration']['valueString']) + + fields = result['result']['contents'][0]['fields'] + duration_str = get_field_value(fields, 'Duration', '0') + try: + duration = int(duration_str) + except (ValueError, TypeError): + duration = 0 + end_timestamp = str(start_timestamp + timedelta(seconds=duration)).split(".")[0] start_timestamp = str(start_timestamp).split(".")[0] - fields = result['result']['contents'][0]['fields'] - summary = 
fields['summary']['valueString'] - satisfied = fields['satisfied']['valueString'] - sentiment = fields['sentiment']['valueString'] - topic = fields['topic']['valueString'] - key_phrases = fields['keyPhrases']['valueString'] - complaint = fields['complaint']['valueString'] - content = fields['content']['valueString'] + summary = get_field_value(fields, 'summary') + satisfied = get_field_value(fields, 'satisfied') + sentiment = get_field_value(fields, 'sentiment') + topic = get_field_value(fields, 'topic') + key_phrases = get_field_value(fields, 'keyPhrases') + complaint = get_field_value(fields, 'complaint') + content = get_field_value(fields, 'content') cursor.execute( "INSERT INTO processed_data (ConversationId, EndTime, StartTime, Content, summary, satisfied, sentiment, topic, key_phrases, complaint) VALUES (?,?,?,?,?,?,?,?,?,?)", (conversation_id, end_timestamp, start_timestamp, content, summary, satisfied, sentiment, topic, key_phrases, complaint) @@ -421,19 +431,25 @@ def create_tables(): conversation_id = file_name.split('convo_', 1)[1].split('_')[0] conversationIds.append(conversation_id) - duration = int(result['result']['contents'][0]['fields']['Duration']['valueString']) + fields = result['result']['contents'][0]['fields'] + duration_str = get_field_value(fields, 'Duration', '0') + try: + duration = int(duration_str) + except (ValueError, TypeError): + duration = 0 + end_timestamp = str(start_timestamp + timedelta(seconds=duration)) end_timestamp = end_timestamp.split(".")[0] start_timestamp = str(start_timestamp).split(".")[0] - summary = result['result']['contents'][0]['fields']['summary']['valueString'] - satisfied = result['result']['contents'][0]['fields']['satisfied']['valueString'] - sentiment = result['result']['contents'][0]['fields']['sentiment']['valueString'] - topic = result['result']['contents'][0]['fields']['topic']['valueString'] - key_phrases = result['result']['contents'][0]['fields']['keyPhrases']['valueString'] - complaint = 
result['result']['contents'][0]['fields']['complaint']['valueString'] - content = result['result']['contents'][0]['fields']['content']['valueString'] - # print(topic) + summary = get_field_value(fields, 'summary') + satisfied = get_field_value(fields, 'satisfied') + sentiment = get_field_value(fields, 'sentiment') + topic = get_field_value(fields, 'topic') + key_phrases = get_field_value(fields, 'keyPhrases') + complaint = get_field_value(fields, 'complaint') + content = get_field_value(fields, 'content') + cursor.execute(f"INSERT INTO processed_data (ConversationId, EndTime, StartTime, Content, summary, satisfied, sentiment, topic, key_phrases, complaint) VALUES (?,?,?,?,?,?,?,?,?,?)", (conversation_id, end_timestamp, start_timestamp, content, summary, satisfied, sentiment, topic, key_phrases, complaint)) conn.commit()